update
Some checks failed
Pipeline: Test, Lint, Build / Get version info (push) Has been cancelled
Pipeline: Test, Lint, Build / Lint Go code (push) Has been cancelled
Pipeline: Test, Lint, Build / Test Go code (push) Has been cancelled
Pipeline: Test, Lint, Build / Test JS code (push) Has been cancelled
Pipeline: Test, Lint, Build / Lint i18n files (push) Has been cancelled
Pipeline: Test, Lint, Build / Check Docker configuration (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (darwin/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (darwin/arm64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/386) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v5) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v6) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v7) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (windows/386) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (windows/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Push to GHCR (push) Has been cancelled
Pipeline: Test, Lint, Build / Push to Docker Hub (push) Has been cancelled
Pipeline: Test, Lint, Build / Cleanup digest artifacts (push) Has been cancelled
Pipeline: Test, Lint, Build / Build Windows installers (push) Has been cancelled
Pipeline: Test, Lint, Build / Package/Release (push) Has been cancelled
Pipeline: Test, Lint, Build / Upload Linux PKG (push) Has been cancelled
Close stale issues and PRs / stale (push) Has been cancelled
POEditor import / update-translations (push) Has been cancelled
Some checks failed
Pipeline: Test, Lint, Build / Get version info (push) Has been cancelled
Pipeline: Test, Lint, Build / Lint Go code (push) Has been cancelled
Pipeline: Test, Lint, Build / Test Go code (push) Has been cancelled
Pipeline: Test, Lint, Build / Test JS code (push) Has been cancelled
Pipeline: Test, Lint, Build / Lint i18n files (push) Has been cancelled
Pipeline: Test, Lint, Build / Check Docker configuration (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (darwin/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (darwin/arm64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/386) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v5) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v6) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v7) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (windows/386) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (windows/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Push to GHCR (push) Has been cancelled
Pipeline: Test, Lint, Build / Push to Docker Hub (push) Has been cancelled
Pipeline: Test, Lint, Build / Cleanup digest artifacts (push) Has been cancelled
Pipeline: Test, Lint, Build / Build Windows installers (push) Has been cancelled
Pipeline: Test, Lint, Build / Package/Release (push) Has been cancelled
Pipeline: Test, Lint, Build / Upload Linux PKG (push) Has been cancelled
Close stale issues and PRs / stale (push) Has been cancelled
POEditor import / update-translations (push) Has been cancelled
This commit is contained in:
27
.devcontainer/Dockerfile
Normal file
27
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.148.1/containers/go/.devcontainer/base.Dockerfile
|
||||||
|
|
||||||
|
# [Choice] Go version: 1, 1.15, 1.14
|
||||||
|
ARG VARIANT="1"
|
||||||
|
FROM mcr.microsoft.com/vscode/devcontainers/go:${VARIANT}
|
||||||
|
|
||||||
|
# [Option] Install Node.js
|
||||||
|
ARG INSTALL_NODE="true"
|
||||||
|
ARG NODE_VERSION="lts/*"
|
||||||
|
RUN if [ "${INSTALL_NODE}" = "true" ]; then su vscode -c "source /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
|
||||||
|
|
||||||
|
# Install additional OS packages
|
||||||
|
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||||
|
&& apt-get -y install --no-install-recommends ffmpeg
|
||||||
|
|
||||||
|
# Install TagLib from cross-taglib releases
|
||||||
|
ARG CROSS_TAGLIB_VERSION="2.1.1-1"
|
||||||
|
ARG TARGETARCH
|
||||||
|
RUN DOWNLOAD_ARCH="linux-${TARGETARCH}" \
|
||||||
|
&& wget -q "https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/taglib-${DOWNLOAD_ARCH}.tar.gz" -O /tmp/cross-taglib.tar.gz \
|
||||||
|
&& tar -xzf /tmp/cross-taglib.tar.gz -C /usr --strip-components=1 \
|
||||||
|
&& mv /usr/include/taglib/* /usr/include/ \
|
||||||
|
&& rmdir /usr/include/taglib \
|
||||||
|
&& rm /tmp/cross-taglib.tar.gz /usr/provenance.json
|
||||||
|
|
||||||
|
# [Optional] Uncomment this line to install global node packages.
|
||||||
|
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
|
||||||
64
.devcontainer/devcontainer.json
Normal file
64
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
{
|
||||||
|
"name": "Go",
|
||||||
|
"build": {
|
||||||
|
"dockerfile": "Dockerfile",
|
||||||
|
"args": {
|
||||||
|
// Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
|
||||||
|
"VARIANT": "1.25",
|
||||||
|
// Options
|
||||||
|
"INSTALL_NODE": "true",
|
||||||
|
"NODE_VERSION": "v24",
|
||||||
|
"CROSS_TAGLIB_VERSION": "2.1.1-1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"workspaceMount": "",
|
||||||
|
"runArgs": [
|
||||||
|
"--cap-add=SYS_PTRACE",
|
||||||
|
"--security-opt",
|
||||||
|
"seccomp=unconfined",
|
||||||
|
"--volume=${localWorkspaceFolder}:/workspaces/${localWorkspaceFolderBasename}:Z"
|
||||||
|
],
|
||||||
|
// Set *default* container specific settings.json values on container create.
|
||||||
|
"customizations": {
|
||||||
|
"vscode": {
|
||||||
|
"settings": {
|
||||||
|
"terminal.integrated.shell.linux": "/bin/bash",
|
||||||
|
"go.useGoProxyToCheckForToolUpdates": false,
|
||||||
|
"go.useLanguageServer": true,
|
||||||
|
"go.gopath": "/go",
|
||||||
|
"go.goroot": "/usr/local/go",
|
||||||
|
"go.toolsGopath": "/go/bin",
|
||||||
|
"go.formatTool": "goimports",
|
||||||
|
"go.lintOnSave": "package",
|
||||||
|
"go.lintTool": "golangci-lint",
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"[javascript]": {
|
||||||
|
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||||
|
},
|
||||||
|
"[json]": {
|
||||||
|
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||||
|
},
|
||||||
|
"[jsonc]": {
|
||||||
|
"editor.defaultFormatter": "vscode.json-language-features"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Add the IDs of extensions you want installed when the container is created.
|
||||||
|
"extensions": [
|
||||||
|
"golang.Go",
|
||||||
|
"esbenp.prettier-vscode",
|
||||||
|
"tamasfe.even-better-toml"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||||
|
"forwardPorts": [
|
||||||
|
4533,
|
||||||
|
4633
|
||||||
|
],
|
||||||
|
// Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||||
|
"remoteUser": "vscode",
|
||||||
|
"remoteEnv": {
|
||||||
|
"ND_MUSICFOLDER": "./music",
|
||||||
|
"ND_DATAFOLDER": "./data"
|
||||||
|
}
|
||||||
|
}
|
||||||
18
.dockerignore
Normal file
18
.dockerignore
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
.DS_Store
|
||||||
|
ui/node_modules
|
||||||
|
ui/build
|
||||||
|
!ui/build/.gitkeep
|
||||||
|
Dockerfile
|
||||||
|
docker-compose*.yml
|
||||||
|
data
|
||||||
|
*.db
|
||||||
|
testDB
|
||||||
|
navidrome
|
||||||
|
navidrome.toml
|
||||||
|
tmp
|
||||||
|
!tmp/taglib
|
||||||
|
dist
|
||||||
|
binaries
|
||||||
|
cache
|
||||||
|
music
|
||||||
|
!Dockerfile
|
||||||
4
.git-blame-ignore-revs
Normal file
4
.git-blame-ignore-revs
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Upgrade Prettier to 2.0.4. Reformatted all JS files
|
||||||
|
b3f70538a9138bc279a451f4f358605097210d41
|
||||||
|
# Move project to Navidrome GitHub organization
|
||||||
|
6ee45a9ccc5e7ea4290c89030e67c99c0514bd25
|
||||||
12
.github/FUNDING.yml
vendored
Normal file
12
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# These are supported funding model platforms
|
||||||
|
|
||||||
|
github: deluan
|
||||||
|
patreon: # Replace with a single Patreon username
|
||||||
|
open_collective: # Replace with a single Open Collective username
|
||||||
|
ko_fi: deluan
|
||||||
|
liberapay: deluan
|
||||||
|
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||||
|
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||||
|
issuehunt: # Replace with a single IssueHunt username
|
||||||
|
otechie: # Replace with a single Otechie username
|
||||||
|
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||||
103
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
103
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
name: Bug Report
|
||||||
|
description: Before opening a new issue, please search to see if an issue already exists for the bug you encountered.
|
||||||
|
title: "[Bug]: "
|
||||||
|
labels: ["bug", "triage"]
|
||||||
|
#assignees:
|
||||||
|
# - deluan
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
### Thanks for taking the time to fill out this bug report!
|
||||||
|
- type: checkboxes
|
||||||
|
id: requirements
|
||||||
|
attributes:
|
||||||
|
label: "I confirm that:"
|
||||||
|
options:
|
||||||
|
- label: I have searched the existing [open AND closed issues](https://github.com/navidrome/navidrome/issues?q=is%3Aissue) to see if an issue already exists for the bug I've encountered
|
||||||
|
required: true
|
||||||
|
- label: I'm using the latest version (your issue may have been fixed already)
|
||||||
|
required: false
|
||||||
|
- type: input
|
||||||
|
id: version
|
||||||
|
attributes:
|
||||||
|
label: Version
|
||||||
|
description: What version of Navidrome are you running? (please try upgrading first, as your issue may have been fixed already).
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Current Behavior
|
||||||
|
description: A concise description of what you're experiencing.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Expected Behavior
|
||||||
|
description: A concise description of what you expected to happen.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Steps To Reproduce
|
||||||
|
description: Steps to reproduce the behavior.
|
||||||
|
placeholder: |
|
||||||
|
1. In this scenario...
|
||||||
|
2. With this config...
|
||||||
|
3. Click (or Execute) '...'
|
||||||
|
4. See error...
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: textarea
|
||||||
|
id: env
|
||||||
|
attributes:
|
||||||
|
label: Environment
|
||||||
|
description: |
|
||||||
|
examples:
|
||||||
|
- **OS**: Ubuntu 20.04
|
||||||
|
- **Browser**: Chrome 110.0.5481.177 on Windows 11
|
||||||
|
- **Client**: DSub 5.5.1
|
||||||
|
value: |
|
||||||
|
- OS:
|
||||||
|
- Browser:
|
||||||
|
- Client:
|
||||||
|
render: markdown
|
||||||
|
- type: dropdown
|
||||||
|
id: distribution
|
||||||
|
attributes:
|
||||||
|
label: How Navidrome is installed?
|
||||||
|
multiple: false
|
||||||
|
options:
|
||||||
|
- Docker
|
||||||
|
- Binary (from downloads page)
|
||||||
|
- Package
|
||||||
|
- Built from sources
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
id: config
|
||||||
|
attributes:
|
||||||
|
label: Configuration
|
||||||
|
description: Please copy and paste your `navidrome.toml` (and/or `docker-compose.yml`) configuration. This will be automatically formatted into code, so no need for backticks.
|
||||||
|
render: toml
|
||||||
|
- type: textarea
|
||||||
|
id: logs
|
||||||
|
attributes:
|
||||||
|
label: Relevant log output
|
||||||
|
description: Please copy and paste any relevant log output (change your `LogLevel` (`ND_LOGLEVEL`) to debug). This will be automatically formatted into code, so no need for backticks. ([Where I can find the logs?](https://www.navidrome.org/docs/faq/#where-are-the-logs))
|
||||||
|
render: shell
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Anything else?
|
||||||
|
description: |
|
||||||
|
Links? References? Anything that will give us more context about the issue you are encountering!
|
||||||
|
|
||||||
|
Tip: You can attach screenshots by clicking this area to highlight it and then dragging files in.
|
||||||
|
- type: checkboxes
|
||||||
|
id: terms
|
||||||
|
attributes:
|
||||||
|
label: Code of Conduct
|
||||||
|
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/navidrome/navidrome/blob/master/CODE_OF_CONDUCT.md).
|
||||||
|
options:
|
||||||
|
- label: I agree to follow Navidrome's Code of Conduct
|
||||||
|
required: true
|
||||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
blank_issues_enabled: false
|
||||||
|
contact_links:
|
||||||
|
- name: Ideas for new features
|
||||||
|
url: https://github.com/navidrome/navidrome/discussions/categories/ideas
|
||||||
|
about: This is the place to share and discuss new ideas and potentially new features.
|
||||||
|
- name: Support requests
|
||||||
|
url: https://github.com/navidrome/navidrome/discussions/categories/q-a
|
||||||
|
about: This is the place to ask questions.
|
||||||
23
.github/actions/download-taglib/action.yml
vendored
Normal file
23
.github/actions/download-taglib/action.yml
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
name: 'Download TagLib'
|
||||||
|
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version of TagLib to download'
|
||||||
|
required: true
|
||||||
|
platform:
|
||||||
|
description: 'Platform to download TagLib for'
|
||||||
|
default: 'linux-amd64'
|
||||||
|
runs:
|
||||||
|
using: 'composite'
|
||||||
|
steps:
|
||||||
|
- name: Download TagLib
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
mkdir -p /tmp/taglib
|
||||||
|
cd /tmp
|
||||||
|
FILE=taglib-${{ inputs.platform }}.tar.gz
|
||||||
|
wget https://github.com/navidrome/cross-taglib/releases/download/v${{ inputs.version }}/${FILE}
|
||||||
|
tar -xzf ${FILE} -C taglib
|
||||||
|
PKG_CONFIG_PREFIX=/tmp/taglib
|
||||||
|
echo "PKG_CONFIG_PREFIX=${PKG_CONFIG_PREFIX}" >> $GITHUB_ENV
|
||||||
|
echo "PKG_CONFIG_PATH=${PKG_CONFIG_PATH}:${PKG_CONFIG_PREFIX}/lib/pkgconfig" >> $GITHUB_ENV
|
||||||
84
.github/actions/prepare-docker/action.yml
vendored
Normal file
84
.github/actions/prepare-docker/action.yml
vendored
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
name: 'Prepare Docker Buildx environment'
|
||||||
|
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
|
||||||
|
inputs:
|
||||||
|
github_token:
|
||||||
|
description: 'GitHub token'
|
||||||
|
required: true
|
||||||
|
default: ''
|
||||||
|
hub_repository:
|
||||||
|
description: 'Docker Hub repository to push images to'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
hub_username:
|
||||||
|
description: 'Docker Hub username'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
hub_password:
|
||||||
|
description: 'Docker Hub password'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
outputs:
|
||||||
|
tags:
|
||||||
|
description: 'Docker image tags'
|
||||||
|
value: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels:
|
||||||
|
description: 'Docker image labels'
|
||||||
|
value: ${{ steps.meta.outputs.labels }}
|
||||||
|
annotations:
|
||||||
|
description: 'Docker image annotations'
|
||||||
|
value: ${{ steps.meta.outputs.annotations }}
|
||||||
|
version:
|
||||||
|
description: 'Docker image version'
|
||||||
|
value: ${{ steps.meta.outputs.version }}
|
||||||
|
hub_repository:
|
||||||
|
description: 'Docker Hub repository'
|
||||||
|
value: ${{ env.DOCKER_HUB_REPO }}
|
||||||
|
hub_enabled:
|
||||||
|
description: 'Is Docker Hub enabled'
|
||||||
|
value: ${{ env.DOCKER_HUB_ENABLED }}
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: 'composite'
|
||||||
|
steps:
|
||||||
|
- name: Check Docker Hub configuration
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
if [ -z "${{inputs.hub_repository}}" ]; then
|
||||||
|
echo "DOCKER_HUB_REPO=none" >> $GITHUB_ENV
|
||||||
|
echo "DOCKER_HUB_ENABLED=false" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "DOCKER_HUB_REPO=${{inputs.hub_repository}}" >> $GITHUB_ENV
|
||||||
|
echo "DOCKER_HUB_ENABLED=true" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
if: inputs.hub_username != '' && inputs.hub_password != ''
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ inputs.hub_username }}
|
||||||
|
password: ${{ inputs.hub_password }}
|
||||||
|
|
||||||
|
- name: Login to GitHub Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ inputs.github_token }}
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
id: buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Extract metadata for Docker image
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
labels: |
|
||||||
|
maintainer=deluan@navidrome.org
|
||||||
|
images: |
|
||||||
|
name=${{env.DOCKER_HUB_REPO}},enable=${{env.DOCKER_HUB_ENABLED}}
|
||||||
|
name=ghcr.io/${{ github.repository }}
|
||||||
|
tags: |
|
||||||
|
type=ref,event=pr
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=raw,value=develop,enable={{is_default_branch}}
|
||||||
22
.github/dependabot.yml
vendored
Normal file
22
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: npm
|
||||||
|
directory: "/ui"
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
|
open-pull-requests-limit: 10
|
||||||
|
- package-ecosystem: gomod
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
|
open-pull-requests-limit: 10
|
||||||
|
- package-ecosystem: docker
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
|
open-pull-requests-limit: 10
|
||||||
|
- package-ecosystem: github-actions
|
||||||
|
directory: "/.github/workflows"
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
|
open-pull-requests-limit: 10
|
||||||
38
.github/pull_request_template.md
vendored
Normal file
38
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
### Description
|
||||||
|
<!-- Please provide a clear and concise description of what this PR does and why it is needed. -->
|
||||||
|
|
||||||
|
### Related Issues
|
||||||
|
<!-- List any related issues, e.g., "Fixes #123" or "Related to #456". -->
|
||||||
|
|
||||||
|
### Type of Change
|
||||||
|
- [ ] Bug fix
|
||||||
|
- [ ] New feature
|
||||||
|
- [ ] Documentation update
|
||||||
|
- [ ] Refactor
|
||||||
|
- [ ] Other (please describe):
|
||||||
|
|
||||||
|
### Checklist
|
||||||
|
Please review and check all that apply:
|
||||||
|
|
||||||
|
- [ ] My code follows the project’s coding style
|
||||||
|
- [ ] I have tested the changes locally
|
||||||
|
- [ ] I have added or updated documentation as needed
|
||||||
|
- [ ] I have added tests that prove my fix/feature works (or explain why not)
|
||||||
|
- [ ] All existing and new tests pass
|
||||||
|
|
||||||
|
### How to Test
|
||||||
|
<!-- Describe the steps to test your changes. Include setup, commands, and expected results. -->
|
||||||
|
|
||||||
|
### Screenshots / Demos (if applicable)
|
||||||
|
<!-- Add screenshots, GIFs, or links to demos if your change includes UI updates or visual changes. -->
|
||||||
|
|
||||||
|
### Additional Notes
|
||||||
|
<!-- Anything else the maintainer should know? Potential side effects, breaking changes, or areas of concern? -->
|
||||||
|
|
||||||
|
<!--
|
||||||
|
**Tips for Contributors:**
|
||||||
|
- Be concise but thorough.
|
||||||
|
- If your PR is large, consider breaking it into smaller PRs.
|
||||||
|
- Tag the maintainer if you need a prompt review.
|
||||||
|
- Avoid force pushing to the branch after opening the PR, as it can complicate the review process.
|
||||||
|
-->
|
||||||
BIN
.github/screenshots/ss-desktop-player.png
vendored
Normal file
BIN
.github/screenshots/ss-desktop-player.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3.7 MiB |
BIN
.github/screenshots/ss-mobile-album-view.png
vendored
Normal file
BIN
.github/screenshots/ss-mobile-album-view.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 223 KiB |
BIN
.github/screenshots/ss-mobile-login.png
vendored
Normal file
BIN
.github/screenshots/ss-mobile-login.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 735 KiB |
BIN
.github/screenshots/ss-mobile-player.png
vendored
Normal file
BIN
.github/screenshots/ss-mobile-player.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 885 KiB |
54
.github/workflows/download-link-on-pr.yml
vendored
Normal file
54
.github/workflows/download-link-on-pr.yml
vendored
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
name: Add download link to PR
|
||||||
|
on:
|
||||||
|
workflow_run:
|
||||||
|
workflows: ['Pipeline: Test, Lint, Build']
|
||||||
|
types: [completed]
|
||||||
|
jobs:
|
||||||
|
pr_comment:
|
||||||
|
if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/github-script@v3
|
||||||
|
with:
|
||||||
|
# This snippet is public-domain, taken from
|
||||||
|
# https://github.com/oprypin/nightly.link/blob/master/.github/workflows/pr-comment.yml
|
||||||
|
script: |
|
||||||
|
const {owner, repo} = context.repo;
|
||||||
|
const run_id = ${{github.event.workflow_run.id}};
|
||||||
|
const pull_head_sha = '${{github.event.workflow_run.head_sha}}';
|
||||||
|
const pull_user_id = ${{github.event.sender.id}};
|
||||||
|
|
||||||
|
const issue_number = await (async () => {
|
||||||
|
const pulls = await github.pulls.list({owner, repo});
|
||||||
|
for await (const {data} of github.paginate.iterator(pulls)) {
|
||||||
|
for (const pull of data) {
|
||||||
|
if (pull.head.sha === pull_head_sha && pull.user.id === pull_user_id) {
|
||||||
|
return pull.number;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
if (issue_number) {
|
||||||
|
core.info(`Using pull request ${issue_number}`);
|
||||||
|
} else {
|
||||||
|
return core.error(`No matching pull request found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const {data: {artifacts}} = await github.actions.listWorkflowRunArtifacts({owner, repo, run_id});
|
||||||
|
if (!artifacts.length) {
|
||||||
|
return core.error(`No artifacts found`);
|
||||||
|
}
|
||||||
|
let body = `Download the artifacts for this pull request:\n`;
|
||||||
|
for (const art of artifacts) {
|
||||||
|
body += `\n* [${art.name}.zip](https://nightly.link/${owner}/${repo}/actions/artifacts/${art.id}.zip)`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const {data: comments} = await github.issues.listComments({repo, owner, issue_number});
|
||||||
|
const existing_comment = comments.find((c) => c.user.login === 'github-actions[bot]');
|
||||||
|
if (existing_comment) {
|
||||||
|
core.info(`Updating comment ${existing_comment.id}`);
|
||||||
|
await github.issues.updateComment({repo, owner, comment_id: existing_comment.id, body});
|
||||||
|
} else {
|
||||||
|
core.info(`Creating a comment`);
|
||||||
|
await github.issues.createComment({repo, owner, issue_number, body});
|
||||||
|
}
|
||||||
467
.github/workflows/pipeline.yml
vendored
Normal file
467
.github/workflows/pipeline.yml
vendored
Normal file
@@ -0,0 +1,467 @@
|
|||||||
|
name: "Pipeline: Test, Lint, Build"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
tags:
|
||||||
|
- "v*"
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ startsWith(github.ref, 'refs/tags/v') && 'tag' || 'branch' }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
CROSS_TAGLIB_VERSION: "2.1.1-1"
|
||||||
|
IS_RELEASE: ${{ startsWith(github.ref, 'refs/tags/') && 'true' || 'false' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
git-version:
|
||||||
|
name: Get version info
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
|
||||||
|
git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
fetch-tags: true
|
||||||
|
|
||||||
|
- name: Show git version info
|
||||||
|
run: |
|
||||||
|
echo "git describe (dirty): $(git describe --dirty --always --tags)"
|
||||||
|
echo "git describe --tags: $(git describe --tags `git rev-list --tags --max-count=1`)"
|
||||||
|
echo "git tag: $(git tag --sort=-committerdate | head -n 1)"
|
||||||
|
echo "github_ref: $GITHUB_REF"
|
||||||
|
echo "github_head_sha: ${{ github.event.pull_request.head.sha }}"
|
||||||
|
git tag -l
|
||||||
|
- name: Determine git current SHA and latest tag
|
||||||
|
id: git-version
|
||||||
|
run: |
|
||||||
|
GIT_TAG=$(git tag --sort=-committerdate | head -n 1)
|
||||||
|
if [ -n "$GIT_TAG" ]; then
|
||||||
|
if [[ "$GITHUB_REF" != refs/tags/* ]]; then
|
||||||
|
GIT_TAG=${GIT_TAG}-SNAPSHOT
|
||||||
|
fi
|
||||||
|
echo "GIT_TAG=$GIT_TAG" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
GIT_SHA=$(git rev-parse --short HEAD)
|
||||||
|
PR_NUM=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
|
||||||
|
if [[ $PR_NUM != "null" ]]; then
|
||||||
|
GIT_SHA=$(echo "${{ github.event.pull_request.head.sha }}" | cut -c1-8)
|
||||||
|
GIT_SHA="pr-${PR_NUM}/${GIT_SHA}"
|
||||||
|
fi
|
||||||
|
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
echo "GIT_TAG=$GIT_TAG"
|
||||||
|
echo "GIT_SHA=$GIT_SHA"
|
||||||
|
|
||||||
|
go-lint:
|
||||||
|
name: Lint Go code
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Download TagLib
|
||||||
|
uses: ./.github/actions/download-taglib
|
||||||
|
with:
|
||||||
|
version: ${{ env.CROSS_TAGLIB_VERSION }}
|
||||||
|
|
||||||
|
- name: golangci-lint
|
||||||
|
uses: golangci/golangci-lint-action@v9
|
||||||
|
with:
|
||||||
|
version: latest
|
||||||
|
problem-matchers: true
|
||||||
|
args: --timeout 2m
|
||||||
|
|
||||||
|
- name: Run go goimports
|
||||||
|
run: go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v '_gen.go$' | grep -v '.pb.go$'`
|
||||||
|
- run: go mod tidy
|
||||||
|
- name: Verify no changes from goimports and go mod tidy
|
||||||
|
run: |
|
||||||
|
git status --porcelain
|
||||||
|
if [ -n "$(git status --porcelain)" ]; then
|
||||||
|
echo 'To fix this check, run "make format" and commit the changes'
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
go:
|
||||||
|
name: Test Go code
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check out code into the Go module directory
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Download TagLib
|
||||||
|
uses: ./.github/actions/download-taglib
|
||||||
|
with:
|
||||||
|
version: ${{ env.CROSS_TAGLIB_VERSION }}
|
||||||
|
|
||||||
|
- name: Download dependencies
|
||||||
|
run: go mod download
|
||||||
|
|
||||||
|
- name: Test
|
||||||
|
run: |
|
||||||
|
pkg-config --define-prefix --cflags --libs taglib # for debugging
|
||||||
|
go test -shuffle=on -tags netgo -race ./... -v
|
||||||
|
|
||||||
|
js:
|
||||||
|
name: Test JS code
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
NODE_OPTIONS: "--max_old_space_size=4096"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- uses: actions/setup-node@v6
|
||||||
|
with:
|
||||||
|
node-version: 24
|
||||||
|
cache: "npm"
|
||||||
|
cache-dependency-path: "**/package-lock.json"
|
||||||
|
|
||||||
|
- name: npm install dependencies
|
||||||
|
run: |
|
||||||
|
cd ui
|
||||||
|
npm ci
|
||||||
|
|
||||||
|
- name: npm lint
|
||||||
|
run: |
|
||||||
|
cd ui
|
||||||
|
npm run check-formatting && npm run lint
|
||||||
|
|
||||||
|
- name: npm test
|
||||||
|
run: |
|
||||||
|
cd ui
|
||||||
|
npm test
|
||||||
|
|
||||||
|
- name: npm build
|
||||||
|
run: |
|
||||||
|
cd ui
|
||||||
|
npm run build
|
||||||
|
|
||||||
|
i18n-lint:
|
||||||
|
name: Lint i18n files
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- run: |
|
||||||
|
set -e
|
||||||
|
for file in resources/i18n/*.json; do
|
||||||
|
echo "Validating $file"
|
||||||
|
if ! jq empty "$file" 2>error.log; then
|
||||||
|
error_message=$(cat error.log)
|
||||||
|
line_number=$(echo "$error_message" | grep -oP 'line \K[0-9]+')
|
||||||
|
echo "::error file=$file,line=$line_number::$error_message"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
- run: ./.github/workflows/validate-translations.sh -v
|
||||||
|
|
||||||
|
|
||||||
|
check-push-enabled:
|
||||||
|
name: Check Docker configuration
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
is_enabled: ${{ steps.check.outputs.is_enabled }}
|
||||||
|
steps:
|
||||||
|
- name: Check if Docker push is configured
|
||||||
|
id: check
|
||||||
|
run: echo "is_enabled=${{ secrets.DOCKER_HUB_USERNAME != '' }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
build:
|
||||||
|
name: Build
|
||||||
|
needs: [js, go, go-lint, i18n-lint, git-version, check-push-enabled]
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
platform: [ linux/amd64, linux/arm64, linux/arm/v5, linux/arm/v6, linux/arm/v7, linux/386, darwin/amd64, darwin/arm64, windows/amd64, windows/386 ]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
IS_LINUX: ${{ startsWith(matrix.platform, 'linux/') && 'true' || 'false' }}
|
||||||
|
IS_ARMV5: ${{ matrix.platform == 'linux/arm/v5' && 'true' || 'false' }}
|
||||||
|
IS_DOCKER_PUSH_CONFIGURED: ${{ needs.check-push-enabled.outputs.is_enabled == 'true' }}
|
||||||
|
DOCKER_BUILD_SUMMARY: false
|
||||||
|
GIT_SHA: ${{ needs.git-version.outputs.git_sha }}
|
||||||
|
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
|
||||||
|
steps:
|
||||||
|
- name: Sanitize platform name
|
||||||
|
id: set-platform
|
||||||
|
run: |
|
||||||
|
PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
|
||||||
|
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Prepare Docker Buildx
|
||||||
|
uses: ./.github/actions/prepare-docker
|
||||||
|
id: docker
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
|
||||||
|
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||||
|
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Build Binaries
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: Dockerfile
|
||||||
|
platforms: ${{ matrix.platform }}
|
||||||
|
outputs: |
|
||||||
|
type=local,dest=./output/${{ env.PLATFORM }}
|
||||||
|
target: binary
|
||||||
|
build-args: |
|
||||||
|
GIT_SHA=${{ env.GIT_SHA }}
|
||||||
|
GIT_TAG=${{ env.GIT_TAG }}
|
||||||
|
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
|
||||||
|
|
||||||
|
- name: Upload Binaries
|
||||||
|
uses: actions/upload-artifact@v5
|
||||||
|
with:
|
||||||
|
name: navidrome-${{ env.PLATFORM }}
|
||||||
|
path: ./output
|
||||||
|
retention-days: 7
|
||||||
|
|
||||||
|
- name: Build and push image by digest
|
||||||
|
id: push-image
|
||||||
|
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: Dockerfile
|
||||||
|
platforms: ${{ matrix.platform }}
|
||||||
|
labels: ${{ steps.docker.outputs.labels }}
|
||||||
|
build-args: |
|
||||||
|
GIT_SHA=${{ env.GIT_SHA }}
|
||||||
|
GIT_TAG=${{ env.GIT_TAG }}
|
||||||
|
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
|
||||||
|
outputs: |
|
||||||
|
type=image,name=${{ steps.docker.outputs.hub_repository }},push-by-digest=true,name-canonical=true,push=${{ steps.docker.outputs.hub_enabled }}
|
||||||
|
type=image,name=ghcr.io/${{ github.repository }},push-by-digest=true,name-canonical=true,push=true
|
||||||
|
|
||||||
|
- name: Export digest
|
||||||
|
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||||
|
run: |
|
||||||
|
mkdir -p /tmp/digests
|
||||||
|
digest="${{ steps.push-image.outputs.digest }}"
|
||||||
|
touch "/tmp/digests/${digest#sha256:}"
|
||||||
|
|
||||||
|
- name: Upload digest
|
||||||
|
uses: actions/upload-artifact@v5
|
||||||
|
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||||
|
with:
|
||||||
|
name: digests-${{ env.PLATFORM }}
|
||||||
|
path: /tmp/digests/*
|
||||||
|
if-no-files-found: error
|
||||||
|
retention-days: 1
|
||||||
|
|
||||||
|
push-manifest-ghcr:
|
||||||
|
name: Push to GHCR
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [build, check-push-enabled]
|
||||||
|
if: needs.check-push-enabled.outputs.is_enabled == 'true'
|
||||||
|
env:
|
||||||
|
REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Download digests
|
||||||
|
uses: actions/download-artifact@v6
|
||||||
|
with:
|
||||||
|
path: /tmp/digests
|
||||||
|
pattern: digests-*
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Prepare Docker Buildx
|
||||||
|
uses: ./.github/actions/prepare-docker
|
||||||
|
id: docker
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Create manifest list and push to ghcr.io
|
||||||
|
working-directory: /tmp/digests
|
||||||
|
run: |
|
||||||
|
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io"))) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||||
|
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
|
||||||
|
|
||||||
|
- name: Inspect image in ghcr.io
|
||||||
|
run: |
|
||||||
|
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
|
||||||
|
|
||||||
|
push-manifest-dockerhub:
|
||||||
|
name: Push to Docker Hub
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
needs: [build, check-push-enabled]
|
||||||
|
if: needs.check-push-enabled.outputs.is_enabled == 'true' && vars.DOCKER_HUB_REPO != ''
|
||||||
|
continue-on-error: true
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Download digests
|
||||||
|
uses: actions/download-artifact@v6
|
||||||
|
with:
|
||||||
|
path: /tmp/digests
|
||||||
|
pattern: digests-*
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Prepare Docker Buildx
|
||||||
|
uses: ./.github/actions/prepare-docker
|
||||||
|
id: docker
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
|
||||||
|
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||||
|
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Create manifest list and push to Docker Hub
|
||||||
|
uses: nick-fields/retry@v3
|
||||||
|
with:
|
||||||
|
timeout_minutes: 5
|
||||||
|
max_attempts: 3
|
||||||
|
retry_wait_seconds: 30
|
||||||
|
command: |
|
||||||
|
cd /tmp/digests
|
||||||
|
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io") | not)) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||||
|
$(printf 'ghcr.io/${{ github.repository }}@sha256:%s ' *)
|
||||||
|
|
||||||
|
- name: Inspect image in Docker Hub
|
||||||
|
run: |
|
||||||
|
docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}
|
||||||
|
|
||||||
|
cleanup-digests:
|
||||||
|
name: Cleanup digest artifacts
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [push-manifest-ghcr, push-manifest-dockerhub]
|
||||||
|
if: always() && needs.push-manifest-ghcr.result == 'success'
|
||||||
|
steps:
|
||||||
|
- name: Delete unnecessary digest artifacts
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ github.token }}
|
||||||
|
run: |
|
||||||
|
for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("digests-")) | .id'); do
|
||||||
|
gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
|
||||||
|
done
|
||||||
|
|
||||||
|
msi:
|
||||||
|
name: Build Windows installers
|
||||||
|
needs: [build, git-version]
|
||||||
|
runs-on: ubuntu-24.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- uses: actions/download-artifact@v6
|
||||||
|
with:
|
||||||
|
path: ./binaries
|
||||||
|
pattern: navidrome-windows*
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Install Wix
|
||||||
|
run: sudo apt-get install -y wixl jq
|
||||||
|
|
||||||
|
- name: Build MSI
|
||||||
|
env:
|
||||||
|
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
|
||||||
|
run: |
|
||||||
|
rm -rf binaries/msi
|
||||||
|
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} 386
|
||||||
|
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} amd64
|
||||||
|
du -h binaries/msi/*.msi
|
||||||
|
|
||||||
|
- name: Upload MSI files
|
||||||
|
uses: actions/upload-artifact@v5
|
||||||
|
with:
|
||||||
|
name: navidrome-windows-installers
|
||||||
|
path: binaries/msi/*.msi
|
||||||
|
retention-days: 7
|
||||||
|
|
||||||
|
release:
|
||||||
|
name: Package/Release
|
||||||
|
needs: [build, msi]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
package_list: ${{ steps.set-package-list.outputs.package_list }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
fetch-tags: true
|
||||||
|
|
||||||
|
- uses: actions/download-artifact@v6
|
||||||
|
with:
|
||||||
|
path: ./binaries
|
||||||
|
pattern: navidrome-*
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- run: ls -lR ./binaries
|
||||||
|
|
||||||
|
- name: Set RELEASE_FLAGS for snapshot releases
|
||||||
|
if: env.IS_RELEASE == 'false'
|
||||||
|
run: echo 'RELEASE_FLAGS=--skip=publish --snapshot' >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Run GoReleaser
|
||||||
|
uses: goreleaser/goreleaser-action@v6
|
||||||
|
with:
|
||||||
|
version: '~> v2'
|
||||||
|
args: "release --clean -f release/goreleaser.yml ${{ env.RELEASE_FLAGS }}"
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Remove build artifacts
|
||||||
|
run: |
|
||||||
|
ls -l ./dist
|
||||||
|
rm ./dist/*.tar.gz ./dist/*.zip
|
||||||
|
|
||||||
|
- name: Upload all-packages artifact
|
||||||
|
uses: actions/upload-artifact@v5
|
||||||
|
with:
|
||||||
|
name: packages
|
||||||
|
path: dist/navidrome_0*
|
||||||
|
|
||||||
|
- id: set-package-list
|
||||||
|
name: Export list of generated packages
|
||||||
|
run: |
|
||||||
|
cd dist
|
||||||
|
set +x
|
||||||
|
ITEMS=$(ls navidrome_0* | sed 's/^navidrome_0[^_]*_linux_//' | jq -R -s -c 'split("\n")[:-1]')
|
||||||
|
echo $ITEMS
|
||||||
|
echo "package_list=${ITEMS}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
upload-packages:
|
||||||
|
name: Upload Linux PKG
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [release]
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
item: ${{ fromJson(needs.release.outputs.package_list) }}
|
||||||
|
steps:
|
||||||
|
- name: Download all-packages artifact
|
||||||
|
uses: actions/download-artifact@v6
|
||||||
|
with:
|
||||||
|
name: packages
|
||||||
|
path: ./dist
|
||||||
|
|
||||||
|
- name: Upload all-packages artifact
|
||||||
|
uses: actions/upload-artifact@v5
|
||||||
|
with:
|
||||||
|
name: navidrome_linux_${{ matrix.item }}
|
||||||
|
path: dist/navidrome_0*_linux_${{ matrix.item }}
|
||||||
|
|
||||||
|
# delete-artifacts:
|
||||||
|
# name: Delete unused artifacts
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# needs: [upload-packages]
|
||||||
|
# steps:
|
||||||
|
# - name: Delete all-packages artifact
|
||||||
|
# env:
|
||||||
|
# GH_TOKEN: ${{ github.token }}
|
||||||
|
# run: |
|
||||||
|
# for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("packages")) | .id'); do
|
||||||
|
# gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
|
||||||
|
# done
|
||||||
56
.github/workflows/stale.yml
vendored
Normal file
56
.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
name: 'Close stale issues and PRs'
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
- cron: '30 1 * * *'
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
jobs:
|
||||||
|
stale:
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: dessant/lock-threads@v5
|
||||||
|
with:
|
||||||
|
process-only: 'issues, prs'
|
||||||
|
issue-inactive-days: 120
|
||||||
|
pr-inactive-days: 120
|
||||||
|
log-output: true
|
||||||
|
add-issue-labels: 'frozen-due-to-age'
|
||||||
|
add-pr-labels: 'frozen-due-to-age'
|
||||||
|
issue-comment: >
|
||||||
|
This issue has been automatically locked since there
|
||||||
|
has not been any recent activity after it was closed.
|
||||||
|
Please open a new issue for related bugs.
|
||||||
|
pr-comment: >
|
||||||
|
This pull request has been automatically locked since there
|
||||||
|
has not been any recent activity after it was closed.
|
||||||
|
Please open a new issue for related bugs.
|
||||||
|
- uses: actions/stale@v9
|
||||||
|
with:
|
||||||
|
operations-per-run: 999
|
||||||
|
days-before-issue-stale: 180
|
||||||
|
days-before-pr-stale: 180
|
||||||
|
days-before-issue-close: 30
|
||||||
|
days-before-pr-close: 30
|
||||||
|
stale-issue-message: >
|
||||||
|
This issue has been automatically marked as stale because it has not had
|
||||||
|
recent activity. The resources of the Navidrome team are limited, and so we are asking for your help.
|
||||||
|
|
||||||
|
If this is a **bug** and you can still reproduce this error on the <code>master</code> branch, please reply with all of the information you have about it in order to keep the issue open.
|
||||||
|
|
||||||
|
If this is a **feature request**, and you feel that it is still relevant and valuable, please tell us why.
|
||||||
|
|
||||||
|
This issue will automatically be closed in the near future if no further activity occurs. Thank you for all your contributions.
|
||||||
|
stale-pr-message: This PR has been automatically marked as stale because it has not had
|
||||||
|
recent activity. The resources of the Navidrome team are limited, and so we are asking for your help.
|
||||||
|
|
||||||
|
Please check https://github.com/navidrome/navidrome/blob/master/CONTRIBUTING.md#pull-requests and verify that this code contribution fits with the description. If yes, tell it in a comment.
|
||||||
|
|
||||||
|
This PR will automatically be closed in the near future if no further activity occurs. Thank you for all your contributions.
|
||||||
|
stale-issue-label: 'stale'
|
||||||
|
exempt-issue-labels: 'keep,security'
|
||||||
|
stale-pr-label: 'stale'
|
||||||
|
exempt-pr-labels: 'keep,security'
|
||||||
93
.github/workflows/update-translations.sh
vendored
Executable file
93
.github/workflows/update-translations.sh
vendored
Executable file
@@ -0,0 +1,93 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
I18N_DIR=resources/i18n
|
||||||
|
|
||||||
|
# Function to process JSON: remove empty attributes and sort
|
||||||
|
process_json() {
|
||||||
|
jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to check differences between local and remote translations
|
||||||
|
check_lang_diff() {
|
||||||
|
filename=${I18N_DIR}/"$1".json
|
||||||
|
url=$(curl -s -X POST https://poeditor.com/api/ \
|
||||||
|
-d api_token="${POEDITOR_APIKEY}" \
|
||||||
|
-d action="export" \
|
||||||
|
-d id="${POEDITOR_PROJECTID}" \
|
||||||
|
-d language="$1" \
|
||||||
|
-d type="key_value_json" | jq -r .item)
|
||||||
|
if [ -z "$url" ]; then
|
||||||
|
echo "Failed to export $1"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
curl -sSL "$url" > poeditor.json
|
||||||
|
|
||||||
|
process_json "$filename" > "$filename".tmp
|
||||||
|
process_json poeditor.json > poeditor.tmp
|
||||||
|
|
||||||
|
diff=$(diff -u "$filename".tmp poeditor.tmp) || true
|
||||||
|
if [ -n "$diff" ]; then
|
||||||
|
echo "$diff"
|
||||||
|
mv poeditor.json "$filename"
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -f poeditor.json poeditor.tmp "$filename".tmp
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to get the list of languages
|
||||||
|
get_language_list() {
|
||||||
|
response=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \
|
||||||
|
-d api_token="${POEDITOR_APIKEY}" \
|
||||||
|
-d id="${POEDITOR_PROJECTID}")
|
||||||
|
|
||||||
|
echo $response
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to get the language name from the language code
|
||||||
|
get_language_name() {
|
||||||
|
lang_code="$1"
|
||||||
|
lang_list="$2"
|
||||||
|
|
||||||
|
lang_name=$(echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name")
|
||||||
|
|
||||||
|
if [ -z "$lang_name" ]; then
|
||||||
|
echo "Error: Language code '$lang_code' not found" >&2
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "$lang_name"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to get the language code from the file path
|
||||||
|
get_lang_code() {
|
||||||
|
filepath="$1"
|
||||||
|
# Extract just the filename
|
||||||
|
filename=$(basename "$filepath")
|
||||||
|
|
||||||
|
# Remove the extension
|
||||||
|
lang_code="${filename%.*}"
|
||||||
|
|
||||||
|
echo "$lang_code"
|
||||||
|
}
|
||||||
|
|
||||||
|
lang_list=$(get_language_list)
|
||||||
|
|
||||||
|
# Check differences for each language
|
||||||
|
for file in ${I18N_DIR}/*.json; do
|
||||||
|
code=$(get_lang_code "$file")
|
||||||
|
lang=$(jq -r .languageName < "$file")
|
||||||
|
lang_name=$(get_language_name "$code" "$lang_list")
|
||||||
|
echo "Downloading $lang_name - $lang ($code)"
|
||||||
|
check_lang_diff "$code"
|
||||||
|
done
|
||||||
|
|
||||||
|
# List changed languages to stderr
|
||||||
|
languages=""
|
||||||
|
for file in $(git diff --name-only --exit-code | grep json); do
|
||||||
|
lang_code=$(get_lang_code "$file")
|
||||||
|
lang_name=$(get_language_name "$lang_code" "$lang_list")
|
||||||
|
languages="${languages}$(echo "$lang_name" | tr -d '\n'), "
|
||||||
|
done
|
||||||
|
echo "${languages%??}" 1>&2
|
||||||
33
.github/workflows/update-translations.yml
vendored
Normal file
33
.github/workflows/update-translations.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
name: POEditor import
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 10 * * *'
|
||||||
|
jobs:
|
||||||
|
update-translations:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ github.repository_owner == 'navidrome' }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- name: Get updated translations
|
||||||
|
id: poeditor
|
||||||
|
env:
|
||||||
|
POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
|
||||||
|
POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
|
||||||
|
run: |
|
||||||
|
.github/workflows/update-translations.sh 2> title.tmp
|
||||||
|
title=$(cat title.tmp)
|
||||||
|
echo "::set-output name=title::$title"
|
||||||
|
rm title.tmp
|
||||||
|
- name: Show changes, if any
|
||||||
|
run: |
|
||||||
|
git status --porcelain
|
||||||
|
git diff
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@v7
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.PAT }}
|
||||||
|
author: "navidrome-bot <navidrome-bot@navidrome.org>"
|
||||||
|
commit-message: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
|
||||||
|
title: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
|
||||||
|
branch: update-translations
|
||||||
236
.github/workflows/validate-translations.sh
vendored
Executable file
236
.github/workflows/validate-translations.sh
vendored
Executable file
@@ -0,0 +1,236 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# validate-translations.sh
|
||||||
|
#
|
||||||
|
# This script validates the structure of JSON translation files by comparing them
|
||||||
|
# against the reference English translation file (ui/src/i18n/en.json).
|
||||||
|
#
|
||||||
|
# The script performs the following validations:
|
||||||
|
# 1. JSON syntax validation using jq
|
||||||
|
# 2. Structural validation - ensures all keys from English file are present
|
||||||
|
# 3. Reports missing keys (translation incomplete)
|
||||||
|
# 4. Reports extra keys (keys not in English reference, possibly deprecated)
|
||||||
|
# 5. Emits GitHub Actions annotations for CI/CD integration
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./validate-translations.sh
|
||||||
|
#
|
||||||
|
# Environment Variables:
|
||||||
|
# EN_FILE - Path to reference English file (default: ui/src/i18n/en.json)
|
||||||
|
# TRANSLATION_DIR - Directory containing translation files (default: resources/i18n)
|
||||||
|
#
|
||||||
|
# Exit codes:
|
||||||
|
# 0 - All translations are valid
|
||||||
|
# 1 - One or more translations have structural issues
|
||||||
|
#
|
||||||
|
# GitHub Actions Integration:
|
||||||
|
# The script outputs GitHub Actions annotations using ::error and ::warning
|
||||||
|
# format that will be displayed in PR checks and workflow summaries.
|
||||||
|
|
||||||
|
# Script to validate JSON translation files structure against en.json
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Path to the reference English translation file
|
||||||
|
EN_FILE="${EN_FILE:-ui/src/i18n/en.json}"
|
||||||
|
TRANSLATION_DIR="${TRANSLATION_DIR:-resources/i18n}"
|
||||||
|
VERBOSE=false
|
||||||
|
|
||||||
|
# Parse command line arguments
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
-v|--verbose)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-h|--help)
|
||||||
|
echo "Usage: $0 [options]"
|
||||||
|
echo ""
|
||||||
|
echo "Validates JSON translation files structure against English reference file."
|
||||||
|
echo ""
|
||||||
|
echo "Options:"
|
||||||
|
echo " -h, --help Show this help message"
|
||||||
|
echo " -v, --verbose Show detailed output (default: only show errors)"
|
||||||
|
echo ""
|
||||||
|
echo "Environment Variables:"
|
||||||
|
echo " EN_FILE Path to reference English file (default: ui/src/i18n/en.json)"
|
||||||
|
echo " TRANSLATION_DIR Directory with translation files (default: resources/i18n)"
|
||||||
|
echo ""
|
||||||
|
echo "Examples:"
|
||||||
|
echo " $0 # Validate all translation files (quiet mode)"
|
||||||
|
echo " $0 -v # Validate with detailed output"
|
||||||
|
echo " EN_FILE=custom/en.json $0 # Use custom reference file"
|
||||||
|
echo " TRANSLATION_DIR=custom/i18n $0 # Use custom translations directory"
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1" >&2
|
||||||
|
echo "Use --help for usage information" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Color codes for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
if [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo "Validating translation files structure against ${EN_FILE}..."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if English reference file exists
|
||||||
|
if [[ ! -f "$EN_FILE" ]]; then
|
||||||
|
echo "::error::Reference file $EN_FILE not found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Function to extract all JSON keys from a file, creating a flat list of dot-separated paths
|
||||||
|
extract_keys() {
|
||||||
|
local file="$1"
|
||||||
|
jq -r 'paths(scalars) as $p | $p | join(".")' "$file" 2>/dev/null | sort
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to extract all non-empty string keys (to identify structural issues)
|
||||||
|
extract_structure_keys() {
|
||||||
|
local file="$1"
|
||||||
|
# Get only keys where values are not empty strings
|
||||||
|
jq -r 'paths(scalars) as $p | select(getpath($p) != "") | $p | join(".")' "$file" 2>/dev/null | sort
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to validate a single translation file
|
||||||
|
validate_translation() {
|
||||||
|
local translation_file="$1"
|
||||||
|
local filename=$(basename "$translation_file")
|
||||||
|
local has_errors=false
|
||||||
|
local verbose=${2:-false}
|
||||||
|
|
||||||
|
if [[ "$verbose" == "true" ]]; then
|
||||||
|
echo "Validating $filename..."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# First validate JSON syntax
|
||||||
|
if ! jq empty "$translation_file" 2>/dev/null; then
|
||||||
|
echo "::error file=$translation_file::Invalid JSON syntax"
|
||||||
|
echo -e "${RED}✗ $filename has invalid JSON syntax${NC}"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Extract all keys from both files (for statistics)
|
||||||
|
local en_keys_file=$(mktemp)
|
||||||
|
local translation_keys_file=$(mktemp)
|
||||||
|
|
||||||
|
extract_keys "$EN_FILE" > "$en_keys_file"
|
||||||
|
extract_keys "$translation_file" > "$translation_keys_file"
|
||||||
|
|
||||||
|
# Extract only non-empty structure keys (to validate structural issues)
|
||||||
|
local en_structure_file=$(mktemp)
|
||||||
|
local translation_structure_file=$(mktemp)
|
||||||
|
|
||||||
|
extract_structure_keys "$EN_FILE" > "$en_structure_file"
|
||||||
|
extract_structure_keys "$translation_file" > "$translation_structure_file"
|
||||||
|
|
||||||
|
# Find structural issues: keys in translation not in English (misplaced)
|
||||||
|
local extra_keys=$(comm -13 "$en_keys_file" "$translation_keys_file")
|
||||||
|
|
||||||
|
# Find missing keys (for statistics only)
|
||||||
|
local missing_keys=$(comm -23 "$en_keys_file" "$translation_keys_file")
|
||||||
|
|
||||||
|
# Count keys for statistics
|
||||||
|
local total_en_keys=$(wc -l < "$en_keys_file")
|
||||||
|
local total_translation_keys=$(wc -l < "$translation_keys_file")
|
||||||
|
local missing_count=0
|
||||||
|
local extra_count=0
|
||||||
|
|
||||||
|
if [[ -n "$missing_keys" ]]; then
|
||||||
|
missing_count=$(echo "$missing_keys" | grep -c '^' || echo 0)
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -n "$extra_keys" ]]; then
|
||||||
|
extra_count=$(echo "$extra_keys" | grep -c '^' || echo 0)
|
||||||
|
has_errors=true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Report extra/misplaced keys (these are structural issues)
|
||||||
|
if [[ -n "$extra_keys" ]]; then
|
||||||
|
if [[ "$verbose" == "true" ]]; then
|
||||||
|
echo -e "${YELLOW}Misplaced keys in $filename ($extra_count):${NC}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
while IFS= read -r key; do
|
||||||
|
# Try to find the line number
|
||||||
|
line=$(grep -n "\"$(echo "$key" | sed 's/.*\.//')" "$translation_file" | head -1 | cut -d: -f1)
|
||||||
|
line=${line:-1} # Default to line 1 if not found
|
||||||
|
|
||||||
|
echo "::error file=$translation_file,line=$line::Misplaced key: $key"
|
||||||
|
|
||||||
|
if [[ "$verbose" == "true" ]]; then
|
||||||
|
echo " + $key (line ~$line)"
|
||||||
|
fi
|
||||||
|
done <<< "$extra_keys"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Clean up temp files
|
||||||
|
rm -f "$en_keys_file" "$translation_keys_file" "$en_structure_file" "$translation_structure_file"
|
||||||
|
|
||||||
|
# Print statistics
|
||||||
|
if [[ "$verbose" == "true" ]]; then
|
||||||
|
echo " Keys: $total_translation_keys/$total_en_keys (Missing: $missing_count, Extra/Misplaced: $extra_count)"
|
||||||
|
|
||||||
|
if [[ "$has_errors" == "true" ]]; then
|
||||||
|
echo -e "${RED}✗ $filename has structural issues${NC}"
|
||||||
|
else
|
||||||
|
echo -e "${GREEN}✓ $filename structure is valid${NC}"
|
||||||
|
fi
|
||||||
|
elif [[ "$has_errors" == "true" ]]; then
|
||||||
|
echo -e "${RED}✗ $filename has structural issues (Extra/Misplaced: $extra_count)${NC}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
return $([[ "$has_errors" == "true" ]] && echo 1 || echo 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main validation loop
|
||||||
|
validation_failed=false
|
||||||
|
total_files=0
|
||||||
|
failed_files=0
|
||||||
|
valid_files=0
|
||||||
|
|
||||||
|
for translation_file in "$TRANSLATION_DIR"/*.json; do
|
||||||
|
if [[ -f "$translation_file" ]]; then
|
||||||
|
total_files=$((total_files + 1))
|
||||||
|
if ! validate_translation "$translation_file" "$VERBOSE"; then
|
||||||
|
validation_failed=true
|
||||||
|
failed_files=$((failed_files + 1))
|
||||||
|
else
|
||||||
|
valid_files=$((valid_files + 1))
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo "" # Add spacing between files
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
if [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo "========================================="
|
||||||
|
echo "Translation Validation Summary:"
|
||||||
|
echo " Total files: $total_files"
|
||||||
|
echo " Valid files: $valid_files"
|
||||||
|
echo " Files with structural issues: $failed_files"
|
||||||
|
echo "========================================="
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$validation_failed" == "true" ]]; then
|
||||||
|
if [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo -e "${RED}Translation validation failed - $failed_files file(s) have structural issues${NC}"
|
||||||
|
else
|
||||||
|
echo -e "${RED}Translation validation failed - $failed_files/$total_files file(s) have structural issues${NC}"
|
||||||
|
fi
|
||||||
|
exit 1
|
||||||
|
elif [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo -e "${GREEN}All translation files are structurally valid${NC}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
35
.gitignore
vendored
Normal file
35
.gitignore
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
.DS_Store
|
||||||
|
.idea
|
||||||
|
.vscode
|
||||||
|
.envrc
|
||||||
|
/navidrome
|
||||||
|
/iTunes*.xml
|
||||||
|
/tmp
|
||||||
|
/bin
|
||||||
|
data/*
|
||||||
|
vendor/*/
|
||||||
|
wiki
|
||||||
|
TODO.md
|
||||||
|
var
|
||||||
|
navidrome.toml
|
||||||
|
!release/linux/navidrome.toml
|
||||||
|
master.zip
|
||||||
|
testDB
|
||||||
|
cache/*
|
||||||
|
*.swp
|
||||||
|
dist
|
||||||
|
music
|
||||||
|
*.db*
|
||||||
|
.gitinfo
|
||||||
|
docker-compose.yml
|
||||||
|
!contrib/docker-compose.yml
|
||||||
|
binaries
|
||||||
|
navidrome-*
|
||||||
|
AGENTS.md
|
||||||
|
.github/prompts
|
||||||
|
.github/instructions
|
||||||
|
.github/git-commit-instructions.md
|
||||||
|
*.exe
|
||||||
|
*.test
|
||||||
|
*.wasm
|
||||||
|
openspec/
|
||||||
58
.golangci.yml
Normal file
58
.golangci.yml
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
version: "2"
|
||||||
|
run:
|
||||||
|
build-tags:
|
||||||
|
- netgo
|
||||||
|
linters:
|
||||||
|
enable:
|
||||||
|
- asasalint
|
||||||
|
- asciicheck
|
||||||
|
- bidichk
|
||||||
|
- bodyclose
|
||||||
|
- copyloopvar
|
||||||
|
- dogsled
|
||||||
|
- durationcheck
|
||||||
|
- errorlint
|
||||||
|
- gocritic
|
||||||
|
- gocyclo
|
||||||
|
- goprintffuncname
|
||||||
|
- gosec
|
||||||
|
- misspell
|
||||||
|
- nakedret
|
||||||
|
- nilerr
|
||||||
|
- rowserrcheck
|
||||||
|
- unconvert
|
||||||
|
- whitespace
|
||||||
|
disable:
|
||||||
|
- staticcheck
|
||||||
|
settings:
|
||||||
|
gocritic:
|
||||||
|
disable-all: true
|
||||||
|
enabled-checks:
|
||||||
|
- deprecatedComment
|
||||||
|
gosec:
|
||||||
|
excludes:
|
||||||
|
- G501
|
||||||
|
- G401
|
||||||
|
- G505
|
||||||
|
- G115
|
||||||
|
govet:
|
||||||
|
enable:
|
||||||
|
- nilness
|
||||||
|
exclusions:
|
||||||
|
generated: lax
|
||||||
|
presets:
|
||||||
|
- comments
|
||||||
|
- common-false-positives
|
||||||
|
- legacy
|
||||||
|
- std-error-handling
|
||||||
|
paths:
|
||||||
|
- third_party$
|
||||||
|
- builtin$
|
||||||
|
- examples$
|
||||||
|
formatters:
|
||||||
|
exclusions:
|
||||||
|
generated: lax
|
||||||
|
paths:
|
||||||
|
- third_party$
|
||||||
|
- builtin$
|
||||||
|
- examples$
|
||||||
129
CODE_OF_CONDUCT.md
Normal file
129
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
|
||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
We as members, contributors, and leaders pledge to make participation in our
|
||||||
|
community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||||
|
identity and expression, level of experience, education, socio-economic status,
|
||||||
|
nationality, personal appearance, race, religion, or sexual identity
|
||||||
|
and orientation.
|
||||||
|
|
||||||
|
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||||
|
diverse, inclusive, and healthy community.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to a positive environment for our
|
||||||
|
community include:
|
||||||
|
|
||||||
|
* Demonstrating empathy and kindness toward other people
|
||||||
|
* Being respectful of differing opinions, viewpoints, and experiences
|
||||||
|
* Giving and gracefully accepting constructive feedback
|
||||||
|
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||||
|
and learning from the experience
|
||||||
|
* Focusing on what is best not just for us as individuals, but for the
|
||||||
|
overall community
|
||||||
|
|
||||||
|
Examples of unacceptable behavior include:
|
||||||
|
|
||||||
|
* The use of sexualized language or imagery, and sexual attention or
|
||||||
|
advances of any kind
|
||||||
|
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||||
|
* Public or private harassment
|
||||||
|
* Publishing others' private information, such as a physical or email
|
||||||
|
address, without their explicit permission
|
||||||
|
* Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Enforcement Responsibilities
|
||||||
|
|
||||||
|
Community leaders are responsible for clarifying and enforcing our standards of
|
||||||
|
acceptable behavior and will take appropriate and fair corrective action in
|
||||||
|
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||||
|
or harmful.
|
||||||
|
|
||||||
|
Community leaders have the right and responsibility to remove, edit, or reject
|
||||||
|
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||||
|
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||||
|
decisions when appropriate.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies within all community spaces, and also applies when
|
||||||
|
an individual is officially representing the community in public spaces.
|
||||||
|
Examples of representing our community include using an official e-mail address,
|
||||||
|
posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported to the community leaders responsible for enforcement at
|
||||||
|
navidrome@navidrome.org.
|
||||||
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
reporter of any incident.
|
||||||
|
|
||||||
|
## Enforcement Guidelines
|
||||||
|
|
||||||
|
Community leaders will follow these Community Impact Guidelines in determining
|
||||||
|
the consequences for any action they deem in violation of this Code of Conduct:
|
||||||
|
|
||||||
|
### 1. Correction
|
||||||
|
|
||||||
|
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||||
|
unprofessional or unwelcome in the community.
|
||||||
|
|
||||||
|
**Consequence**: A private, written warning from community leaders, providing
|
||||||
|
clarity around the nature of the violation and an explanation of why the
|
||||||
|
behavior was inappropriate. A public apology may be requested.
|
||||||
|
|
||||||
|
### 2. Warning
|
||||||
|
|
||||||
|
**Community Impact**: A violation through a single incident or series
|
||||||
|
of actions.
|
||||||
|
|
||||||
|
**Consequence**: A warning with consequences for continued behavior. No
|
||||||
|
interaction with the people involved, including unsolicited interaction with
|
||||||
|
those enforcing the Code of Conduct, for a specified period of time. This
|
||||||
|
includes avoiding interactions in community spaces as well as external channels
|
||||||
|
like social media. Violating these terms may lead to a temporary or
|
||||||
|
permanent ban.
|
||||||
|
|
||||||
|
### 3. Temporary Ban
|
||||||
|
|
||||||
|
**Community Impact**: A serious violation of community standards, including
|
||||||
|
sustained inappropriate behavior.
|
||||||
|
|
||||||
|
**Consequence**: A temporary ban from any sort of interaction or public
|
||||||
|
communication with the community for a specified period of time. No public or
|
||||||
|
private interaction with the people involved, including unsolicited interaction
|
||||||
|
with those enforcing the Code of Conduct, is allowed during this period.
|
||||||
|
Violating these terms may lead to a permanent ban.
|
||||||
|
|
||||||
|
### 4. Permanent Ban
|
||||||
|
|
||||||
|
**Community Impact**: Demonstrating a pattern of violation of community
|
||||||
|
standards, including sustained inappropriate behavior, harassment of an
|
||||||
|
individual, or aggression toward or disparagement of classes of individuals.
|
||||||
|
|
||||||
|
**Consequence**: A permanent ban from any sort of public interaction within
|
||||||
|
the community.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||||
|
version 2.0, available at
|
||||||
|
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||||
|
|
||||||
|
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||||
|
enforcement ladder](https://github.com/mozilla/diversity).
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
|
||||||
|
For answers to common questions about this code of conduct, see the FAQ at
|
||||||
|
https://www.contributor-covenant.org/faq. Translations are available at
|
||||||
|
https://www.contributor-covenant.org/translations.
|
||||||
93
CONTRIBUTING.md
Normal file
93
CONTRIBUTING.md
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
# Navidrome Contribution Guide
|
||||||
|
|
||||||
|
Navidrome is a streaming service which allows you to enjoy your music collection from anywhere. We'd welcome you to contribute to our open source project and make Navidrome even better. There are some basic guidelines which you need to follow if you like to contribute to Navidrome.
|
||||||
|
|
||||||
|
- [Asking Support Questions](#asking-support-questions)
|
||||||
|
- [Code of Conduct](#code-of-conduct)
|
||||||
|
- [Issues](#issues)
|
||||||
|
- [Pull Requests](#pull-requests)
|
||||||
|
|
||||||
|
|
||||||
|
## Asking Support Questions
|
||||||
|
We have an active [discussion forum](https://github.com/navidrome/navidrome/discussions) where users and developers can ask questions. Please don't use the GitHub issue tracker to ask questions.
|
||||||
|
|
||||||
|
## Code of Conduct
|
||||||
|
Please read the following [Code of Conduct](https://github.com/navidrome/navidrome/blob/master/CODE_OF_CONDUCT.md).
|
||||||
|
|
||||||
|
## Issues
|
||||||
|
Found any issue or bug in our codebase? Have a great idea you want to propose or discuss with
|
||||||
|
the developers? You can help by submitting an [issue](https://github.com/navidrome/navidrome/issues/new/choose)
|
||||||
|
to the GitHub repository.
|
||||||
|
|
||||||
|
**Before opening a new issue, please check if the issue has not been already made by searching
|
||||||
|
the [issues](https://github.com/navidrome/navidrome/issues)**
|
||||||
|
|
||||||
|
## Pull requests
|
||||||
|
Before submitting a pull request, ensure that you go through the following:
|
||||||
|
- Open a corresponding issue for the Pull Request, if not existing. The issue can be opened following [these guidelines](#issues)
|
||||||
|
- Ensure that there is no open or closed Pull Request corresponding to your submission to avoid duplication of effort.
|
||||||
|
- Setup the [development environment](https://www.navidrome.org/docs/developers/dev-environment/)
|
||||||
|
- Create a new branch on your forked repo and make the changes in it. Naming conventions for branch are: `<Issue Title>/<Issue Number>`. Example:
|
||||||
|
```
|
||||||
|
git checkout -b adding-docs/834 master
|
||||||
|
```
|
||||||
|
- The commits should follow a [specific convention](#commit-conventions)
|
||||||
|
- Ensure that a DCO sign-off for commits is provided via `--signoff` option of git commit
|
||||||
|
- Provide a link to the issue that will be closed via your Pull request.
|
||||||
|
|
||||||
|
### Commit Conventions
|
||||||
|
Each commit message must adhere to the following format:
|
||||||
|
```
|
||||||
|
<type>(scope): <description> - <issue number>
|
||||||
|
|
||||||
|
[optional body]
|
||||||
|
```
|
||||||
|
This improves the readability of the messages
|
||||||
|
|
||||||
|
#### Type
|
||||||
|
It can be one of the following:
|
||||||
|
1. **feat**: Addition of a new feature
|
||||||
|
2. **fix**: Bug fix
|
||||||
|
3. **sec**: Fixing security issues
|
||||||
|
4. **docs**: Documentation Changes
|
||||||
|
5. **style**: Changes to styling
|
||||||
|
6. **refactor**: Refactoring of code
|
||||||
|
7. **perf**: Code that affects performance
|
||||||
|
8. **test**: Updating or improving the current tests
|
||||||
|
9. **build**: Changes to Build process
|
||||||
|
10. **revert**: Reverting to a previous commit
|
||||||
|
11. **chore** : updating grunt tasks etc
|
||||||
|
|
||||||
|
If there is a breaking change in your Pull Request, please add `BREAKING CHANGE` in the optional body section
|
||||||
|
|
||||||
|
#### Scope
|
||||||
|
The file or folder where the changes are made. If there are more than one, you can mention any
|
||||||
|
|
||||||
|
#### Description
|
||||||
|
A short description of the issue
|
||||||
|
|
||||||
|
#### Issue number
|
||||||
|
The issue fixed by this Pull Request.
|
||||||
|
|
||||||
|
The body is optional. It may contain short description of changes made.
|
||||||
|
|
||||||
|
Following all the guidelines an ideal commit will look like:
|
||||||
|
```
|
||||||
|
git commit --signoff -m "feat(themes): New-theme - #834"
|
||||||
|
```
|
||||||
|
|
||||||
|
After committing, push your commits to your forked branch and create a Pull Request from there.
|
||||||
|
The Pull Request Title can be the same as `<type>(scope): <description> - <issue number>`
|
||||||
|
A demo layout of how the Pull request body can look:
|
||||||
|
```
|
||||||
|
Closes <Issue number along with link>
|
||||||
|
|
||||||
|
Description (What does the pull request do)
|
||||||
|
|
||||||
|
Changes (What changes were made )
|
||||||
|
|
||||||
|
Screenshots or Videos
|
||||||
|
|
||||||
|
Related Issues and Pull Requests(if any)
|
||||||
|
|
||||||
|
```
|
||||||
146
Dockerfile
Normal file
146
Dockerfile
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcross
|
||||||
|
|
||||||
|
########################################################################################################################
|
||||||
|
### Build xx (original image: tonistiigi/xx)
|
||||||
|
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS xx-build
|
||||||
|
|
||||||
|
# v1.5.0
|
||||||
|
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a
|
||||||
|
|
||||||
|
RUN apk add -U --no-cache git
|
||||||
|
RUN git clone https://github.com/tonistiigi/xx && \
|
||||||
|
cd xx && \
|
||||||
|
git checkout ${XX_VERSION} && \
|
||||||
|
mkdir -p /out && \
|
||||||
|
cp src/xx-* /out/
|
||||||
|
|
||||||
|
RUN cd /out && \
|
||||||
|
ln -s xx-cc /out/xx-clang && \
|
||||||
|
ln -s xx-cc /out/xx-clang++ && \
|
||||||
|
ln -s xx-cc /out/xx-c++ && \
|
||||||
|
ln -s xx-apt /out/xx-apt-get
|
||||||
|
|
||||||
|
# xx mimics the original tonistiigi/xx image
|
||||||
|
FROM scratch AS xx
|
||||||
|
COPY --from=xx-build /out/ /usr/bin/
|
||||||
|
|
||||||
|
########################################################################################################################
|
||||||
|
### Get TagLib
|
||||||
|
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS taglib-build
|
||||||
|
ARG TARGETPLATFORM
|
||||||
|
ARG CROSS_TAGLIB_VERSION=2.1.1-1
|
||||||
|
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
|
||||||
|
|
||||||
|
# wget in busybox can't follow redirects
|
||||||
|
RUN <<EOT
|
||||||
|
apk add --no-cache wget
|
||||||
|
PLATFORM=$(echo ${TARGETPLATFORM} | tr '/' '-')
|
||||||
|
FILE=taglib-${PLATFORM}.tar.gz
|
||||||
|
|
||||||
|
DOWNLOAD_URL=${CROSS_TAGLIB_RELEASES_URL}${FILE}
|
||||||
|
wget ${DOWNLOAD_URL}
|
||||||
|
|
||||||
|
mkdir /taglib
|
||||||
|
tar -xzf ${FILE} -C /taglib
|
||||||
|
EOT
|
||||||
|
|
||||||
|
########################################################################################################################
|
||||||
|
### Build Navidrome UI
|
||||||
|
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/node:lts-alpine AS ui
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install node dependencies
|
||||||
|
COPY ui/package.json ui/package-lock.json ./
|
||||||
|
COPY ui/bin/ ./bin/
|
||||||
|
RUN npm ci
|
||||||
|
|
||||||
|
# Build bundle
|
||||||
|
COPY ui/ ./
|
||||||
|
RUN npm run build -- --outDir=/build
|
||||||
|
|
||||||
|
FROM scratch AS ui-bundle
|
||||||
|
COPY --from=ui /build /build
|
||||||
|
|
||||||
|
########################################################################################################################
|
||||||
|
### Build Navidrome binary
|
||||||
|
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.25-bookworm AS base
|
||||||
|
RUN apt-get update && apt-get install -y clang lld
|
||||||
|
COPY --from=xx / /
|
||||||
|
WORKDIR /workspace
|
||||||
|
|
||||||
|
FROM --platform=$BUILDPLATFORM base AS build
|
||||||
|
|
||||||
|
# Install build dependencies for the target platform
|
||||||
|
ARG TARGETPLATFORM
|
||||||
|
|
||||||
|
RUN xx-apt install -y binutils gcc g++ libc6-dev zlib1g-dev
|
||||||
|
RUN xx-verify --setup
|
||||||
|
|
||||||
|
RUN --mount=type=bind,source=. \
|
||||||
|
--mount=type=cache,target=/root/.cache \
|
||||||
|
--mount=type=cache,target=/go/pkg/mod \
|
||||||
|
go mod download
|
||||||
|
|
||||||
|
ARG GIT_SHA
|
||||||
|
ARG GIT_TAG
|
||||||
|
|
||||||
|
RUN --mount=type=bind,source=. \
|
||||||
|
--mount=from=ui,source=/build,target=./ui/build,ro \
|
||||||
|
--mount=from=osxcross,src=/osxcross/SDK,target=/xx-sdk,ro \
|
||||||
|
--mount=type=cache,target=/root/.cache \
|
||||||
|
--mount=type=cache,target=/go/pkg/mod \
|
||||||
|
--mount=from=taglib-build,target=/taglib,src=/taglib,ro <<EOT
|
||||||
|
|
||||||
|
# Setup CGO cross-compilation environment
|
||||||
|
xx-go --wrap
|
||||||
|
export CGO_ENABLED=1
|
||||||
|
export PKG_CONFIG_PATH=/taglib/lib/pkgconfig
|
||||||
|
cat $(go env GOENV)
|
||||||
|
|
||||||
|
# Only Darwin (macOS) requires clang (default), Windows requires gcc, everything else can use any compiler.
|
||||||
|
# So let's use gcc for everything except Darwin.
|
||||||
|
if [ "$(xx-info os)" != "darwin" ]; then
|
||||||
|
export CC=$(xx-info)-gcc
|
||||||
|
export CXX=$(xx-info)-g++
|
||||||
|
export LD_EXTRA="-extldflags '-static -latomic'"
|
||||||
|
fi
|
||||||
|
if [ "$(xx-info os)" = "windows" ]; then
|
||||||
|
export EXT=".exe"
|
||||||
|
fi
|
||||||
|
|
||||||
|
go build -tags=netgo -ldflags="${LD_EXTRA} -w -s \
|
||||||
|
-X github.com/navidrome/navidrome/consts.gitSha=${GIT_SHA} \
|
||||||
|
-X github.com/navidrome/navidrome/consts.gitTag=${GIT_TAG}" \
|
||||||
|
-o /out/navidrome${EXT} .
|
||||||
|
EOT
|
||||||
|
|
||||||
|
# Verify if the binary was built for the correct platform and it is statically linked
|
||||||
|
RUN xx-verify --static /out/navidrome*
|
||||||
|
|
||||||
|
FROM scratch AS binary
|
||||||
|
COPY --from=build /out /
|
||||||
|
|
||||||
|
########################################################################################################################
|
||||||
|
### Build Final Image
|
||||||
|
FROM public.ecr.aws/docker/library/alpine:3.19 AS final
|
||||||
|
LABEL maintainer="deluan@navidrome.org"
|
||||||
|
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
|
||||||
|
|
||||||
|
# Install ffmpeg and mpv
|
||||||
|
RUN apk add -U --no-cache ffmpeg mpv sqlite
|
||||||
|
|
||||||
|
# Copy navidrome binary
|
||||||
|
COPY --from=build /out/navidrome /app/
|
||||||
|
|
||||||
|
VOLUME ["/data", "/music"]
|
||||||
|
ENV ND_MUSICFOLDER=/music
|
||||||
|
ENV ND_DATAFOLDER=/data
|
||||||
|
ENV ND_CONFIGFILE=/data/navidrome.toml
|
||||||
|
ENV ND_PORT=4533
|
||||||
|
RUN touch /.nddockerenv
|
||||||
|
|
||||||
|
EXPOSE ${ND_PORT}
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
ENTRYPOINT ["/app/navidrome"]
|
||||||
|
|
||||||
621
LICENSE
Normal file
621
LICENSE
Normal file
@@ -0,0 +1,621 @@
|
|||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users. We, the Free Software Foundation, use the
|
||||||
|
GNU General Public License for most of our software; it applies also to
|
||||||
|
any other work released this way by its authors. You can apply it to
|
||||||
|
your programs, too.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
To protect your rights, we need to prevent others from denying you
|
||||||
|
these rights or asking you to surrender the rights. Therefore, you have
|
||||||
|
certain responsibilities if you distribute copies of the software, or if
|
||||||
|
you modify it: responsibilities to respect the freedom of others.
|
||||||
|
|
||||||
|
For example, if you distribute copies of such a program, whether
|
||||||
|
gratis or for a fee, you must pass on to the recipients the same
|
||||||
|
freedoms that you received. You must make sure that they, too, receive
|
||||||
|
or can get the source code. And you must show them these terms so they
|
||||||
|
know their rights.
|
||||||
|
|
||||||
|
Developers that use the GNU GPL protect your rights with two steps:
|
||||||
|
(1) assert copyright on the software, and (2) offer you this License
|
||||||
|
giving you legal permission to copy, distribute and/or modify it.
|
||||||
|
|
||||||
|
For the developers' and authors' protection, the GPL clearly explains
|
||||||
|
that there is no warranty for this free software. For both users' and
|
||||||
|
authors' sake, the GPL requires that modified versions be marked as
|
||||||
|
changed, so that their problems will not be attributed erroneously to
|
||||||
|
authors of previous versions.
|
||||||
|
|
||||||
|
Some devices are designed to deny users access to install or run
|
||||||
|
modified versions of the software inside them, although the manufacturer
|
||||||
|
can do so. This is fundamentally incompatible with the aim of
|
||||||
|
protecting users' freedom to change the software. The systematic
|
||||||
|
pattern of such abuse occurs in the area of products for individuals to
|
||||||
|
use, which is precisely where it is most unacceptable. Therefore, we
|
||||||
|
have designed this version of the GPL to prohibit the practice for those
|
||||||
|
products. If such problems arise substantially in other domains, we
|
||||||
|
stand ready to extend this provision to those domains in future versions
|
||||||
|
of the GPL, as needed to protect the freedom of users.
|
||||||
|
|
||||||
|
Finally, every program is threatened constantly by software patents.
|
||||||
|
States should not allow patents to restrict development and use of
|
||||||
|
software on general-purpose computers, but in those that do, we wish to
|
||||||
|
avoid the special danger that patents applied to a free program could
|
||||||
|
make it effectively proprietary. To prevent this, the GPL assures that
|
||||||
|
patents cannot be used to render the program non-free.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Use with the GNU Affero General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU Affero General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the special requirements of the GNU Affero General Public License,
|
||||||
|
section 13, concerning interaction through a network will apply to the
|
||||||
|
combination as such.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU General Public License from time to time. Such new versions will
|
||||||
|
be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
297
Makefile
Normal file
297
Makefile
Normal file
@@ -0,0 +1,297 @@
|
|||||||
|
# Toolchain versions are derived from the repo itself so CI and dev stay in sync.
GO_VERSION=$(shell grep "^go " go.mod | cut -f 2 -d ' ')
NODE_VERSION=$(shell cat .nvmrc)

# Version info: from git when building inside a checkout, otherwise derived
# from the source-archive directory name (navidrome-X.Y.Z -> vX.Y.Z).
ifneq ("$(wildcard .git/HEAD)","")
GIT_SHA=$(shell git rev-parse --short HEAD)
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)-SNAPSHOT
else
GIT_SHA=source_archive
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))-SNAPSHOT
endif

# All platforms we cross-compile binaries for.
SUPPORTED_PLATFORMS ?= linux/amd64,linux/arm64,linux/arm/v5,linux/arm/v6,linux/arm/v7,linux/386,darwin/amd64,darwin/arm64,windows/amd64,windows/386
# Docker images: linux only, minus arm/v5 (not supported by the base image).
IMAGE_PLATFORMS ?= $(shell echo $(SUPPORTED_PLATFORMS) | tr ',' '\n' | grep "linux" | grep -v "arm/v5" | tr '\n' ',' | sed 's/,$$//')
PLATFORMS ?= $(SUPPORTED_PLATFORMS)
DOCKER_TAG ?= deluan/navidrome:develop

# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-1
GOLANGCI_LINT_VERSION ?= v2.6.2

# Frontend sources that trigger a UI rebuild (build output and node_modules excluded).
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
|
||||||
|
|
||||||
|
# One-time environment preparation: Go deps, linter, git hooks, Node deps.
setup: check_env download-deps install-golangci-lint setup-git ##@1_Run_First Install dependencies and prepare development environment
	@echo Downloading Node dependencies...
	@(cd ./ui && npm ci)
.PHONY: setup

# Full dev loop (frontend + backend) via foreman; insights collector disabled locally.
dev: check_env ##@Development Start Navidrome in development mode, with hot-reload for both frontend and backend
	ND_ENABLEINSIGHTSCOLLECTOR="false" npx foreman -j Procfile.dev -p 4533 start
.PHONY: dev

# Backend only, auto-restarted by reflex on code changes.
server: check_go_env buildjs ##@Development Start the backend in development mode
	@ND_ENABLEINSIGHTSCOLLECTOR="false" go tool reflex -d none -c reflex.conf
.PHONY: server

# Best-effort kill of dev processes; `-` prefix ignores "no process found".
stop: ##@Development Stop development servers (UI and backend)
	@echo "Stopping development servers..."
	@-pkill -f "vite"
	@-pkill -f "go tool reflex.*reflex.conf"
	@-pkill -f "go run.*netgo"
	@echo "Development servers stopped."
.PHONY: stop

watch: ##@Development Start Go tests in watch mode (re-run when code changes)
	go tool ginkgo watch -tags=netgo -notify ./...
.PHONY: watch

# Package selector for the test targets below, e.g. `make test PKG=./server`.
PKG ?= ./...
test: ##@Development Run Go tests. Use PKG variable to specify packages to test, e.g. make test PKG=./server
	go test -tags netgo $(PKG)
.PHONY: test

testall: test-race test-i18n test-js ##@Development Run Go and JS tests
.PHONY: testall

test-race: ##@Development Run Go tests with race detector
	go test -tags netgo -race -shuffle=on $(PKG)
.PHONY: test-race

test-js: ##@Development Run JS tests
	@(cd ./ui && npm run test)
.PHONY: test-js

# Reuses the same validation script the CI workflow runs.
test-i18n: ##@Development Validate all translations files
	./.github/workflows/validate-translations.sh
.PHONY: test-i18n
|
||||||
|
|
||||||
|
# Ensure the pinned golangci-lint version is available (PATH or ./bin).
# If a different version is found, it is removed from ./bin and the pinned
# version is installed via the official install script.
install-golangci-lint: ##@Development Install golangci-lint if not present
	@INSTALL=false; \
	if PATH=$$PATH:./bin which golangci-lint > /dev/null 2>&1; then \
		CURRENT_VERSION=$$(PATH=$$PATH:./bin golangci-lint version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -n1); \
		REQUIRED_VERSION=$$(echo "$(GOLANGCI_LINT_VERSION)" | sed 's/^v//'); \
		if [ "$$CURRENT_VERSION" != "$$REQUIRED_VERSION" ]; then \
			echo "Found golangci-lint $$CURRENT_VERSION, but $$REQUIRED_VERSION is required. Reinstalling..."; \
			rm -f ./bin/golangci-lint; \
			INSTALL=true; \
		fi; \
	else \
		INSTALL=true; \
	fi; \
	if [ "$$INSTALL" = "true" ]; then \
		echo "Installing golangci-lint $(GOLANGCI_LINT_VERSION)..."; \
		curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s $(GOLANGCI_LINT_VERSION); \
	fi
.PHONY: install-golangci-lint
|
||||||
|
|
||||||
|
lint: install-golangci-lint ##@Development Lint Go code
	PATH=$$PATH:./bin golangci-lint run -v --timeout 5m
.PHONY: lint

lintall: lint ##@Development Lint Go and JS code
	@(cd ./ui && npm run check-formatting) || (echo "\n\nPlease run 'npm run prettier' to fix formatting issues." && exit 1)
	@(cd ./ui && npm run lint)
.PHONY: lintall

# Formats UI (prettier) and Go (goimports), skipping generated files.
format: ##@Development Format code
	@(cd ./ui && npm run prettier)
	@go tool goimports -w `find . -name '*.go' | grep -v _gen.go$$ | grep -v .pb.go$$`
	@go mod tidy
.PHONY: format

# Regenerates wire_gen.go files for dependency injection.
wire: check_go_env ##@Development Update Dependency Injection
	go tool wire gen -tags=netgo ./...
.PHONY: wire

snapshots: ##@Development Update (GoLang) Snapshot tests
	UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
.PHONY: snapshots
|
||||||
|
|
||||||
|
# Scaffold an empty SQL migration in db/migrations.
# Usage: make migration-sql name=name_of_migration_file
migration-sql: ##@Development Create an empty SQL migration file
	@if [ -z "${name}" ]; then echo "Usage: make migration-sql name=name_of_migration_file"; exit 1; fi
	go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name} sql
.PHONY: migration-sql

# Scaffold an empty Go migration in db/migrations.
# Usage: make migration-go name=name_of_migration_file
migration-go: ##@Development Create an empty Go migration file
	@if [ -z "${name}" ]; then echo "Usage: make migration-go name=name_of_migration_file"; exit 1; fi
	go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name}
.PHONY: migration-go
|
||||||
|
|
||||||
|
# Backward-compatible alias for `make setup`.
setup-dev: setup
.PHONY: setup-dev

# Symlinks the hooks shipped in ./git into .git/hooks.
setup-git: ##@Development Setup Git hooks (pre-commit and pre-push)
	@echo Setting up git hooks
	@mkdir -p .git/hooks
	@(cd .git/hooks && ln -sf ../../git/* .)
.PHONY: setup-git
|
||||||
|
|
||||||
|
# Builds the navidrome binary, embedding version info via ldflags.
build: check_go_env buildjs ##@Build Build the project
	go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
.PHONY: build

buildall: deprecated build
.PHONY: buildall

# Same as build, with optimizations disabled so delve can attach.
debug-build: check_go_env buildjs ##@Build Build the project (with remote debug on)
	go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
.PHONY: debug-build

buildjs: check_node_env ui/build/index.html ##@Build Build only frontend
.PHONY: buildjs

docker-buildjs: ##@Build Build only frontend using Docker
	docker build --output "./ui" --target ui-bundle .
.PHONY: docker-buildjs

# File target: rebuilds the UI bundle only when frontend sources changed.
ui/build/index.html: $(UI_SRC_FILES)
	@(cd ./ui && npm run build)
|
||||||
|
|
||||||
|
docker-platforms: ##@Cross_Compilation List supported platforms
	@echo "Supported platforms:"
	@echo "$(SUPPORTED_PLATFORMS)" | tr ',' '\n' | sort | sed 's/^/ /'
	@echo "\nUsage: make PLATFORMS=\"linux/amd64\" docker-build"
	@echo " make IMAGE_PLATFORMS=\"linux/amd64\" docker-image"
.PHONY: docker-platforms

# Cross-compiles binaries for $(PLATFORMS) into ./binaries using buildx.
docker-build: ##@Cross_Compilation Cross-compile for any supported platform (check `make docker-platforms`)
	docker buildx build \
		--platform $(PLATFORMS) \
		--build-arg GIT_TAG=${GIT_TAG} \
		--build-arg GIT_SHA=${GIT_SHA} \
		--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
		--output "./binaries" --target binary .
.PHONY: docker-build

# Builds the runtime image. Guards reject platforms images cannot target.
docker-image: ##@Cross_Compilation Build Docker image, tagged as `deluan/navidrome:develop`, override with DOCKER_TAG var. Use IMAGE_PLATFORMS to specify target platforms
	@echo $(IMAGE_PLATFORMS) | grep -q "windows" && echo "ERROR: Windows is not supported for Docker builds" && exit 1 || true
	@echo $(IMAGE_PLATFORMS) | grep -q "darwin" && echo "ERROR: macOS is not supported for Docker builds" && exit 1 || true
	@echo $(IMAGE_PLATFORMS) | grep -q "arm/v5" && echo "ERROR: Linux ARMv5 is not supported for Docker builds" && exit 1 || true
	docker buildx build \
		--platform $(IMAGE_PLATFORMS) \
		--build-arg GIT_TAG=${GIT_TAG} \
		--build-arg GIT_SHA=${GIT_SHA} \
		--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
		--tag $(DOCKER_TAG) .
.PHONY: docker-image

# Builds Windows binaries, then packages them as MSI inside a msitools container.
docker-msi: ##@Cross_Compilation Build MSI installer for Windows
	make docker-build PLATFORMS=windows/386,windows/amd64
	DOCKER_CLI_HINTS=false docker build -q -t navidrome-msi-builder -f release/wix/msitools.dockerfile .
	@rm -rf binaries/msi
	docker run -it --rm -v $(PWD):/workspace -v $(PWD)/binaries:/workspace/binaries -e GIT_TAG=${GIT_TAG} \
		navidrome-msi-builder sh -c "release/wix/build_msi.sh /workspace 386 && release/wix/build_msi.sh /workspace amd64"
	@du -h binaries/msi/*.msi
.PHONY: docker-msi
|
||||||
|
|
||||||
|
# Runs a Navidrome image with a per-tag data dir under ./tmp; mounts
# navidrome.toml (and its MusicFolder, if resolvable) read-only when present.
run-docker: ##@Development Run a Navidrome Docker image. Usage: make run-docker tag=<tag>
	@if [ -z "$(tag)" ]; then echo "Usage: make run-docker tag=<tag>"; exit 1; fi
	@TAG_DIR="tmp/$$(echo '$(tag)' | tr '/:' '_')"; mkdir -p "$$TAG_DIR"; \
	VOLUMES="-v $(PWD)/$$TAG_DIR:/data"; \
	if [ -f navidrome.toml ]; then \
		VOLUMES="$$VOLUMES -v $(PWD)/navidrome.toml:/data/navidrome.toml:ro"; \
		MUSIC_FOLDER=$$(grep '^MusicFolder' navidrome.toml | head -n1 | sed 's/.*= *"//' | sed 's/".*//'); \
		if [ -n "$$MUSIC_FOLDER" ] && [ -d "$$MUSIC_FOLDER" ]; then \
			VOLUMES="$$VOLUMES -v $$MUSIC_FOLDER:/music:ro"; \
		fi; \
	fi; \
	echo "Running: docker run --rm -p 4533:4533 $$VOLUMES $(tag)"; docker run --rm -p 4533:4533 $$VOLUMES $(tag)
.PHONY: run-docker

# Snapshot release packaging (no publish); requires goreleaser on PATH.
package: docker-build ##@Cross_Compilation Create binaries and packages for ALL supported platforms
	@if [ -z `which goreleaser` ]; then echo "Please install goreleaser first: https://goreleaser.com/install/"; exit 1; fi
	goreleaser release -f release/goreleaser.yml --clean --skip=publish --snapshot
.PHONY: package

get-music: ##@Development Download some free music from Navidrome's demo instance
	mkdir -p music
	( cd music; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=2Y3qQA6zJC3ObbBrF9ZBoV" > brock.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=04HrSORpypcLGNUdQp37gn" > back_on_earth.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=5xcMPJdeEgNrGtnzYbzAqb" > ugress.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=1jjQMAZrG3lUsJ0YH6ZRS0" > voodoocuts.zip; \
	for file in *.zip; do unzip -n $${file}; done )
	@echo "Done. Remember to set your MusicFolder to ./music"
.PHONY: get-music
|
||||||
|
|
||||||
|
|
||||||
|
##########################################
|
||||||
|
#### Miscellaneous
|
||||||
|
|
||||||
|
clean:
|
||||||
|
@rm -rf ./binaries ./dist ./ui/build/*
|
||||||
|
@touch ./ui/build/.gitkeep
|
||||||
|
.PHONY: clean
|
||||||
|
|
||||||
|
release:
|
||||||
|
@if [[ ! "${V}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$$ ]]; then echo "Usage: make release V=X.X.X"; exit 1; fi
|
||||||
|
go mod tidy
|
||||||
|
@if [ -n "`git status -s`" ]; then echo "\n\nThere are pending changes. Please commit or stash first"; exit 1; fi
|
||||||
|
make pre-push
|
||||||
|
git tag v${V}
|
||||||
|
git push origin v${V} --no-verify
|
||||||
|
.PHONY: release
|
||||||
|
|
||||||
|
download-deps:
|
||||||
|
@echo Downloading Go dependencies...
|
||||||
|
@go mod download
|
||||||
|
@go mod tidy # To revert any changes made by the `go mod download` command
|
||||||
|
.PHONY: download-deps
|
||||||
|
|
||||||
|
check_env: check_go_env check_node_env
|
||||||
|
.PHONY: check_env
|
||||||
|
|
||||||
|
check_go_env:
|
||||||
|
@(hash go) || (echo "\nERROR: GO environment not setup properly!\n"; exit 1)
|
||||||
|
@current_go_version=`go version | cut -d ' ' -f 3 | cut -c3-` && \
|
||||||
|
echo "$(GO_VERSION) $$current_go_version" | \
|
||||||
|
tr ' ' '\n' | sort -V | tail -1 | \
|
||||||
|
grep -q "^$${current_go_version}$$" || \
|
||||||
|
(echo "\nERROR: Please upgrade your GO version\nThis project requires at least the version $(GO_VERSION)"; exit 1)
|
||||||
|
.PHONY: check_go_env
|
||||||
|
|
||||||
|
check_node_env:
|
||||||
|
@(hash node) || (echo "\nERROR: Node environment not setup properly!\n"; exit 1)
|
||||||
|
@current_node_version=`node --version` && \
|
||||||
|
echo "$(NODE_VERSION) $$current_node_version" | \
|
||||||
|
tr ' ' '\n' | sort -V | tail -1 | \
|
||||||
|
grep -q "^$${current_node_version}$$" || \
|
||||||
|
(echo "\nERROR: Please check your Node version. Should be at least $(NODE_VERSION)\n"; exit 1)
|
||||||
|
.PHONY: check_node_env
|
||||||
|
|
||||||
|
pre-push: lintall testall
|
||||||
|
.PHONY: pre-push
|
||||||
|
|
||||||
|
deprecated:
|
||||||
|
@echo "WARNING: This target is deprecated and will be removed in future releases. Use 'make build' instead."
|
||||||
|
.PHONY: deprecated
|
||||||
|
|
||||||
|
# Generate Go code from plugins/api/api.proto
|
||||||
|
plugin-gen: check_go_env ##@Development Generate Go code from plugins protobuf files
|
||||||
|
go generate ./plugins/...
|
||||||
|
.PHONY: plugin-gen
|
||||||
|
|
||||||
|
plugin-examples: check_go_env ##@Development Build all example plugins
|
||||||
|
$(MAKE) -C plugins/examples clean all
|
||||||
|
.PHONY: plugin-examples
|
||||||
|
|
||||||
|
plugin-clean: check_go_env ##@Development Clean all plugins
|
||||||
|
$(MAKE) -C plugins/examples clean
|
||||||
|
$(MAKE) -C plugins/testdata clean
|
||||||
|
.PHONY: plugin-clean
|
||||||
|
|
||||||
|
plugin-tests: check_go_env ##@Development Build all test plugins
|
||||||
|
$(MAKE) -C plugins/testdata clean all
|
||||||
|
.PHONY: plugin-tests
|
||||||
|
|
||||||
|
.DEFAULT_GOAL := help
|
||||||
|
|
||||||
|
HELP_FUN = \
|
||||||
|
%help; while(<>){push@{$$help{$$2//'options'}},[$$1,$$3] \
|
||||||
|
if/^([\w-_]+)\s*:.*\#\#(?:@(\w+))?\s(.*)$$/}; \
|
||||||
|
print"$$_:\n", map" $$_->[0]".(" "x(20-length($$_->[0])))."$$_->[1]\n",\
|
||||||
|
@{$$help{$$_}},"\n" for sort keys %help; \
|
||||||
|
|
||||||
|
help: ##@Miscellaneous Show this help
|
||||||
|
@echo "Usage: make [target] ...\n"
|
||||||
|
@perl -e '$(HELP_FUN)' $(MAKEFILE_LIST)
|
||||||
2
Procfile.dev
Normal file
2
Procfile.dev
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
JS: sh -c "cd ./ui && npm start"
|
||||||
|
GO: go tool reflex -d none -c reflex.conf
|
||||||
91
README.md
Normal file
91
README.md
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
<a href="https://www.navidrome.org"><img src="resources/logo-192x192.png" alt="Navidrome logo" title="navidrome" align="right" height="60px" /></a>
|
||||||
|
|
||||||
|
# Navidrome Music Server [](https://twitter.com/intent/tweet?text=Tired%20of%20paying%20for%20music%20subscriptions%2C%20and%20not%20finding%20what%20you%20really%20like%3F%20Roll%20your%20own%20streaming%20service%21&url=https://navidrome.org&via=navidrome)
|
||||||
|
|
||||||
|
[](https://github.com/navidrome/navidrome/releases)
|
||||||
|
[](https://nightly.link/navidrome/navidrome/workflows/pipeline/master)
|
||||||
|
[](https://github.com/navidrome/navidrome/releases/latest)
|
||||||
|
[](https://hub.docker.com/r/deluan/navidrome)
|
||||||
|
[](https://discord.gg/xh7j7yF)
|
||||||
|
[](https://www.reddit.com/r/navidrome/)
|
||||||
|
[](CODE_OF_CONDUCT.md)
|
||||||
|
[](https://gurubase.io/g/navidrome)
|
||||||
|
|
||||||
|
Navidrome is an open source web-based music collection server and streamer. It gives you freedom to listen to your
|
||||||
|
music collection from any browser or mobile device. It's like your personal Spotify!
|
||||||
|
|
||||||
|
This is a modified version of the [original Navidrome](https://github.com/navidrome/navidrome), enhanced with Meilisearch support.
|
||||||
|
|
||||||
|
|
||||||
|
**Note**: The `master` branch may be in an unstable or even broken state during development.
|
||||||
|
Please use [releases](https://github.com/navidrome/navidrome/releases) instead of
|
||||||
|
the `master` branch in order to get a stable set of binaries.
|
||||||
|
|
||||||
|
## [Check out our Live Demo!](https://www.navidrome.org/demo/)
|
||||||
|
|
||||||
|
__Any feedback is welcome!__ If you need/want a new feature, find a bug or think of any way to improve Navidrome,
|
||||||
|
please file a [GitHub issue](https://github.com/navidrome/navidrome/issues) or join the discussion in our
|
||||||
|
[Subreddit](https://www.reddit.com/r/navidrome/). If you want to contribute to the project in any other way
|
||||||
|
([ui/backend dev](https://www.navidrome.org/docs/developers/),
|
||||||
|
[translations](https://www.navidrome.org/docs/developers/translations/),
|
||||||
|
[themes](https://www.navidrome.org/docs/developers/creating-themes)), please join the chat in our
|
||||||
|
[Discord server](https://discord.gg/xh7j7yF).
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
See instructions on the [project's website](https://www.navidrome.org/docs/installation/)
|
||||||
|
|
||||||
|
## Cloud Hosting
|
||||||
|
|
||||||
|
[PikaPods](https://www.pikapods.com) has partnered with us to offer you an
|
||||||
|
[officially supported, cloud-hosted solution](https://www.navidrome.org/docs/installation/managed/#pikapods).
|
||||||
|
A share of the revenue helps fund the development of Navidrome, at no additional cost to you.
|
||||||
|
|
||||||
|
[](https://www.pikapods.com/pods?run=navidrome)
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Handles very **large music collections**
|
||||||
|
- Streams virtually **any audio format** available
|
||||||
|
- Reads and uses all your beautifully curated **metadata**
|
||||||
|
- Great support for **compilations** (Various Artists albums) and **box sets** (multi-disc albums)
|
||||||
|
- **Multi-user**, each user has their own play counts, playlists, favourites, etc...
|
||||||
|
- Very **low resource usage**
|
||||||
|
- **Multi-platform**, runs on macOS, Linux and Windows. **Docker** images are also provided
|
||||||
|
- Ready to use binaries for all major platforms, including **Raspberry Pi**
|
||||||
|
- Automatically **monitors your library** for changes, importing new files and reloading new metadata
|
||||||
|
- **Themeable**, modern and responsive **Web interface** based on [Material UI](https://material-ui.com)
|
||||||
|
- **Compatible** with all Subsonic/Madsonic/Airsonic [clients](https://www.navidrome.org/docs/overview/#apps)
|
||||||
|
- **Transcoding** on the fly. Can be set per user/player. **Opus encoding is supported**
|
||||||
|
- **Meilisearch Integration** for high-performance full-text search (optional)
|
||||||
|
- Translated to **various languages**
|
||||||
|
|
||||||
|
## Translations
|
||||||
|
|
||||||
|
Navidrome uses [POEditor](https://poeditor.com/) for translations, and we are always looking
|
||||||
|
for [more contributors](https://www.navidrome.org/docs/developers/translations/)
|
||||||
|
|
||||||
|
<a href="https://poeditor.com/">
|
||||||
|
<img height="32" src="https://github.com/user-attachments/assets/c19b1d2b-01e1-4682-a007-12356c42147c">
|
||||||
|
</a>
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
All documentation can be found on the project's website: https://www.navidrome.org/docs.
|
||||||
|
Here are some useful direct links:
|
||||||
|
|
||||||
|
- [Overview](https://www.navidrome.org/docs/overview/)
|
||||||
|
- [Installation](https://www.navidrome.org/docs/installation/)
|
||||||
|
- [Docker](https://www.navidrome.org/docs/installation/docker/)
|
||||||
|
- [Binaries](https://www.navidrome.org/docs/installation/pre-built-binaries/)
|
||||||
|
- [Build from source](https://www.navidrome.org/docs/installation/build-from-source/)
|
||||||
|
- [Development](https://www.navidrome.org/docs/developers/)
|
||||||
|
- [Subsonic API Compatibility](https://www.navidrome.org/docs/developers/subsonic-api/)
|
||||||
|
|
||||||
|
## Screenshots
|
||||||
|
|
||||||
|
<p align="left">
|
||||||
|
<img height="550" src="https://raw.githubusercontent.com/navidrome/navidrome/master/.github/screenshots/ss-mobile-login.png">
|
||||||
|
<img height="550" src="https://raw.githubusercontent.com/navidrome/navidrome/master/.github/screenshots/ss-mobile-player.png">
|
||||||
|
<img height="550" src="https://raw.githubusercontent.com/navidrome/navidrome/master/.github/screenshots/ss-mobile-album-view.png">
|
||||||
|
<img width="550" src="https://raw.githubusercontent.com/navidrome/navidrome/master/.github/screenshots/ss-desktop-player.png">
|
||||||
|
</p>
|
||||||
278
adapters/taglib/end_to_end_test.go
Normal file
278
adapters/taglib/end_to_end_test.go
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
package taglib
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/fs"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/djherbis/times"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/model/metadata"
|
||||||
|
"github.com/navidrome/navidrome/utils/gg"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// testFileInfo wraps an fs.FileInfo to expose a BirthTime method, so the
// fixtures parsed by these tests carry a creation timestamp.
// NOTE(review): presumably consumed by metadata.New via the Info.FileInfo
// field — confirm against the metadata package.
type testFileInfo struct {
	fs.FileInfo
}

// BirthTime returns the file's creation time when the OS/filesystem records
// one, falling back to the modification time otherwise.
func (t testFileInfo) BirthTime() time.Time {
	if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
		return ts.BirthTime()
	}
	return t.FileInfo.ModTime()
}
|
||||||
|
|
||||||
|
// End-to-end specs: run the real TagLib extractor over the fixture files and
// check ReplayGain, lyrics, and participant (credits) extraction per format.
var _ = Describe("Extractor", func() {
	// toP builds a Participant with exactly the fields these specs compare:
	// name, sort name, and MusicBrainz artist ID.
	toP := func(name, sortName, mbid string) model.Participant {
		return model.Participant{
			Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
		}
	}

	// roles lists, per role, the participants the fixture files are tagged
	// with. The same table is asserted against every audio format.
	roles := []struct {
		model.Role
		model.ParticipantList
	}{
		{model.RoleComposer, model.ParticipantList{
			toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
			toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
		}},
		{model.RoleLyricist, model.ParticipantList{
			toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
			toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
		}},
		{model.RoleArranger, model.ParticipantList{
			toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
			toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
		}},
		{model.RoleConductor, model.ParticipantList{
			toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
			toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
		}},
		{model.RoleDirector, model.ParticipantList{
			toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
			toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
		}},
		{model.RoleEngineer, model.ParticipantList{
			toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
			toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
		}},
		{model.RoleProducer, model.ParticipantList{
			toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
			toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
		}},
		{model.RoleRemixer, model.ParticipantList{
			toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
			toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
		}},
		{model.RoleDJMixer, model.ParticipantList{
			toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
			toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
		}},
		{model.RoleMixer, model.ParticipantList{
			toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
			toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
		}},
	}

	var e *extractor

	// parseTestFile runs the extractor on a fixture file and converts the
	// result to a model.MediaFile, wrapping the FileInfo so it exposes
	// BirthTime (see testFileInfo above).
	parseTestFile := func(path string) *model.MediaFile {
		mds, err := e.Parse(path)
		Expect(err).ToNot(HaveOccurred())

		info, ok := mds[path]
		Expect(ok).To(BeTrue())

		fileInfo, err := os.Stat(path)
		Expect(err).ToNot(HaveOccurred())
		info.FileInfo = testFileInfo{FileInfo: fileInfo}

		metadata := metadata.New(path, info)
		mf := metadata.ToMediaFile(1, "folderID")
		return &mf
	}

	BeforeEach(func() {
		e = &extractor{}
	})

	Describe("ReplayGain", func() {
		DescribeTable("test replaygain end-to-end", func(file string, trackGain, trackPeak, albumGain, albumPeak *float64) {
			mf := parseTestFile("tests/fixtures/" + file)

			Expect(mf.RGTrackGain).To(Equal(trackGain))
			Expect(mf.RGTrackPeak).To(Equal(trackPeak))
			Expect(mf.RGAlbumGain).To(Equal(albumGain))
			Expect(mf.RGAlbumPeak).To(Equal(albumPeak))
		},
			Entry("mp3 with no replaygain", "no_replaygain.mp3", nil, nil, nil, nil),
			Entry("mp3 with no zero replaygain", "zero_replaygain.mp3", gg.P(0.0), gg.P(1.0), gg.P(0.0), gg.P(1.0)),
		)
	})

	Describe("lyrics", func() {
		// makeLyrics builds the two-line synced lyric that every fixture file
		// is tagged with; only the language code and the second line vary.
		makeLyrics := func(code, secondLine string) model.Lyrics {
			return model.Lyrics{
				DisplayArtist: "",
				DisplayTitle:  "",
				Lang:          code,
				Line: []model.Line{
					{Start: gg.P(int64(0)), Value: "This is"},
					{Start: gg.P(int64(2500)), Value: secondLine},
				},
				Offset: nil,
				Synced: true,
			}
		}

		It("should fetch both synced and unsynced lyrics in mixed flac", func() {
			mf := parseTestFile("tests/fixtures/mixed-lyrics.flac")

			lyrics, err := mf.StructuredLyrics()
			Expect(err).ToNot(HaveOccurred())
			Expect(lyrics).To(HaveLen(2))

			Expect(lyrics[0].Synced).To(BeTrue())
			Expect(lyrics[1].Synced).To(BeFalse())
		})

		It("should handle mp3 with uslt and sylt", func() {
			mf := parseTestFile("tests/fixtures/test.mp3")

			lyrics, err := mf.StructuredLyrics()
			Expect(err).ToNot(HaveOccurred())
			Expect(lyrics).To(HaveLen(4))

			engSylt := makeLyrics("eng", "English SYLT")
			engUslt := makeLyrics("eng", "English")
			unsSylt := makeLyrics("xxx", "unspecified SYLT")
			unsUslt := makeLyrics("xxx", "unspecified")

			// Why is the order inconsistent between runs? Nobody knows
			Expect(lyrics).To(Or(
				Equal(model.LyricList{engSylt, engUslt, unsSylt, unsUslt}),
				Equal(model.LyricList{unsSylt, unsUslt, engSylt, engUslt}),
			))
		})

		DescribeTable("format-specific lyrics", func(file string, isId3 bool) {
			mf := parseTestFile("tests/fixtures/" + file)

			lyrics, err := mf.StructuredLyrics()
			Expect(err).To(Not(HaveOccurred()))
			Expect(lyrics).To(HaveLen(2))

			unspec := makeLyrics("xxx", "unspecified")
			eng := makeLyrics("xxx", "English")

			// ID3-based formats (wav/aiff entries below) report a language
			// code with the English lyric; the others do not.
			if isId3 {
				eng.Lang = "eng"
			}

			Expect(lyrics).To(Or(
				Equal(model.LyricList{unspec, eng}),
				Equal(model.LyricList{eng, unspec})))
		},
			Entry("flac", "test.flac", false),
			Entry("m4a", "test.m4a", false),
			Entry("ogg", "test.ogg", false),
			Entry("wma", "test.wma", false),
			Entry("wv", "test.wv", false),
			Entry("wav", "test.wav", true),
			Entry("aiff", "test.aiff", true),
		)
	})

	Describe("Participants", func() {
		DescribeTable("test tags consistent across formats", func(format string) {
			mf := parseTestFile("tests/fixtures/test." + format)

			for _, data := range roles {
				role := data.Role
				artists := data.ParticipantList

				actual := mf.Participants[role]
				Expect(actual).To(HaveLen(len(artists)))

				for i := range artists {
					actualArtist := actual[i]
					expectedArtist := artists[i]

					Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
					Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
					Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
				}
			}

			if format != "m4a" {
				performers := mf.Participants[model.RolePerformer]
				Expect(performers).To(HaveLen(8))

				// performer name -> MBID followed by the expected sub-roles
				// (instruments); "" means an entry with no sub-role.
				rules := map[string][]string{
					"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
					"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
					"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
					"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
					"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
				}

				for name, rule := range rules {
					mbid := rule[0]
					for i := 1; i < len(rule); i++ {
						found := false

						for _, mapped := range performers {
							if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
								found = true
								break
							}
						}

						Expect(found).To(BeTrue(), "Could not find matching artist")
					}
				}
			}
		},
			Entry("FLAC format", "flac"),
			Entry("M4a format", "m4a"),
			Entry("OGG format", "ogg"),
			Entry("WV format", "wv"),

			Entry("MP3 format", "mp3"),
			Entry("WAV format", "wav"),
			Entry("AIFF format", "aiff"),
		)

		It("should parse wma", func() {
			mf := parseTestFile("tests/fixtures/test.wma")

			for _, data := range roles {
				role := data.Role
				artists := data.ParticipantList
				actual := mf.Participants[role]

				// WMA has no Arranger role
				if role == model.RoleArranger {
					Expect(actual).To(HaveLen(0))
					continue
				}

				Expect(actual).To(HaveLen(len(artists)), role.String())

				// For some bizarre reason, the order is inverted. We also don't get
				// sort names or MBIDs
				for i := range artists {
					idx := len(artists) - 1 - i

					actualArtist := actual[i]
					expectedArtist := artists[idx]

					Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
				}
			}
		})
	})
})
|
||||||
9
adapters/taglib/get_filename.go
Normal file
9
adapters/taglib/get_filename.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
//go:build !windows
|
||||||
|
|
||||||
|
package taglib
|
||||||
|
|
||||||
|
import "C"
|
||||||
|
|
||||||
|
// getFilename converts a Go string to a C char* for TagLib's C API; on
// non-Windows platforms file paths are passed as plain bytes (see
// get_filename_win.go for the wide-char Windows variant).
// NOTE(review): C.CString allocates — the caller is expected to free the
// buffer with C.free; confirm the call sites do so.
func getFilename(s string) *C.char {
	return C.CString(s)
}
|
||||||
96
adapters/taglib/get_filename_win.go
Normal file
96
adapters/taglib/get_filename_win.go
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
//go:build windows
|
||||||
|
|
||||||
|
package taglib
|
||||||
|
|
||||||
|
// From https://github.com/orofarne/gowchar
|
||||||
|
|
||||||
|
/*
|
||||||
|
#include <wchar.h>
|
||||||
|
|
||||||
|
const size_t SIZEOF_WCHAR_T = sizeof(wchar_t);
|
||||||
|
|
||||||
|
void gowchar_set (wchar_t *arr, int pos, wchar_t val)
|
||||||
|
{
|
||||||
|
arr[pos] = val;
|
||||||
|
}
|
||||||
|
|
||||||
|
wchar_t gowchar_get (wchar_t *arr, int pos)
|
||||||
|
{
|
||||||
|
return arr[pos];
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"unicode/utf16"
|
||||||
|
"unicode/utf8"
|
||||||
|
)
|
||||||
|
|
||||||
|
var SIZEOF_WCHAR_T C.size_t = C.size_t(C.SIZEOF_WCHAR_T)
|
||||||
|
|
||||||
|
// getFilename converts a Go string into the wide-character (wchar_t*) form
// that file names require on Windows.
// NOTE(review): the buffer returned by StringToWcharT is C.malloc'd — the
// caller is expected to free it with C.free; confirm the call sites do so.
func getFilename(s string) *C.wchar_t {
	wstr, _ := StringToWcharT(s)
	return wstr
}
|
||||||
|
|
||||||
|
func StringToWcharT(s string) (*C.wchar_t, C.size_t) {
|
||||||
|
switch SIZEOF_WCHAR_T {
|
||||||
|
case 2:
|
||||||
|
return stringToWchar2(s) // Windows
|
||||||
|
case 4:
|
||||||
|
return stringToWchar4(s) // Unix
|
||||||
|
default:
|
||||||
|
panic(fmt.Sprintf("Invalid sizeof(wchar_t) = %v", SIZEOF_WCHAR_T))
|
||||||
|
}
|
||||||
|
panic("?!!")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Windows
// stringToWchar2 encodes s as a NUL-terminated UTF-16 buffer (2-byte wchar_t).
// It walks the string twice: first to count UTF-16 code units (runes outside
// the BMP need a surrogate pair, i.e. two units), then to write them.
// The buffer is C.malloc'd; the caller must free it with C.free.
func stringToWchar2(s string) (*C.wchar_t, C.size_t) {
	var slen int
	s1 := s
	for len(s1) > 0 {
		r, size := utf8.DecodeRuneInString(s1)
		// utf16.EncodeRune returns U+FFFD for runes that do NOT need a
		// surrogate pair, so that branch counts as a single code unit.
		if er, _ := utf16.EncodeRune(r); er == '\uFFFD' {
			slen += 1
		} else {
			slen += 2
		}
		s1 = s1[size:]
	}
	slen++ // \0
	res := C.malloc(C.size_t(slen) * SIZEOF_WCHAR_T)
	var i int
	for len(s) > 0 {
		r, size := utf8.DecodeRuneInString(s)
		if r1, r2 := utf16.EncodeRune(r); r1 != '\uFFFD' {
			// Supplementary-plane rune: write both halves of the surrogate pair.
			C.gowchar_set((*C.wchar_t)(res), C.int(i), C.wchar_t(r1))
			i++
			C.gowchar_set((*C.wchar_t)(res), C.int(i), C.wchar_t(r2))
			i++
		} else {
			// BMP rune: stored directly as one code unit.
			C.gowchar_set((*C.wchar_t)(res), C.int(i), C.wchar_t(r))
			i++
		}
		s = s[size:]
	}
	C.gowchar_set((*C.wchar_t)(res), C.int(slen-1), C.wchar_t(0)) // \0
	return (*C.wchar_t)(res), C.size_t(slen)
}
|
||||||
|
|
||||||
|
// Unix
// stringToWchar4 encodes s as a NUL-terminated UTF-32 buffer (4-byte wchar_t):
// one wchar_t per rune plus the terminator. The buffer is C.malloc'd; the
// caller must free it with C.free.
func stringToWchar4(s string) (*C.wchar_t, C.size_t) {
	slen := utf8.RuneCountInString(s)
	slen++ // \0
	res := C.malloc(C.size_t(slen) * SIZEOF_WCHAR_T)
	var i int
	for len(s) > 0 {
		r, size := utf8.DecodeRuneInString(s)
		C.gowchar_set((*C.wchar_t)(res), C.int(i), C.wchar_t(r))
		s = s[size:]
		i++
	}
	C.gowchar_set((*C.wchar_t)(res), C.int(slen-1), C.wchar_t(0)) // \0
	return (*C.wchar_t)(res), C.size_t(slen)
}
|
||||||
178
adapters/taglib/taglib.go
Normal file
178
adapters/taglib/taglib.go
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
package taglib
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/fs"
|
||||||
|
"path/filepath"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/core/storage/local"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
// extractor reads tags and audio properties from audio files under baseDir
// using TagLib (see the cgo helpers in this package). It implements
// local.Extractor (asserted below).
type extractor struct {
	baseDir string // all paths passed to Parse are resolved relative to this directory
}
||||||
|
|
||||||
|
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
|
||||||
|
results := make(map[string]metadata.Info)
|
||||||
|
for _, path := range files {
|
||||||
|
props, err := e.extractMetadata(path)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
results[path] = *props
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Version reports the version of the underlying TagLib library, delegating to
// the package-level Version function.
func (e extractor) Version() string {
	return Version()
}
|
||||||
|
|
||||||
|
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
|
||||||
|
fullPath := filepath.Join(e.baseDir, filePath)
|
||||||
|
tags, err := Read(fullPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Warn("extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse audio properties
|
||||||
|
ap := metadata.AudioProperties{}
|
||||||
|
ap.BitRate = parseProp(tags, "__bitrate")
|
||||||
|
ap.Channels = parseProp(tags, "__channels")
|
||||||
|
ap.SampleRate = parseProp(tags, "__samplerate")
|
||||||
|
ap.BitDepth = parseProp(tags, "__bitspersample")
|
||||||
|
length := parseProp(tags, "__lengthinmilliseconds")
|
||||||
|
ap.Duration = (time.Millisecond * time.Duration(length)).Round(time.Millisecond * 10)
|
||||||
|
|
||||||
|
// Extract basic tags
|
||||||
|
parseBasicTag(tags, "__title", "title")
|
||||||
|
parseBasicTag(tags, "__artist", "artist")
|
||||||
|
parseBasicTag(tags, "__album", "album")
|
||||||
|
parseBasicTag(tags, "__comment", "comment")
|
||||||
|
parseBasicTag(tags, "__genre", "genre")
|
||||||
|
parseBasicTag(tags, "__year", "year")
|
||||||
|
parseBasicTag(tags, "__track", "tracknumber")
|
||||||
|
|
||||||
|
// Parse track/disc totals
|
||||||
|
parseTuple := func(prop string) {
|
||||||
|
tagName := prop + "number"
|
||||||
|
tagTotal := prop + "total"
|
||||||
|
if value, ok := tags[tagName]; ok && len(value) > 0 {
|
||||||
|
parts := strings.Split(value[0], "/")
|
||||||
|
tags[tagName] = []string{parts[0]}
|
||||||
|
if len(parts) == 2 {
|
||||||
|
tags[tagTotal] = []string{parts[1]}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
parseTuple("track")
|
||||||
|
parseTuple("disc")
|
||||||
|
|
||||||
|
// Adjust some ID3 tags
|
||||||
|
parseLyrics(tags)
|
||||||
|
parseTIPL(tags)
|
||||||
|
delete(tags, "tmcl") // TMCL is already parsed by TagLib
|
||||||
|
|
||||||
|
return &metadata.Info{
|
||||||
|
Tags: tags,
|
||||||
|
AudioProperties: ap,
|
||||||
|
HasPicture: tags["has_picture"] != nil && len(tags["has_picture"]) > 0 && tags["has_picture"][0] == "true",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseLyrics ensures lyrics tags always carry a language suffix: any values
// stored under the plain "lyrics" key are moved to "lyrics:xxx" ("xxx" =
// unspecified language), so downstream code only sees the "lyrics:<lang>" form.
func parseLyrics(tags map[string][]string) {
	values, ok := tags["lyrics"]
	if !ok || len(values) == 0 {
		return
	}
	delete(tags, "lyrics")
	tags["lyrics:xxx"] = values
}
|
||||||
|
|
||||||
|
// These are the only roles we support, based on Picard's tag map:
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
// Keys are the role keywords as they appear inside a TIPL frame; values are
// the canonical tag names parseTIPL writes to.
var tiplMapping = map[string]string{
	"arranger": "arranger",
	"engineer": "engineer",
	"producer": "producer",
	"mix":      "mixer",
	"DJ-mix":   "djmixer",
}
|
||||||
|
|
||||||
|
// parseProp removes prop from the tags map and returns its first value parsed
// as an int. Missing entries, empty value lists, and unparseable values all
// yield 0. Note that the entry is deleted even when parsing fails.
func parseProp(tags map[string][]string, prop string) int {
	values, ok := tags[prop]
	if !ok || len(values) == 0 {
		return 0
	}
	delete(tags, prop)
	result, _ := strconv.Atoi(values[0]) // non-numeric input deliberately maps to 0
	return result
}
|
||||||
|
|
||||||
|
// parseBasicTag promotes a TagLib "basic" tag (e.g. __title, __artist) to its
// canonical name (e.g. title, artist). The basic entry is always removed when
// it has values; those values are copied to the canonical tag only when that
// tag has no values yet, so explicitly-set tags win over the basic fallback.
func parseBasicTag(tags map[string][]string, basicName string, tagName string) {
	values := tags[basicName]
	if len(values) == 0 {
		return
	}
	delete(tags, basicName)
	if len(tags[tagName]) > 0 {
		return // canonical tag already populated; basic value is discarded
	}
	tags[tagName] = values
}
|
||||||
|
|
||||||
|
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
//
//	"arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
//
// and breaks it down into a map of roles and names, e.g.:
//
//	{"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
//
// The resulting names are appended to tags under the canonical role names
// defined in tiplMapping, and the original "tipl" entry is removed.
func parseTIPL(tags map[string][]string) {
	tipl := tags["tipl"]
	if len(tipl) == 0 {
		return
	}

	// addRole flushes the words accumulated for the current role into the
	// canonical tag named by tiplMapping.
	addRole := func(currentRole string, currentValue []string) {
		if currentRole != "" && len(currentValue) > 0 {
			role := tiplMapping[currentRole]
			tags[role] = append(tags[role], strings.Join(currentValue, " "))
		}
	}

	var currentRole string
	var currentValue []string
	// Any word that matches a known role keyword starts a new role; every
	// other word is part of the current name.
	// NOTE(review): a name containing a standalone role keyword (e.g. a person
	// literally named "producer X") would be split incorrectly — this is
	// inherent to the space-separated TIPL representation.
	for _, part := range strings.Split(tipl[0], " ") {
		if _, ok := tiplMapping[part]; ok {
			addRole(currentRole, currentValue)
			currentRole = part
			currentValue = nil
			continue
		}
		currentValue = append(currentValue, part)
	}
	addRole(currentRole, currentValue) // flush the final role
	delete(tags, "tipl")
}
|
||||||
|
|
||||||
|
var _ local.Extractor = (*extractor)(nil)
|
||||||
|
|
||||||
|
// init registers this extractor with the local storage under the "taglib"
// name, and schedules a debug log of the linked TagLib version to run via a
// configuration hook.
func init() {
	local.RegisterExtractor("taglib", func(_ fs.FS, baseDir string) local.Extractor {
		// ignores fs, as taglib extractor only works with local files
		return &extractor{baseDir}
	})
	conf.AddHook(func() {
		log.Debug("TagLib version", "version", Version())
	})
}
|
||||||
17
adapters/taglib/taglib_suite_test.go
Normal file
17
adapters/taglib/taglib_suite_test.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package taglib
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestTagLib is the standard Go test entry point that runs this package's
// Ginkgo suite. Log output below Fatal is suppressed for the duration of the
// suite.
func TestTagLib(t *testing.T) {
	tests.Init(t, true)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "TagLib Suite")
}
|
||||||
296
adapters/taglib/taglib_test.go
Normal file
296
adapters/taglib/taglib_test.go
Normal file
@@ -0,0 +1,296 @@
|
|||||||
|
package taglib
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/fs"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/utils"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Extractor", func() {
|
||||||
|
var e *extractor
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
e = &extractor{}
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Parse", func() {
|
||||||
|
It("correctly parses metadata from all files in folder", func() {
|
||||||
|
mds, err := e.Parse(
|
||||||
|
"tests/fixtures/test.mp3",
|
||||||
|
"tests/fixtures/test.ogg",
|
||||||
|
)
|
||||||
|
Expect(err).NotTo(HaveOccurred())
|
||||||
|
Expect(mds).To(HaveLen(2))
|
||||||
|
|
||||||
|
// Test MP3
|
||||||
|
m := mds["tests/fixtures/test.mp3"]
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||||
|
|
||||||
|
Expect(m.HasPicture).To(BeTrue())
|
||||||
|
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||||
|
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||||
|
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||||
|
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||||
|
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("compilation", []string{"1"}),
|
||||||
|
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||||
|
)
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||||
|
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||||
|
|
||||||
|
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||||
|
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||||
|
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||||
|
"[00:00.00]This is\n[00:02.50]English",
|
||||||
|
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||||
|
"[00:00.00]This is\n[00:02.50]English",
|
||||||
|
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||||
|
})))
|
||||||
|
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||||
|
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||||
|
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||||
|
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||||
|
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||||
|
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||||
|
})))
|
||||||
|
|
||||||
|
// Test OGG
|
||||||
|
m = mds["tests/fixtures/test.ogg"]
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||||
|
|
||||||
|
// TabLib 1.12 returns 18, previous versions return 39.
|
||||||
|
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||||
|
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
|
||||||
|
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||||
|
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||||
|
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||||
|
Expect(m.HasPicture).To(BeTrue())
|
||||||
|
})
|
||||||
|
|
||||||
|
DescribeTable("Format-Specific tests",
|
||||||
|
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
|
||||||
|
file = "tests/fixtures/" + file
|
||||||
|
mds, err := e.Parse(file)
|
||||||
|
Expect(err).NotTo(HaveOccurred())
|
||||||
|
Expect(mds).To(HaveLen(1))
|
||||||
|
|
||||||
|
m := mds[file]
|
||||||
|
|
||||||
|
Expect(m.HasPicture).To(Equal(image))
|
||||||
|
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||||
|
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||||
|
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||||
|
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||||
|
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||||
|
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
|
||||||
|
))
|
||||||
|
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||||
|
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||||
|
))
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||||
|
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||||
|
))
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||||
|
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||||
|
))
|
||||||
|
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||||
|
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||||
|
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||||
|
))
|
||||||
|
if !strings.HasSuffix(file, "test.wma") {
|
||||||
|
// TODO Not sure why this is not working for WMA
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||||
|
}
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||||
|
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||||
|
))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||||
|
|
||||||
|
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||||
|
Expect(m.Tags).To(Or(
|
||||||
|
HaveKeyWithValue("compilation", []string{"1"}),
|
||||||
|
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||||
|
)
|
||||||
|
|
||||||
|
if id3Lyrics {
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||||
|
"[00:00.00]This is\n[00:02.50]English",
|
||||||
|
}))
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||||
|
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||||
|
}))
|
||||||
|
} else {
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||||
|
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||||
|
"[00:00.00]This is\n[00:02.50]English",
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||||
|
},
|
||||||
|
|
||||||
|
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||||
|
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
|
||||||
|
|
||||||
|
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||||
|
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||||
|
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
|
||||||
|
|
||||||
|
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||||
|
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||||
|
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
|
||||||
|
|
||||||
|
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||||
|
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, true),
|
||||||
|
|
||||||
|
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||||
|
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
|
||||||
|
|
||||||
|
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||||
|
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
|
||||||
|
)
|
||||||
|
|
||||||
|
// Skip these tests when running as root
|
||||||
|
Context("Access Forbidden", func() {
|
||||||
|
var accessForbiddenFile string
|
||||||
|
var RegularUserContext = XContext
|
||||||
|
var isRegularUser = os.Getuid() != 0
|
||||||
|
if isRegularUser {
|
||||||
|
RegularUserContext = Context
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only run permission tests if we are not root
|
||||||
|
RegularUserContext("when run without root privileges", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||||
|
|
||||||
|
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
|
||||||
|
DeferCleanup(func() {
|
||||||
|
Expect(f.Close()).To(Succeed())
|
||||||
|
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||||
|
_, err := e.extractMetadata(accessForbiddenFile)
|
||||||
|
Expect(err).To(MatchError(os.ErrPermission))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("skips the file if it cannot be read", func() {
|
||||||
|
files := []string{
|
||||||
|
"tests/fixtures/test.mp3",
|
||||||
|
"tests/fixtures/test.ogg",
|
||||||
|
accessForbiddenFile,
|
||||||
|
}
|
||||||
|
mds, err := e.Parse(files...)
|
||||||
|
Expect(err).NotTo(HaveOccurred())
|
||||||
|
Expect(mds).To(HaveLen(2))
|
||||||
|
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Error Checking", func() {
|
||||||
|
It("returns a generic ErrPath if file does not exist", func() {
|
||||||
|
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||||
|
_, err := e.extractMetadata(testFilePath)
|
||||||
|
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||||
|
})
|
||||||
|
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||||
|
// File has an empty TDAT frame
|
||||||
|
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("parseTIPL", func() {
|
||||||
|
var tags map[string][]string
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
tags = make(map[string][]string)
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the TIPL string is populated", func() {
|
||||||
|
It("correctly parses roles and names", func() {
|
||||||
|
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||||
|
parseTIPL(tags)
|
||||||
|
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||||
|
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||||
|
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("handles multiple names for a single role", func() {
|
||||||
|
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||||
|
parseTIPL(tags)
|
||||||
|
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||||
|
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("discards roles without names", func() {
|
||||||
|
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||||
|
parseTIPL(tags)
|
||||||
|
Expect(tags).ToNot(HaveKey("producer"))
|
||||||
|
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the TIPL string is empty", func() {
|
||||||
|
It("does nothing", func() {
|
||||||
|
tags["tipl"] = []string{""}
|
||||||
|
parseTIPL(tags)
|
||||||
|
Expect(tags).To(BeEmpty())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the TIPL is not present", func() {
|
||||||
|
It("does nothing", func() {
|
||||||
|
parseTIPL(tags)
|
||||||
|
Expect(tags).To(BeEmpty())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
||||||
299
adapters/taglib/taglib_wrapper.cpp
Normal file
299
adapters/taglib/taglib_wrapper.cpp
Normal file
@@ -0,0 +1,299 @@
|
|||||||
|
#include <stdlib.h>
|
||||||
|
#include <string.h>
|
||||||
|
|
||||||
|
#define TAGLIB_STATIC
|
||||||
|
#include <apeproperties.h>
|
||||||
|
#include <apetag.h>
|
||||||
|
#include <aifffile.h>
|
||||||
|
#include <asffile.h>
|
||||||
|
#include <dsffile.h>
|
||||||
|
#include <fileref.h>
|
||||||
|
#include <flacfile.h>
|
||||||
|
#include <id3v2tag.h>
|
||||||
|
#include <unsynchronizedlyricsframe.h>
|
||||||
|
#include <synchronizedlyricsframe.h>
|
||||||
|
#include <mp4file.h>
|
||||||
|
#include <mpegfile.h>
|
||||||
|
#include <opusfile.h>
|
||||||
|
#include <tpropertymap.h>
|
||||||
|
#include <vorbisfile.h>
|
||||||
|
#include <wavfile.h>
|
||||||
|
#include <wavfile.h>
|
||||||
|
#include <wavpackfile.h>
|
||||||
|
|
||||||
|
#include "taglib_wrapper.h"
|
||||||
|
|
||||||
|
char has_cover(const TagLib::FileRef f);
|
||||||
|
|
||||||
|
static char TAGLIB_VERSION[16];
|
||||||
|
|
||||||
|
char* taglib_version() {
|
||||||
|
snprintf((char *)TAGLIB_VERSION, 16, "%d.%d.%d", TAGLIB_MAJOR_VERSION, TAGLIB_MINOR_VERSION, TAGLIB_PATCH_VERSION);
|
||||||
|
return (char *)TAGLIB_VERSION;
|
||||||
|
}
|
||||||
|
|
||||||
|
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||||
|
TagLib::FileRef f(filename, true, TagLib::AudioProperties::Fast);
|
||||||
|
|
||||||
|
if (f.isNull()) {
|
||||||
|
return TAGLIB_ERR_PARSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!f.audioProperties()) {
|
||||||
|
return TAGLIB_ERR_AUDIO_PROPS;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add audio properties to the tags
|
||||||
|
const TagLib::AudioProperties *props(f.audioProperties());
|
||||||
|
goPutInt(id, (char *)"__lengthinmilliseconds", props->lengthInMilliseconds());
|
||||||
|
goPutInt(id, (char *)"__bitrate", props->bitrate());
|
||||||
|
goPutInt(id, (char *)"__channels", props->channels());
|
||||||
|
goPutInt(id, (char *)"__samplerate", props->sampleRate());
|
||||||
|
|
||||||
|
// Extract bits per sample for supported formats
|
||||||
|
int bitsPerSample = 0;
|
||||||
|
if (const auto* apeProperties{ dynamic_cast<const TagLib::APE::Properties*>(props) })
|
||||||
|
bitsPerSample = apeProperties->bitsPerSample();
|
||||||
|
else if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
|
||||||
|
bitsPerSample = asfProperties->bitsPerSample();
|
||||||
|
else if (const auto* flacProperties{ dynamic_cast<const TagLib::FLAC::Properties*>(props) })
|
||||||
|
bitsPerSample = flacProperties->bitsPerSample();
|
||||||
|
else if (const auto* mp4Properties{ dynamic_cast<const TagLib::MP4::Properties*>(props) })
|
||||||
|
bitsPerSample = mp4Properties->bitsPerSample();
|
||||||
|
else if (const auto* wavePackProperties{ dynamic_cast<const TagLib::WavPack::Properties*>(props) })
|
||||||
|
bitsPerSample = wavePackProperties->bitsPerSample();
|
||||||
|
else if (const auto* aiffProperties{ dynamic_cast<const TagLib::RIFF::AIFF::Properties*>(props) })
|
||||||
|
bitsPerSample = aiffProperties->bitsPerSample();
|
||||||
|
else if (const auto* wavProperties{ dynamic_cast<const TagLib::RIFF::WAV::Properties*>(props) })
|
||||||
|
bitsPerSample = wavProperties->bitsPerSample();
|
||||||
|
else if (const auto* dsfProperties{ dynamic_cast<const TagLib::DSF::Properties*>(props) })
|
||||||
|
bitsPerSample = dsfProperties->bitsPerSample();
|
||||||
|
|
||||||
|
if (bitsPerSample > 0) {
|
||||||
|
goPutInt(id, (char *)"__bitspersample", bitsPerSample);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send all properties to the Go map
|
||||||
|
TagLib::PropertyMap tags = f.file()->properties();
|
||||||
|
|
||||||
|
// Make sure at least the basic properties are extracted
|
||||||
|
TagLib::Tag *basic = f.file()->tag();
|
||||||
|
if (!basic->isEmpty()) {
|
||||||
|
if (!basic->title().isEmpty()) {
|
||||||
|
tags.insert("__title", basic->title());
|
||||||
|
}
|
||||||
|
if (!basic->artist().isEmpty()) {
|
||||||
|
tags.insert("__artist", basic->artist());
|
||||||
|
}
|
||||||
|
if (!basic->album().isEmpty()) {
|
||||||
|
tags.insert("__album", basic->album());
|
||||||
|
}
|
||||||
|
if (!basic->comment().isEmpty()) {
|
||||||
|
tags.insert("__comment", basic->comment());
|
||||||
|
}
|
||||||
|
if (!basic->genre().isEmpty()) {
|
||||||
|
tags.insert("__genre", basic->genre());
|
||||||
|
}
|
||||||
|
if (basic->year() > 0) {
|
||||||
|
tags.insert("__year", TagLib::String::number(basic->year()));
|
||||||
|
}
|
||||||
|
if (basic->track() > 0) {
|
||||||
|
tags.insert("__track", TagLib::String::number(basic->track()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
TagLib::ID3v2::Tag *id3Tags = NULL;
|
||||||
|
|
||||||
|
// Get some extended/non-standard ID3-only tags (ex: iTunes extended frames)
|
||||||
|
TagLib::MPEG::File *mp3File(dynamic_cast<TagLib::MPEG::File *>(f.file()));
|
||||||
|
if (mp3File != NULL) {
|
||||||
|
id3Tags = mp3File->ID3v2Tag();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (id3Tags == NULL) {
|
||||||
|
TagLib::RIFF::WAV::File *wavFile(dynamic_cast<TagLib::RIFF::WAV::File *>(f.file()));
|
||||||
|
if (wavFile != NULL && wavFile->hasID3v2Tag()) {
|
||||||
|
id3Tags = wavFile->ID3v2Tag();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (id3Tags == NULL) {
|
||||||
|
TagLib::RIFF::AIFF::File *aiffFile(dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file()));
|
||||||
|
if (aiffFile && aiffFile->hasID3v2Tag()) {
|
||||||
|
id3Tags = aiffFile->tag();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Yes, it is possible to have ID3v2 tags in FLAC. However, that can cause problems
|
||||||
|
// with many players, so they will not be parsed
|
||||||
|
|
||||||
|
if (id3Tags != NULL) {
|
||||||
|
const auto &frames = id3Tags->frameListMap();
|
||||||
|
|
||||||
|
for (const auto &kv: frames) {
|
||||||
|
if (kv.first == "USLT") {
|
||||||
|
for (const auto &tag: kv.second) {
|
||||||
|
TagLib::ID3v2::UnsynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::UnsynchronizedLyricsFrame *>(tag);
|
||||||
|
if (frame == NULL) continue;
|
||||||
|
|
||||||
|
tags.erase("LYRICS");
|
||||||
|
|
||||||
|
const auto bv = frame->language();
|
||||||
|
char language[4] = {'x', 'x', 'x', '\0'};
|
||||||
|
if (bv.size() == 3) {
|
||||||
|
strncpy(language, bv.data(), 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
char *val = const_cast<char*>(frame->text().toCString(true));
|
||||||
|
|
||||||
|
goPutLyrics(id, language, val);
|
||||||
|
}
|
||||||
|
} else if (kv.first == "SYLT") {
|
||||||
|
for (const auto &tag: kv.second) {
|
||||||
|
TagLib::ID3v2::SynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::SynchronizedLyricsFrame *>(tag);
|
||||||
|
if (frame == NULL) continue;
|
||||||
|
|
||||||
|
const auto bv = frame->language();
|
||||||
|
char language[4] = {'x', 'x', 'x', '\0'};
|
||||||
|
if (bv.size() == 3) {
|
||||||
|
strncpy(language, bv.data(), 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
const auto format = frame->timestampFormat();
|
||||||
|
if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMilliseconds) {
|
||||||
|
|
||||||
|
for (const auto &line: frame->synchedText()) {
|
||||||
|
char *text = const_cast<char*>(line.text.toCString(true));
|
||||||
|
goPutLyricLine(id, language, text, line.time);
|
||||||
|
}
|
||||||
|
} else if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMpegFrames) {
|
||||||
|
const int sampleRate = props->sampleRate();
|
||||||
|
|
||||||
|
if (sampleRate != 0) {
|
||||||
|
for (const auto &line: frame->synchedText()) {
|
||||||
|
const int timeInMs = (line.time * 1000) / sampleRate;
|
||||||
|
char *text = const_cast<char*>(line.text.toCString(true));
|
||||||
|
goPutLyricLine(id, language, text, timeInMs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (kv.first == "TIPL"){
|
||||||
|
if (!kv.second.isEmpty()) {
|
||||||
|
tags.insert(kv.first, kv.second.front()->toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// M4A may have some iTunes specific tags not captured by the PropertyMap interface
|
||||||
|
TagLib::MP4::File *m4afile(dynamic_cast<TagLib::MP4::File *>(f.file()));
|
||||||
|
if (m4afile != NULL) {
|
||||||
|
const auto itemListMap = m4afile->tag()->itemMap();
|
||||||
|
for (const auto item: itemListMap) {
|
||||||
|
char *key = const_cast<char*>(item.first.toCString(true));
|
||||||
|
for (const auto value: item.second.toStringList()) {
|
||||||
|
char *val = const_cast<char*>(value.toCString(true));
|
||||||
|
goPutM4AStr(id, key, val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WMA/ASF files may have additional tags not captured by the PropertyMap interface
|
||||||
|
TagLib::ASF::File *asfFile(dynamic_cast<TagLib::ASF::File *>(f.file()));
|
||||||
|
if (asfFile != NULL) {
|
||||||
|
const TagLib::ASF::Tag *asfTags{asfFile->tag()};
|
||||||
|
const auto itemListMap = asfTags->attributeListMap();
|
||||||
|
for (const auto item : itemListMap) {
|
||||||
|
char *key = const_cast<char*>(item.first.toCString(true));
|
||||||
|
|
||||||
|
for (auto j = item.second.begin();
|
||||||
|
j != item.second.end(); ++j) {
|
||||||
|
|
||||||
|
char *val = const_cast<char*>(j->toString().toCString(true));
|
||||||
|
goPutStr(id, key, val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send all collected tags to the Go map
|
||||||
|
for (TagLib::PropertyMap::ConstIterator i = tags.begin(); i != tags.end();
|
||||||
|
++i) {
|
||||||
|
char *key = const_cast<char*>(i->first.toCString(true));
|
||||||
|
for (TagLib::StringList::ConstIterator j = i->second.begin();
|
||||||
|
j != i->second.end(); ++j) {
|
||||||
|
char *val = const_cast<char*>((*j).toCString(true));
|
||||||
|
goPutStr(id, key, val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cover art has to be handled separately
|
||||||
|
if (has_cover(f)) {
|
||||||
|
goPutStr(id, (char *)"has_picture", (char *)"true");
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect if the file has cover art. Returns 1 if the file has cover art, 0 otherwise.
|
||||||
|
char has_cover(const TagLib::FileRef f) {
|
||||||
|
char hasCover = 0;
|
||||||
|
// ----- MP3
|
||||||
|
if (TagLib::MPEG::File * mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
|
||||||
|
if (mp3File->ID3v2Tag()) {
|
||||||
|
const auto &frameListMap{mp3File->ID3v2Tag()->frameListMap()};
|
||||||
|
hasCover = !frameListMap["APIC"].isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// ----- FLAC
|
||||||
|
else if (TagLib::FLAC::File * flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
|
||||||
|
hasCover = !flacFile->pictureList().isEmpty();
|
||||||
|
}
|
||||||
|
// ----- MP4
|
||||||
|
else if (TagLib::MP4::File * mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
|
||||||
|
auto &coverItem{mp4File->tag()->itemMap()["covr"]};
|
||||||
|
TagLib::MP4::CoverArtList coverArtList{coverItem.toCoverArtList()};
|
||||||
|
hasCover = !coverArtList.isEmpty();
|
||||||
|
}
|
||||||
|
// ----- Ogg
|
||||||
|
else if (TagLib::Ogg::Vorbis::File * vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
|
||||||
|
hasCover = !vorbisFile->tag()->pictureList().isEmpty();
|
||||||
|
}
|
||||||
|
// ----- Opus
|
||||||
|
else if (TagLib::Ogg::Opus::File * opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
|
||||||
|
hasCover = !opusFile->tag()->pictureList().isEmpty();
|
||||||
|
}
|
||||||
|
// ----- WAV
|
||||||
|
else if (TagLib::RIFF::WAV::File * wavFile{ dynamic_cast<TagLib::RIFF::WAV::File*>(f.file()) }) {
|
||||||
|
if (wavFile->hasID3v2Tag()) {
|
||||||
|
const auto& frameListMap{ wavFile->ID3v2Tag()->frameListMap() };
|
||||||
|
hasCover = !frameListMap["APIC"].isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// ----- AIFF
|
||||||
|
else if (TagLib::RIFF::AIFF::File * aiffFile{ dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file())}) {
|
||||||
|
if (aiffFile->hasID3v2Tag()) {
|
||||||
|
const auto& frameListMap{ aiffFile->tag()->frameListMap() };
|
||||||
|
hasCover = !frameListMap["APIC"].isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// ----- WMA
|
||||||
|
else if (TagLib::ASF::File * asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
|
||||||
|
const TagLib::ASF::Tag *tag{ asfFile->tag() };
|
||||||
|
hasCover = tag && tag->attributeListMap().contains("WM/Picture");
|
||||||
|
}
|
||||||
|
// ----- DSF
|
||||||
|
else if (TagLib::DSF::File * dsffile{ dynamic_cast<TagLib::DSF::File *>(f.file())}) {
|
||||||
|
const TagLib::ID3v2::Tag *tag { dsffile->tag() };
|
||||||
|
hasCover = tag && !tag->frameListMap()["APIC"].isEmpty();
|
||||||
|
}
|
||||||
|
// ----- WAVPAK (APE tag)
|
||||||
|
else if (TagLib::WavPack::File * wvFile{dynamic_cast<TagLib::WavPack::File *>(f.file())}) {
|
||||||
|
if (wvFile->hasAPETag()) {
|
||||||
|
// This is the particular string that Picard uses
|
||||||
|
hasCover = !wvFile->APETag()->itemListMap()["COVER ART (FRONT)"].isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return hasCover;
|
||||||
|
}
|
||||||
157
adapters/taglib/taglib_wrapper.go
Normal file
157
adapters/taglib/taglib_wrapper.go
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
package taglib
|
||||||
|
|
||||||
|
/*
|
||||||
|
#cgo !windows pkg-config: --define-prefix taglib
|
||||||
|
#cgo windows pkg-config: taglib
|
||||||
|
#cgo illumos LDFLAGS: -lstdc++ -lsendfile
|
||||||
|
#cgo linux darwin CXXFLAGS: -std=c++11
|
||||||
|
#cgo darwin LDFLAGS: -L/opt/homebrew/opt/taglib/lib
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <stdlib.h>
|
||||||
|
#include <string.h>
|
||||||
|
#include "taglib_wrapper.h"
|
||||||
|
*/
|
||||||
|
import "C"
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"runtime/debug"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"sync/atomic"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
const iTunesKeyPrefix = "----:com.apple.itunes:"
|
||||||
|
|
||||||
|
func Version() string {
|
||||||
|
return C.GoString(C.taglib_version())
|
||||||
|
}
|
||||||
|
|
||||||
|
func Read(filename string) (tags map[string][]string, err error) {
|
||||||
|
// Do not crash on failures in the C code/library
|
||||||
|
debug.SetPanicOnFault(true)
|
||||||
|
defer func() {
|
||||||
|
if r := recover(); r != nil {
|
||||||
|
log.Error("extractor: recovered from panic when reading tags", "file", filename, "error", r)
|
||||||
|
err = fmt.Errorf("extractor: recovered from panic: %s", r)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
fp := getFilename(filename)
|
||||||
|
defer C.free(unsafe.Pointer(fp))
|
||||||
|
id, m, release := newMap()
|
||||||
|
defer release()
|
||||||
|
|
||||||
|
log.Trace("extractor: reading tags", "filename", filename, "map_id", id)
|
||||||
|
res := C.taglib_read(fp, C.ulong(id))
|
||||||
|
switch res {
|
||||||
|
case C.TAGLIB_ERR_PARSE:
|
||||||
|
// Check additional case whether the file is unreadable due to permission
|
||||||
|
file, fileErr := os.OpenFile(filename, os.O_RDONLY, 0600)
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
if os.IsPermission(fileErr) {
|
||||||
|
return nil, fmt.Errorf("navidrome does not have permission: %w", fileErr)
|
||||||
|
} else if fileErr != nil {
|
||||||
|
return nil, fmt.Errorf("cannot parse file media file: %w", fileErr)
|
||||||
|
} else {
|
||||||
|
return nil, fmt.Errorf("cannot parse file media file")
|
||||||
|
}
|
||||||
|
case C.TAGLIB_ERR_AUDIO_PROPS:
|
||||||
|
return nil, fmt.Errorf("can't get audio properties from file")
|
||||||
|
}
|
||||||
|
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||||
|
j, _ := json.Marshal(m)
|
||||||
|
log.Trace("extractor: read tags", "tags", string(j), "filename", filename, "id", id)
|
||||||
|
} else {
|
||||||
|
log.Trace("extractor: read tags", "tags", m, "filename", filename, "id", id)
|
||||||
|
}
|
||||||
|
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type tagMap map[string][]string
|
||||||
|
|
||||||
|
var allMaps sync.Map
|
||||||
|
var mapsNextID atomic.Uint32
|
||||||
|
|
||||||
|
func newMap() (uint32, tagMap, func()) {
|
||||||
|
id := mapsNextID.Add(1)
|
||||||
|
|
||||||
|
m := tagMap{}
|
||||||
|
allMaps.Store(id, m)
|
||||||
|
|
||||||
|
return id, m, func() {
|
||||||
|
allMaps.Delete(id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func doPutTag(id C.ulong, key string, val *C.char) {
|
||||||
|
if key == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
r, _ := allMaps.Load(uint32(id))
|
||||||
|
m := r.(tagMap)
|
||||||
|
k := strings.ToLower(key)
|
||||||
|
v := strings.TrimSpace(C.GoString(val))
|
||||||
|
m[k] = append(m[k], v)
|
||||||
|
}
|
||||||
|
|
||||||
|
//export goPutM4AStr
|
||||||
|
func goPutM4AStr(id C.ulong, key *C.char, val *C.char) {
|
||||||
|
k := C.GoString(key)
|
||||||
|
|
||||||
|
// Special for M4A, do not catch keys that have no actual name
|
||||||
|
k = strings.TrimPrefix(k, iTunesKeyPrefix)
|
||||||
|
doPutTag(id, k, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
//export goPutStr
|
||||||
|
func goPutStr(id C.ulong, key *C.char, val *C.char) {
|
||||||
|
doPutTag(id, C.GoString(key), val)
|
||||||
|
}
|
||||||
|
|
||||||
|
//export goPutInt
|
||||||
|
func goPutInt(id C.ulong, key *C.char, val C.int) {
|
||||||
|
valStr := strconv.Itoa(int(val))
|
||||||
|
vp := C.CString(valStr)
|
||||||
|
defer C.free(unsafe.Pointer(vp))
|
||||||
|
goPutStr(id, key, vp)
|
||||||
|
}
|
||||||
|
|
||||||
|
//export goPutLyrics
|
||||||
|
func goPutLyrics(id C.ulong, lang *C.char, val *C.char) {
|
||||||
|
doPutTag(id, "lyrics:"+C.GoString(lang), val)
|
||||||
|
}
|
||||||
|
|
||||||
|
//export goPutLyricLine
|
||||||
|
func goPutLyricLine(id C.ulong, lang *C.char, text *C.char, time C.int) {
|
||||||
|
language := C.GoString(lang)
|
||||||
|
line := C.GoString(text)
|
||||||
|
timeGo := int64(time)
|
||||||
|
|
||||||
|
ms := timeGo % 1000
|
||||||
|
timeGo /= 1000
|
||||||
|
sec := timeGo % 60
|
||||||
|
timeGo /= 60
|
||||||
|
minimum := timeGo % 60
|
||||||
|
formattedLine := fmt.Sprintf("[%02d:%02d.%02d]%s\n", minimum, sec, ms/10, line)
|
||||||
|
|
||||||
|
key := "lyrics:" + language
|
||||||
|
|
||||||
|
r, _ := allMaps.Load(uint32(id))
|
||||||
|
m := r.(tagMap)
|
||||||
|
k := strings.ToLower(key)
|
||||||
|
existing, ok := m[k]
|
||||||
|
if ok {
|
||||||
|
existing[0] += formattedLine
|
||||||
|
} else {
|
||||||
|
m[k] = []string{formattedLine}
|
||||||
|
}
|
||||||
|
}
|
||||||
24
adapters/taglib/taglib_wrapper.h
Normal file
24
adapters/taglib/taglib_wrapper.h
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
/* Error codes returned by taglib_read. */
#define TAGLIB_ERR_PARSE -1
#define TAGLIB_ERR_AUDIO_PROPS -2

#ifdef __cplusplus
extern "C" {
#endif

/* Windows expects wide-character filenames; other platforms use char. */
#ifdef WIN32
#define FILENAME_CHAR_T wchar_t
#else
#define FILENAME_CHAR_T char
#endif

/* Callbacks implemented in Go (cgo //export) used to hand parsed tag data
 * back to the Go side, keyed by the request id. */
extern void goPutM4AStr(unsigned long id, char *key, char *val);
extern void goPutStr(unsigned long id, char *key, char *val);
extern void goPutInt(unsigned long id, char *key, int val);
extern void goPutLyrics(unsigned long id, char *lang, char *val);
extern void goPutLyricLine(unsigned long id, char *lang, char *text, int time);

/* Parses the given file, reporting tags via the callbacks above. Returns
 * one of the TAGLIB_ERR_* codes on failure (see taglib_wrapper.cpp for the
 * success value — presumably non-negative; confirm there). */
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id);

/* Returns the linked TagLib version string. */
char* taglib_version();

#ifdef __cplusplus
}
#endif
|
||||||
186
cmd/backup.go
Normal file
186
cmd/backup.go
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/db"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Flag values shared by the "backup" subcommands.
var (
	backupCount int    // number of backups to keep when pruning (-1 = use config default)
	backupDir   string // overrides conf.Server.Backup.Path when non-empty
	force       bool   // skip interactive confirmation prompts
	restorePath string // backup file to restore from
)
|
||||||
|
|
||||||
|
// init wires the create, prune and restore subcommands under "backup".
func init() {
	rootCmd.AddCommand(backupRoot)

	// "backup create": one-off manual backup.
	backupCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory to manually make backup")
	backupRoot.AddCommand(backupCmd)

	// "backup prune": delete old backups according to the retention rules.
	pruneCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory holding Navidrome backups")
	pruneCmd.Flags().IntVarP(&backupCount, "keep-count", "k", -1, "specify the number of backups to keep. 0 remove ALL backups, and negative values mean to use the default from configuration")
	pruneCmd.Flags().BoolVarP(&force, "force", "f", false, "bypass warning when backup count is zero")
	backupRoot.AddCommand(pruneCmd)

	// "backup restore": restore the database from a backup file (offline only).
	restoreCommand.Flags().StringVarP(&restorePath, "backup-file", "b", "", "path of backup database to restore")
	restoreCommand.Flags().BoolVarP(&force, "force", "f", false, "bypass restore warning")
	_ = restoreCommand.MarkFlagRequired("backup-file")
	backupRoot.AddCommand(restoreCommand)
}
|
||||||
|
|
||||||
|
// Cobra command definitions for the "backup" command tree.
var (
	// backupRoot is the parent "backup" command; it only groups subcommands.
	backupRoot = &cobra.Command{
		Use:     "backup",
		Aliases: []string{"bkp"},
		Short:   "Create, restore and prune database backups",
		Long:    "Create, restore and prune database backups",
	}

	// backupCmd performs a one-off manual backup, ignoring BackupCount.
	backupCmd = &cobra.Command{
		Use:   "create",
		Short: "Create a backup database",
		Long:  "Manually backup Navidrome database. This will ignore BackupCount",
		Run: func(cmd *cobra.Command, _ []string) {
			runBackup(cmd.Context())
		},
	}

	// pruneCmd removes old backups according to the retention rules.
	pruneCmd = &cobra.Command{
		Use:   "prune",
		Short: "Prune database backups",
		Long:  "Manually prune database backups according to backup rules",
		Run: func(cmd *cobra.Command, _ []string) {
			runPrune(cmd.Context())
		},
	}

	// restoreCommand restores the database from a backup file; the server
	// must be offline while it runs.
	restoreCommand = &cobra.Command{
		Use:   "restore",
		Short: "Restore Navidrome database",
		Long:  "Restore Navidrome database from a backup. This must be done offline",
		Run: func(cmd *cobra.Command, _ []string) {
			runRestore(cmd.Context())
		},
	}
)
|
||||||
|
|
||||||
|
func runBackup(ctx context.Context) {
|
||||||
|
if backupDir != "" {
|
||||||
|
conf.Server.Backup.Path = backupDir
|
||||||
|
}
|
||||||
|
|
||||||
|
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||||
|
var path string
|
||||||
|
|
||||||
|
if idx == -1 {
|
||||||
|
path = conf.Server.DbPath
|
||||||
|
} else {
|
||||||
|
path = conf.Server.DbPath[:idx]
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||||
|
log.Fatal("No existing database", "path", path)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
start := time.Now()
|
||||||
|
path, err := db.Backup(ctx)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error backing up database", "backup path", conf.Server.BasePath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
elapsed := time.Since(start)
|
||||||
|
log.Info("Backup complete", "elapsed", elapsed, "path", path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runPrune(ctx context.Context) {
|
||||||
|
if backupDir != "" {
|
||||||
|
conf.Server.Backup.Path = backupDir
|
||||||
|
}
|
||||||
|
|
||||||
|
if backupCount != -1 {
|
||||||
|
conf.Server.Backup.Count = backupCount
|
||||||
|
}
|
||||||
|
|
||||||
|
if conf.Server.Backup.Count == 0 && !force {
|
||||||
|
fmt.Println("Warning: pruning ALL backups")
|
||||||
|
fmt.Printf("Please enter YES (all caps) to continue: ")
|
||||||
|
var input string
|
||||||
|
_, err := fmt.Scanln(&input)
|
||||||
|
|
||||||
|
if input != "YES" || err != nil {
|
||||||
|
log.Warn("Prune cancelled")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||||
|
var path string
|
||||||
|
|
||||||
|
if idx == -1 {
|
||||||
|
path = conf.Server.DbPath
|
||||||
|
} else {
|
||||||
|
path = conf.Server.DbPath[:idx]
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||||
|
log.Fatal("No existing database", "path", path)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
start := time.Now()
|
||||||
|
count, err := db.Prune(ctx)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error pruning up database", "backup path", conf.Server.BasePath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
elapsed := time.Since(start)
|
||||||
|
|
||||||
|
log.Info("Prune complete", "elapsed", elapsed, "successfully pruned", count)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runRestore(ctx context.Context) {
|
||||||
|
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||||
|
var path string
|
||||||
|
|
||||||
|
if idx == -1 {
|
||||||
|
path = conf.Server.DbPath
|
||||||
|
} else {
|
||||||
|
path = conf.Server.DbPath[:idx]
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||||
|
log.Fatal("No existing database", "path", path)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !force {
|
||||||
|
fmt.Println("Warning: restoring the Navidrome database should only be done offline, especially if your backup is very old.")
|
||||||
|
fmt.Printf("Please enter YES (all caps) to continue: ")
|
||||||
|
var input string
|
||||||
|
_, err := fmt.Scanln(&input)
|
||||||
|
|
||||||
|
if input != "YES" || err != nil {
|
||||||
|
log.Warn("Restore cancelled")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
start := time.Now()
|
||||||
|
err := db.Restore(ctx, restorePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error restoring database", "backup path", conf.Server.BasePath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
elapsed := time.Since(start)
|
||||||
|
log.Info("Restore complete", "elapsed", elapsed)
|
||||||
|
}
|
||||||
17
cmd/cmd_suite_test.go
Normal file
17
cmd/cmd_suite_test.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestCmd bootstraps the Ginkgo test suite for the cmd package.
func TestCmd(t *testing.T) {
	tests.Init(t, false)
	// Silence everything below fatal while the suite runs.
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Cmd Suite")
}
|
||||||
35
cmd/index.go
Normal file
35
cmd/index.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/navidrome/navidrome/db"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// indexCmd is the parent "index" command; it only groups index subcommands.
var indexCmd = &cobra.Command{
	Use:   "index",
	Short: "Manage Meilisearch index",
}
|
||||||
|
|
||||||
|
var indexFullCmd = &cobra.Command{
|
||||||
|
Use: "full",
|
||||||
|
Short: "Full re-index of all media files, albums, and artists",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
ctx := cmd.Context()
|
||||||
|
defer db.Init(ctx)()
|
||||||
|
|
||||||
|
ds := CreateDataStore()
|
||||||
|
|
||||||
|
err := ds.ReindexAll(ctx)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Error during full re-index", err)
|
||||||
|
} else {
|
||||||
|
log.Info("Full re-index completed successfully")
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// init registers the "index" command tree with the root command.
func init() {
	rootCmd.AddCommand(indexCmd)
	indexCmd.AddCommand(indexFullCmd)
}
|
||||||
79
cmd/inspect.go
Normal file
79
cmd/inspect.go
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/core"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	// format holds the value of the --format flag for the inspect command.
	format string
)
|
||||||
|
|
||||||
|
// init registers the "inspect" command and its output-format flag.
func init() {
	inspectCmd.Flags().StringVarP(&format, "format", "f", "jsonindent", "output format (pretty, toml, yaml, json, jsonindent)")
	rootCmd.AddCommand(inspectCmd)
}
|
||||||
|
|
||||||
|
// inspectCmd shows the tags of one or more audio files as Navidrome sees them.
var inspectCmd = &cobra.Command{
	Use:   "inspect [files to inspect]",
	Short: "Inspect tags",
	Long:  "Show file tags as seen by Navidrome",
	Args:  cobra.MinimumNArgs(1),
	Run: func(cmd *cobra.Command, args []string) {
		runInspector(args)
	},
}
|
||||||
|
|
||||||
|
// marshalers maps each supported --format value to its serializer.
var marshalers = map[string]func(interface{}) ([]byte, error){
	"pretty": prettyMarshal,
	"toml":   toml.Marshal,
	"yaml":   yaml.Marshal,
	"json":   json.Marshal,
	"jsonindent": func(v interface{}) ([]byte, error) {
		return json.MarshalIndent(v, "", "  ")
	},
}
|
||||||
|
|
||||||
|
func prettyMarshal(v interface{}) ([]byte, error) {
|
||||||
|
out := v.([]core.InspectOutput)
|
||||||
|
var res strings.Builder
|
||||||
|
for i := range out {
|
||||||
|
res.WriteString(fmt.Sprintf("====================\nFile: %s\n\n", out[i].File))
|
||||||
|
t, _ := toml.Marshal(out[i].RawTags)
|
||||||
|
res.WriteString(fmt.Sprintf("Raw tags:\n%s\n\n", t))
|
||||||
|
t, _ = toml.Marshal(out[i].MappedTags)
|
||||||
|
res.WriteString(fmt.Sprintf("Mapped tags:\n%s\n\n", t))
|
||||||
|
}
|
||||||
|
return []byte(res.String()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func runInspector(args []string) {
|
||||||
|
marshal := marshalers[format]
|
||||||
|
if marshal == nil {
|
||||||
|
log.Fatal("Invalid format", "format", format)
|
||||||
|
}
|
||||||
|
var out []core.InspectOutput
|
||||||
|
for _, filePath := range args {
|
||||||
|
if !model.IsAudioFile(filePath) {
|
||||||
|
log.Warn("Not an audio file", "file", filePath)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
output, err := core.Inspect(filePath, 1, "")
|
||||||
|
if err != nil {
|
||||||
|
log.Warn("Unable to process file", "file", filePath, "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
out = append(out, *output)
|
||||||
|
}
|
||||||
|
data, _ := marshal(out)
|
||||||
|
fmt.Println(string(data))
|
||||||
|
}
|
||||||
139
cmd/pls.go
Normal file
139
cmd/pls.go
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/csv"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/Masterminds/squirrel"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Flag values for the "pls" command and its "list" subcommand.
var (
	playlistID   string // playlist name or ID to export
	outputFile   string // destination file ("" or "-" means stdout)
	userID       string // restrict listing to playlists owned by this user
	outputFormat string // list output format: "csv" or "json"
)
|
||||||
|
|
||||||
|
// displayPlaylist is the JSON shape emitted by "pls list --format json".
type displayPlaylist struct {
	Id        string `json:"id"`
	Name      string `json:"name"`
	OwnerName string `json:"ownerName"`
	OwnerId   string `json:"ownerId"`
	Public    bool   `json:"public"`
}

// displayPlaylists is a list of playlists in display form.
type displayPlaylists []displayPlaylist
|
||||||
|
|
||||||
|
// init wires the "pls" export command and its "list" subcommand.
func init() {
	plsCmd.Flags().StringVarP(&playlistID, "playlist", "p", "", "playlist name or ID")
	plsCmd.Flags().StringVarP(&outputFile, "output", "o", "", "output file (default stdout)")
	_ = plsCmd.MarkFlagRequired("playlist")
	rootCmd.AddCommand(plsCmd)

	listCommand.Flags().StringVarP(&userID, "user", "u", "", "username or ID")
	listCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
	plsCmd.AddCommand(listCommand)
}
|
||||||
|
|
||||||
|
// Cobra command definitions for the "pls" command tree.
var (
	// plsCmd exports a single playlist as an M3U file.
	plsCmd = &cobra.Command{
		Use:   "pls",
		Short: "Export playlists",
		Long:  "Export Navidrome playlists to M3U files",
		Run: func(cmd *cobra.Command, args []string) {
			runExporter(cmd.Context())
		},
	}

	// listCommand lists playlists, optionally filtered by owner.
	listCommand = &cobra.Command{
		Use:   "list",
		Short: "List playlists",
		Run: func(cmd *cobra.Command, args []string) {
			runList(cmd.Context())
		},
	}
)
|
||||||
|
|
||||||
|
func runExporter(ctx context.Context) {
|
||||||
|
ds, ctx := getAdminContext(ctx)
|
||||||
|
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
|
||||||
|
if err != nil && !errors.Is(err, model.ErrNotFound) {
|
||||||
|
log.Fatal("Error retrieving playlist", "name", playlistID, err)
|
||||||
|
}
|
||||||
|
if errors.Is(err, model.ErrNotFound) {
|
||||||
|
playlists, err := ds.Playlist(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"playlist.name": playlistID}})
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error retrieving playlist", "name", playlistID, err)
|
||||||
|
}
|
||||||
|
if len(playlists) > 0 {
|
||||||
|
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true, false)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error retrieving playlist", "name", playlistID, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if playlist == nil {
|
||||||
|
log.Fatal("Playlist not found", "name", playlistID)
|
||||||
|
}
|
||||||
|
pls := playlist.ToM3U8()
|
||||||
|
if outputFile == "-" || outputFile == "" {
|
||||||
|
println(pls)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err = os.WriteFile(outputFile, []byte(pls), 0600)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error writing to the output file", "file", outputFile, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func runList(ctx context.Context) {
|
||||||
|
if outputFormat != "csv" && outputFormat != "json" {
|
||||||
|
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
ds, ctx := getAdminContext(ctx)
|
||||||
|
options := model.QueryOptions{Sort: "owner_name"}
|
||||||
|
|
||||||
|
if userID != "" {
|
||||||
|
user, err := getUser(ctx, userID, ds)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(ctx, "Error retrieving user", "username or id", userID)
|
||||||
|
}
|
||||||
|
options.Filters = squirrel.Eq{"owner_id": user.ID}
|
||||||
|
}
|
||||||
|
|
||||||
|
playlists, err := ds.Playlist(ctx).GetAll(options)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(ctx, "Failed to retrieve playlists", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if outputFormat == "csv" {
|
||||||
|
w := csv.NewWriter(os.Stdout)
|
||||||
|
_ = w.Write([]string{"playlist id", "playlist name", "owner id", "owner name", "public"})
|
||||||
|
for _, playlist := range playlists {
|
||||||
|
_ = w.Write([]string{playlist.ID, playlist.Name, playlist.OwnerID, playlist.OwnerName, strconv.FormatBool(playlist.Public)})
|
||||||
|
}
|
||||||
|
w.Flush()
|
||||||
|
} else {
|
||||||
|
display := make(displayPlaylists, len(playlists))
|
||||||
|
for idx, playlist := range playlists {
|
||||||
|
display[idx].Id = playlist.ID
|
||||||
|
display[idx].Name = playlist.Name
|
||||||
|
display[idx].OwnerId = playlist.OwnerID
|
||||||
|
display[idx].OwnerName = playlist.OwnerName
|
||||||
|
display[idx].Public = playlist.Public
|
||||||
|
}
|
||||||
|
|
||||||
|
j, _ := json.Marshal(display)
|
||||||
|
fmt.Printf("%s\n", j)
|
||||||
|
}
|
||||||
|
}
|
||||||
716
cmd/plugin.go
Normal file
716
cmd/plugin.go
Normal file
@@ -0,0 +1,716 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
	"cmp"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"text/tabwriter"
	"time"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/plugins"
	"github.com/navidrome/navidrome/plugins/schema"
	"github.com/navidrome/navidrome/utils"
	"github.com/navidrome/navidrome/utils/slice"
	"github.com/spf13/cobra"
)
|
||||||
|
|
||||||
|
const (
	// pluginPackageExtension is the file extension of Navidrome plugin packages.
	pluginPackageExtension = ".ndp"
	// pluginDirPermissions restricts plugin directories to the owner.
	pluginDirPermissions = 0700
	// pluginFilePermissions restricts plugin files to the owner.
	pluginFilePermissions = 0600
)
|
||||||
|
|
||||||
|
// init builds the "plugin" command tree and registers it with the root command.
func init() {
	// Parent command; only groups the subcommands below.
	pluginCmd := &cobra.Command{
		Use:   "plugin",
		Short: "Manage Navidrome plugins",
		Long:  "Commands for managing Navidrome plugins",
	}

	listCmd := &cobra.Command{
		Use:   "list",
		Short: "List installed plugins",
		Long:  "List all installed plugins with their metadata",
		Run:   pluginList,
	}

	infoCmd := &cobra.Command{
		Use:   "info [pluginPackage|pluginName]",
		Short: "Show details of a plugin",
		Long:  "Show detailed information about a plugin package (.ndp file) or an installed plugin",
		Args:  cobra.ExactArgs(1),
		Run:   pluginInfo,
	}

	installCmd := &cobra.Command{
		Use:   "install [pluginPackage]",
		Short: "Install a plugin from a .ndp file",
		Long:  "Install a Navidrome Plugin Package (.ndp) file",
		Args:  cobra.ExactArgs(1),
		Run:   pluginInstall,
	}

	removeCmd := &cobra.Command{
		Use:   "remove [pluginName]",
		Short: "Remove an installed plugin",
		Long:  "Remove a plugin by name",
		Args:  cobra.ExactArgs(1),
		Run:   pluginRemove,
	}

	updateCmd := &cobra.Command{
		Use:   "update [pluginPackage]",
		Short: "Update an existing plugin",
		Long:  "Update an installed plugin with a new version from a .ndp file",
		Args:  cobra.ExactArgs(1),
		Run:   pluginUpdate,
	}

	refreshCmd := &cobra.Command{
		Use:   "refresh [pluginName]",
		Short: "Reload a plugin without restarting Navidrome",
		Long:  "Reload and recompile a plugin without needing to restart Navidrome",
		Args:  cobra.ExactArgs(1),
		Run:   pluginRefresh,
	}

	// dev symlinks a working folder into the plugins dir for development.
	devCmd := &cobra.Command{
		Use:   "dev [folder_path]",
		Short: "Create symlink to development folder",
		Long:  "Create a symlink from a plugin development folder to the plugins directory for easier development",
		Args:  cobra.ExactArgs(1),
		Run:   pluginDev,
	}

	pluginCmd.AddCommand(listCmd, infoCmd, installCmd, removeCmd, updateCmd, refreshCmd, devCmd)
	rootCmd.AddCommand(pluginCmd)
}
|
||||||
|
|
||||||
|
// Validation helpers
|
||||||
|
|
||||||
|
func validatePluginPackageFile(path string) error {
|
||||||
|
if !utils.FileExists(path) {
|
||||||
|
return fmt.Errorf("plugin package not found: %s", path)
|
||||||
|
}
|
||||||
|
if filepath.Ext(path) != pluginPackageExtension {
|
||||||
|
return fmt.Errorf("not a valid plugin package: %s (expected %s extension)", path, pluginPackageExtension)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validatePluginDirectory(pluginsDir, pluginName string) (string, error) {
|
||||||
|
pluginDir := filepath.Join(pluginsDir, pluginName)
|
||||||
|
if !utils.FileExists(pluginDir) {
|
||||||
|
return "", fmt.Errorf("plugin not found: %s (path: %s)", pluginName, pluginDir)
|
||||||
|
}
|
||||||
|
return pluginDir, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolvePluginPath resolves a plugin entry to its real directory. For a
// plain directory it returns the path unchanged; for a symlink (dev-mode
// install) it resolves the link, validating that the target exists and is
// a directory. isSymlink reports which case was taken.
func resolvePluginPath(pluginDir string) (resolvedPath string, isSymlink bool, err error) {
	info, err := os.Lstat(pluginDir)
	if err != nil {
		return "", false, fmt.Errorf("failed to stat plugin: %w", err)
	}

	if info.Mode()&os.ModeSymlink == 0 {
		// Regular entry: it must be a directory to qualify as a plugin.
		if !info.IsDir() {
			return "", false, fmt.Errorf("not a valid plugin directory: %s", pluginDir)
		}
		return pluginDir, false, nil
	}

	// Symlinked plugin: resolve and validate the target.
	target, err := os.Readlink(pluginDir)
	if err != nil {
		return "", true, fmt.Errorf("failed to resolve symlink: %w", err)
	}
	// Relative targets are interpreted relative to the symlink's directory.
	if !filepath.IsAbs(target) {
		target = filepath.Join(filepath.Dir(pluginDir), target)
	}
	targetInfo, err := os.Stat(target)
	if err != nil {
		return "", true, fmt.Errorf("failed to access symlink target %s: %w", target, err)
	}
	if !targetInfo.IsDir() {
		return "", true, fmt.Errorf("symlink target is not a directory: %s", target)
	}
	return target, true, nil
}
|
||||||
|
|
||||||
|
// Package handling helpers
|
||||||
|
|
||||||
|
// loadAndValidatePackage checks that ndpPath is an existing .ndp file and
// loads the plugin package from it.
func loadAndValidatePackage(ndpPath string) (*plugins.PluginPackage, error) {
	if err := validatePluginPackageFile(ndpPath); err != nil {
		return nil, err
	}

	pkg, err := plugins.LoadPackage(ndpPath)
	if err != nil {
		return nil, fmt.Errorf("failed to load plugin package: %w", err)
	}

	return pkg, nil
}
|
||||||
|
|
||||||
|
// extractAndSetupPlugin unpacks a .ndp package into targetDir and then
// tightens the permissions of the extracted files.
func extractAndSetupPlugin(ndpPath, targetDir string) error {
	if err := plugins.ExtractPackage(ndpPath, targetDir); err != nil {
		return fmt.Errorf("failed to extract plugin package: %w", err)
	}

	ensurePluginDirPermissions(targetDir)
	return nil
}
|
||||||
|
|
||||||
|
// Display helpers
|
||||||
|
|
||||||
|
func displayPluginTableRow(w *tabwriter.Writer, discovery plugins.PluginDiscoveryEntry) {
|
||||||
|
if discovery.Error != nil {
|
||||||
|
// Handle global errors (like directory read failure)
|
||||||
|
if discovery.ID == "" {
|
||||||
|
log.Error("Failed to read plugins directory", "folder", conf.Server.Plugins.Folder, discovery.Error)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Handle individual plugin errors - show them in the table
|
||||||
|
fmt.Fprintf(w, "%s\tERROR\tERROR\tERROR\tERROR\t%v\n", discovery.ID, discovery.Error)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mark symlinks with an indicator
|
||||||
|
nameDisplay := discovery.Manifest.Name
|
||||||
|
if discovery.IsSymlink {
|
||||||
|
nameDisplay = nameDisplay + " (dev)"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert capabilities to strings
|
||||||
|
capabilities := slice.Map(discovery.Manifest.Capabilities, func(cap schema.PluginManifestCapabilitiesElem) string {
|
||||||
|
return string(cap)
|
||||||
|
})
|
||||||
|
|
||||||
|
fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\n",
|
||||||
|
discovery.ID,
|
||||||
|
nameDisplay,
|
||||||
|
cmp.Or(discovery.Manifest.Author, "-"),
|
||||||
|
cmp.Or(discovery.Manifest.Version, "-"),
|
||||||
|
strings.Join(capabilities, ", "),
|
||||||
|
cmp.Or(discovery.Manifest.Description, "-"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func displayTypedPermissions(permissions schema.PluginManifestPermissions, indent string) {
|
||||||
|
if permissions.Http != nil {
|
||||||
|
fmt.Printf("%shttp:\n", indent)
|
||||||
|
fmt.Printf("%s Reason: %s\n", indent, permissions.Http.Reason)
|
||||||
|
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Http.AllowLocalNetwork)
|
||||||
|
fmt.Printf("%s Allowed URLs:\n", indent)
|
||||||
|
for urlPattern, methodEnums := range permissions.Http.AllowedUrls {
|
||||||
|
methods := make([]string, len(methodEnums))
|
||||||
|
for i, methodEnum := range methodEnums {
|
||||||
|
methods[i] = string(methodEnum)
|
||||||
|
}
|
||||||
|
fmt.Printf("%s %s: [%s]\n", indent, urlPattern, strings.Join(methods, ", "))
|
||||||
|
}
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
if permissions.Config != nil {
|
||||||
|
fmt.Printf("%sconfig:\n", indent)
|
||||||
|
fmt.Printf("%s Reason: %s\n", indent, permissions.Config.Reason)
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
if permissions.Scheduler != nil {
|
||||||
|
fmt.Printf("%sscheduler:\n", indent)
|
||||||
|
fmt.Printf("%s Reason: %s\n", indent, permissions.Scheduler.Reason)
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
if permissions.Websocket != nil {
|
||||||
|
fmt.Printf("%swebsocket:\n", indent)
|
||||||
|
fmt.Printf("%s Reason: %s\n", indent, permissions.Websocket.Reason)
|
||||||
|
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Websocket.AllowLocalNetwork)
|
||||||
|
fmt.Printf("%s Allowed URLs: [%s]\n", indent, strings.Join(permissions.Websocket.AllowedUrls, ", "))
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
if permissions.Cache != nil {
|
||||||
|
fmt.Printf("%scache:\n", indent)
|
||||||
|
fmt.Printf("%s Reason: %s\n", indent, permissions.Cache.Reason)
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
if permissions.Artwork != nil {
|
||||||
|
fmt.Printf("%sartwork:\n", indent)
|
||||||
|
fmt.Printf("%s Reason: %s\n", indent, permissions.Artwork.Reason)
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
if permissions.Subsonicapi != nil {
|
||||||
|
allowedUsers := "All Users"
|
||||||
|
if len(permissions.Subsonicapi.AllowedUsernames) > 0 {
|
||||||
|
allowedUsers = strings.Join(permissions.Subsonicapi.AllowedUsernames, ", ")
|
||||||
|
}
|
||||||
|
fmt.Printf("%ssubsonicapi:\n", indent)
|
||||||
|
fmt.Printf("%s Reason: %s\n", indent, permissions.Subsonicapi.Reason)
|
||||||
|
fmt.Printf("%s Allow Admins: %t\n", indent, permissions.Subsonicapi.AllowAdmins)
|
||||||
|
fmt.Printf("%s Allowed Usernames: [%s]\n", indent, allowedUsers)
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func displayPluginDetails(manifest *schema.PluginManifest, fileInfo *pluginFileInfo, permInfo *pluginPermissionInfo) {
|
||||||
|
fmt.Println("\nPlugin Information:")
|
||||||
|
fmt.Printf(" Name: %s\n", manifest.Name)
|
||||||
|
fmt.Printf(" Author: %s\n", manifest.Author)
|
||||||
|
fmt.Printf(" Version: %s\n", manifest.Version)
|
||||||
|
fmt.Printf(" Description: %s\n", manifest.Description)
|
||||||
|
|
||||||
|
fmt.Print(" Capabilities: ")
|
||||||
|
capabilities := make([]string, len(manifest.Capabilities))
|
||||||
|
for i, cap := range manifest.Capabilities {
|
||||||
|
capabilities[i] = string(cap)
|
||||||
|
}
|
||||||
|
fmt.Print(strings.Join(capabilities, ", "))
|
||||||
|
fmt.Println()
|
||||||
|
|
||||||
|
// Display manifest permissions using the typed permissions
|
||||||
|
fmt.Println(" Required Permissions:")
|
||||||
|
displayTypedPermissions(manifest.Permissions, " ")
|
||||||
|
|
||||||
|
// Print file information if available
|
||||||
|
if fileInfo != nil {
|
||||||
|
fmt.Println("Package Information:")
|
||||||
|
fmt.Printf(" File: %s\n", fileInfo.path)
|
||||||
|
fmt.Printf(" Size: %d bytes (%.2f KB)\n", fileInfo.size, float64(fileInfo.size)/1024)
|
||||||
|
fmt.Printf(" SHA-256: %s\n", fileInfo.hash)
|
||||||
|
fmt.Printf(" Modified: %s\n", fileInfo.modTime.Format(time.RFC3339))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print file permissions information if available
|
||||||
|
if permInfo != nil {
|
||||||
|
fmt.Println("File Permissions:")
|
||||||
|
fmt.Printf(" Plugin Directory: %s (%s)\n", permInfo.dirPath, permInfo.dirMode)
|
||||||
|
if permInfo.isSymlink {
|
||||||
|
fmt.Printf(" Symlink Target: %s (%s)\n", permInfo.targetPath, permInfo.targetMode)
|
||||||
|
}
|
||||||
|
fmt.Printf(" Manifest File: %s\n", permInfo.manifestMode)
|
||||||
|
if permInfo.wasmMode != "" {
|
||||||
|
fmt.Printf(" WASM File: %s\n", permInfo.wasmMode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// pluginFileInfo holds basic facts about a plugin package file (.ndp).
type pluginFileInfo struct {
	path    string    // package path as given by the user
	size    int64     // file size in bytes
	hash    string    // SHA-256 of the file contents, hex-encoded
	modTime time.Time // last modification time
}
|
||||||
|
|
||||||
|
// pluginPermissionInfo describes the on-disk permissions of an installed
// plugin's directory and its key files.
type pluginPermissionInfo struct {
	dirPath      string // plugin directory path inside the plugins folder
	dirMode      string // mode string of the directory (or symlink) entry
	isSymlink    bool   // true when the plugin dir is a dev-mode symlink
	targetPath   string // resolved symlink target (set when isSymlink)
	targetMode   string // mode string of the symlink target
	manifestMode string // mode string of manifest.json
	wasmMode     string // mode string of the first .wasm file found
}
|
||||||
|
|
||||||
|
func getFileInfo(path string) *pluginFileInfo {
|
||||||
|
fileInfo, err := os.Stat(path)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to get file information", err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return &pluginFileInfo{
|
||||||
|
path: path,
|
||||||
|
size: fileInfo.Size(),
|
||||||
|
hash: calculateSHA256(path),
|
||||||
|
modTime: fileInfo.ModTime(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getPermissionInfo inspects the on-disk permissions of an installed
// plugin: its directory (including the target if it is a dev-mode
// symlink), its manifest.json, and the first .wasm file found. Returns
// nil only when the plugin directory itself cannot be stat'd; all other
// lookups are best-effort and simply leave their fields empty on failure.
func getPermissionInfo(pluginDir string) *pluginPermissionInfo {
	// Get plugin directory permissions. Lstat, not Stat, so a symlink is
	// reported as a symlink rather than as its target.
	dirInfo, err := os.Lstat(pluginDir)
	if err != nil {
		log.Error("Failed to get plugin directory permissions", err)
		return nil
	}

	permInfo := &pluginPermissionInfo{
		dirPath: pluginDir,
		dirMode: dirInfo.Mode().String(),
	}

	// Check if it's a symlink (dev-mode install)
	if dirInfo.Mode()&os.ModeSymlink != 0 {
		permInfo.isSymlink = true

		// Get target path and permissions; errors are non-fatal here.
		targetPath, err := os.Readlink(pluginDir)
		if err == nil {
			// Relative targets are resolved against the symlink's parent.
			if !filepath.IsAbs(targetPath) {
				targetPath = filepath.Join(filepath.Dir(pluginDir), targetPath)
			}
			permInfo.targetPath = targetPath

			if targetInfo, err := os.Stat(targetPath); err == nil {
				permInfo.targetMode = targetInfo.Mode().String()
			}
		}
	}

	// Get manifest file permissions
	manifestPath := filepath.Join(pluginDir, "manifest.json")
	if manifestInfo, err := os.Stat(manifestPath); err == nil {
		permInfo.manifestMode = manifestInfo.Mode().String()
	}

	// Get WASM file permissions (look for .wasm files)
	entries, err := os.ReadDir(pluginDir)
	if err == nil {
		for _, entry := range entries {
			if filepath.Ext(entry.Name()) == ".wasm" {
				wasmPath := filepath.Join(pluginDir, entry.Name())
				if wasmInfo, err := os.Stat(wasmPath); err == nil {
					permInfo.wasmMode = wasmInfo.Mode().String()
					break // Just show the first WASM file found
				}
			}
		}
	}

	return permInfo
}
|
||||||
|
|
||||||
|
// Command implementations
|
||||||
|
|
||||||
|
func pluginList(cmd *cobra.Command, args []string) {
|
||||||
|
discoveries := plugins.DiscoverPlugins(conf.Server.Plugins.Folder)
|
||||||
|
|
||||||
|
w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
|
||||||
|
fmt.Fprintln(w, "ID\tNAME\tAUTHOR\tVERSION\tCAPABILITIES\tDESCRIPTION")
|
||||||
|
|
||||||
|
for _, discovery := range discoveries {
|
||||||
|
displayPluginTableRow(w, discovery)
|
||||||
|
}
|
||||||
|
w.Flush()
|
||||||
|
}
|
||||||
|
|
||||||
|
// pluginInfo implements "navidrome plugin info". The single argument is
// either a package file (recognized by the pluginPackageExtension suffix) or
// the name of an installed plugin; any failure to load/validate exits via
// log.Fatal.
func pluginInfo(cmd *cobra.Command, args []string) {
	path := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	var manifest *schema.PluginManifest
	var fileInfo *pluginFileInfo
	var permInfo *pluginPermissionInfo

	if filepath.Ext(path) == pluginPackageExtension {
		// It's a package file: show manifest plus size/hash of the package.
		pkg, err := loadAndValidatePackage(path)
		if err != nil {
			log.Fatal("Failed to load plugin package", err)
		}
		manifest = pkg.Manifest
		fileInfo = getFileInfo(path)
		// No permission info for package files
	} else {
		// It's a plugin name: load from the installed plugin directory.
		pluginDir, err := validatePluginDirectory(pluginsDir, path)
		if err != nil {
			log.Fatal("Plugin validation failed", err)
		}

		manifest, err = plugins.LoadManifest(pluginDir)
		if err != nil {
			log.Fatal("Failed to load plugin manifest", err)
		}

		// Get permission info for installed plugins
		permInfo = getPermissionInfo(pluginDir)
	}

	displayPluginDetails(manifest, fileInfo, permInfo)
}
|
||||||
|
|
||||||
|
// pluginInstall implements "navidrome plugin install": validates the given
// .ndp package and extracts it into the plugins folder under the manifest's
// plugin name. Installing over an existing plugin is refused (use "update").
func pluginInstall(cmd *cobra.Command, args []string) {
	ndpPath := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	pkg, err := loadAndValidatePackage(ndpPath)
	if err != nil {
		log.Fatal("Package validation failed", err)
	}

	// Create target directory based on plugin name
	targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)

	// Check if plugin already exists
	if utils.FileExists(targetDir) {
		log.Fatal("Plugin already installed", "name", pkg.Manifest.Name, "path", targetDir,
			"use", "navidrome plugin update")
	}

	if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
		log.Fatal("Plugin installation failed", err)
	}

	fmt.Printf("Plugin '%s' v%s installed successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
}
|
||||||
|
|
||||||
|
func pluginRemove(cmd *cobra.Command, args []string) {
|
||||||
|
pluginName := args[0]
|
||||||
|
pluginsDir := conf.Server.Plugins.Folder
|
||||||
|
|
||||||
|
pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Plugin validation failed", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, isSymlink, err := resolvePluginPath(pluginDir)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Failed to resolve plugin path", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if isSymlink {
|
||||||
|
// For symlinked plugins (dev mode), just remove the symlink
|
||||||
|
if err := os.Remove(pluginDir); err != nil {
|
||||||
|
log.Fatal("Failed to remove plugin symlink", "name", pluginName, err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Development plugin symlink '%s' removed successfully (target directory preserved)\n", pluginName)
|
||||||
|
} else {
|
||||||
|
// For regular plugins, remove the entire directory
|
||||||
|
if err := os.RemoveAll(pluginDir); err != nil {
|
||||||
|
log.Fatal("Failed to remove plugin directory", "name", pluginName, err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Plugin '%s' removed successfully\n", pluginName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func pluginUpdate(cmd *cobra.Command, args []string) {
|
||||||
|
ndpPath := args[0]
|
||||||
|
pluginsDir := conf.Server.Plugins.Folder
|
||||||
|
|
||||||
|
pkg, err := loadAndValidatePackage(ndpPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Package validation failed", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if plugin exists
|
||||||
|
targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
|
||||||
|
if !utils.FileExists(targetDir) {
|
||||||
|
log.Fatal("Plugin not found", "name", pkg.Manifest.Name, "path", targetDir,
|
||||||
|
"use", "navidrome plugin install")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a backup of the existing plugin
|
||||||
|
backupDir := targetDir + ".bak." + time.Now().Format("20060102150405")
|
||||||
|
if err := os.Rename(targetDir, backupDir); err != nil {
|
||||||
|
log.Fatal("Failed to backup existing plugin", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract the new package
|
||||||
|
if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
|
||||||
|
// Restore backup if extraction failed
|
||||||
|
os.RemoveAll(targetDir)
|
||||||
|
_ = os.Rename(backupDir, targetDir) // Ignore error as we're already in a fatal path
|
||||||
|
log.Fatal("Plugin update failed", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove the backup
|
||||||
|
os.RemoveAll(backupDir)
|
||||||
|
|
||||||
|
fmt.Printf("Plugin '%s' updated to v%s successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
|
||||||
|
}
|
||||||
|
|
||||||
|
// pluginRefresh implements "navidrome plugin refresh": rescans the plugins
// folder and blocks until the named plugin has been (re)compiled. Useful
// after editing a dev-mode (symlinked) plugin in place.
func pluginRefresh(cmd *cobra.Command, args []string) {
	pluginName := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
	if err != nil {
		log.Fatal("Plugin validation failed", err)
	}

	resolvedPath, isSymlink, err := resolvePluginPath(pluginDir)
	if err != nil {
		log.Fatal("Failed to resolve plugin path", err)
	}

	if isSymlink {
		log.Debug("Processing symlinked plugin", "name", pluginName, "link", pluginDir, "target", resolvedPath)
	}

	fmt.Printf("Refreshing plugin '%s'...\n", pluginName)

	// Get the plugin manager and refresh
	mgr := GetPluginManager(cmd.Context())
	log.Debug("Scanning plugins directory", "path", pluginsDir)
	mgr.ScanPlugins()

	log.Info("Waiting for plugin compilation to complete", "name", pluginName)

	// Wait for compilation to complete
	if err := mgr.EnsureCompiled(pluginName); err != nil {
		log.Fatal("Failed to compile refreshed plugin", "name", pluginName, err)
	}

	log.Info("Plugin compilation completed successfully", "name", pluginName)
	fmt.Printf("Plugin '%s' refreshed successfully\n", pluginName)
}
|
||||||
|
|
||||||
|
// pluginDev implements "navidrome plugin dev": symlinks a local plugin source
// directory into the plugins folder so it can be developed in place. The
// plugin name comes from the manifest, falling back to the directory name.
func pluginDev(cmd *cobra.Command, args []string) {
	sourcePath, err := filepath.Abs(args[0])
	if err != nil {
		log.Fatal("Invalid path", "path", args[0], err)
	}
	pluginsDir := conf.Server.Plugins.Folder

	// Validate source directory and manifest
	if err := validateDevSource(sourcePath); err != nil {
		log.Fatal("Source validation failed", err)
	}

	// Load manifest to get plugin name
	manifest, err := plugins.LoadManifest(sourcePath)
	if err != nil {
		log.Fatal("Failed to load plugin manifest", "path", filepath.Join(sourcePath, "manifest.json"), err)
	}

	pluginName := cmp.Or(manifest.Name, filepath.Base(sourcePath))
	targetPath := filepath.Join(pluginsDir, pluginName)

	// Handle existing target.
	// NOTE(review): handleExistingTarget also returns an error when a correct
	// symlink already exists, so that benign case exits through this Fatal
	// with a misleading message — confirm whether that is intended.
	if err := handleExistingTarget(targetPath, sourcePath); err != nil {
		log.Fatal("Failed to handle existing target", err)
	}

	// Create target directory if needed
	if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
		log.Fatal("Failed to create plugins directory", "path", filepath.Dir(targetPath), err)
	}

	// Create the symlink
	if err := os.Symlink(sourcePath, targetPath); err != nil {
		log.Fatal("Failed to create symlink", "source", sourcePath, "target", targetPath, err)
	}

	fmt.Printf("Development symlink created: '%s' -> '%s'\n", targetPath, sourcePath)
	fmt.Println("Plugin can be refreshed with: navidrome plugin refresh", pluginName)
}
|
||||||
|
|
||||||
|
// Utility functions
|
||||||
|
|
||||||
|
func validateDevSource(sourcePath string) error {
|
||||||
|
sourceInfo, err := os.Stat(sourcePath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("source folder not found: %s (%w)", sourcePath, err)
|
||||||
|
}
|
||||||
|
if !sourceInfo.IsDir() {
|
||||||
|
return fmt.Errorf("source path is not a directory: %s", sourcePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
manifestPath := filepath.Join(sourcePath, "manifest.json")
|
||||||
|
if !utils.FileExists(manifestPath) {
|
||||||
|
return fmt.Errorf("source folder missing manifest.json: %s", sourcePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleExistingTarget prepares targetPath for a new dev symlink. If nothing
// exists there it is a no-op; if a symlink to sourcePath already exists it
// returns a sentinel error so the caller stops; otherwise it interactively
// asks the user for permission to replace the existing entry and removes it.
//
// NOTE(review): the "already linked correctly" case is signalled via a
// non-nil error, which the caller currently treats as a fatal failure —
// confirm whether a cleaner signal (e.g. a bool return) is warranted.
func handleExistingTarget(targetPath, sourcePath string) error {
	if !utils.FileExists(targetPath) {
		return nil // Nothing to handle
	}

	// Check if it's already a symlink to our source
	existingLink, err := os.Readlink(targetPath)
	if err == nil && existingLink == sourcePath {
		fmt.Printf("Symlink already exists and points to the correct source\n")
		return fmt.Errorf("symlink already exists") // This will cause early return in caller
	}

	// Handle case where target exists but is not a symlink to our source:
	// ask for confirmation before destroying it. Any read error (including a
	// bare Enter, which Scanln reports as an error) is treated as "no".
	fmt.Printf("Target path '%s' already exists.\n", targetPath)
	fmt.Print("Do you want to replace it? (y/N): ")
	var response string
	_, err = fmt.Scanln(&response)
	if err != nil || strings.ToLower(response) != "y" {
		if err != nil {
			log.Debug("Error reading input, assuming 'no'", err)
		}
		return fmt.Errorf("operation canceled")
	}

	// Remove existing target
	if err := os.RemoveAll(targetPath); err != nil {
		return fmt.Errorf("failed to remove existing target %s: %w", targetPath, err)
	}

	return nil
}
|
||||||
|
|
||||||
|
func ensurePluginDirPermissions(dir string) {
|
||||||
|
if err := os.Chmod(dir, pluginDirPermissions); err != nil {
|
||||||
|
log.Error("Failed to set plugin directory permissions", "dir", dir, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply permissions to all files in the directory
|
||||||
|
entries, err := os.ReadDir(dir)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to read plugin directory", "dir", dir, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
path := filepath.Join(dir, entry.Name())
|
||||||
|
info, err := os.Stat(path)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to stat file", "path", path, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
mode := os.FileMode(pluginFilePermissions) // Files
|
||||||
|
if info.IsDir() {
|
||||||
|
mode = os.FileMode(pluginDirPermissions) // Directories
|
||||||
|
ensurePluginDirPermissions(path) // Recursive
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.Chmod(path, mode); err != nil {
|
||||||
|
log.Error("Failed to set file permissions", "path", path, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func calculateSHA256(filePath string) string {
|
||||||
|
file, err := os.Open(filePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to open file for hashing", err)
|
||||||
|
return "N/A"
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
hasher := sha256.New()
|
||||||
|
if _, err := io.Copy(hasher, file); err != nil {
|
||||||
|
log.Error("Failed to calculate hash", err)
|
||||||
|
return "N/A"
|
||||||
|
}
|
||||||
|
|
||||||
|
return hex.EncodeToString(hasher.Sum(nil))
|
||||||
|
}
|
||||||
193
cmd/plugin_test.go
Normal file
193
cmd/plugin_test.go
Normal file
@@ -0,0 +1,193 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/conf/configtest"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Integration-style tests for the plugin CLI commands. The cobra handlers
// are invoked directly against a temp plugins folder, and os.Stdout is
// redirected through an os.Pipe so the user-facing output can be asserted.
var _ = Describe("Plugin CLI Commands", func() {
	var tempDir string
	var cmd *cobra.Command
	var stdOut *os.File     // write end of the pipe, installed as os.Stdout
	var origStdout *os.File // real stdout, restored after each test
	var outReader *os.File  // read end of the pipe

	// Helper to create a test plugin with the given name and details
	createTestPlugin := func(name, author, version string, capabilities []string) string {
		pluginDir := filepath.Join(tempDir, name)
		Expect(os.MkdirAll(pluginDir, 0755)).To(Succeed())

		// Create a properly formatted capabilities JSON array
		capabilitiesJSON := `"` + strings.Join(capabilities, `", "`) + `"`

		manifest := `{
			"name": "` + name + `",
			"author": "` + author + `",
			"version": "` + version + `",
			"description": "Plugin for testing",
			"website": "https://test.navidrome.org/` + name + `",
			"capabilities": [` + capabilitiesJSON + `],
			"permissions": {}
		}`

		Expect(os.WriteFile(filepath.Join(pluginDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())

		// Create a dummy WASM file
		wasmContent := []byte("dummy wasm content for testing")
		Expect(os.WriteFile(filepath.Join(pluginDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())

		return pluginDir
	}

	// Helper to execute a command and return captured output. Closing the
	// write end unblocks ReadAll on the read end.
	captureOutput := func(reader io.Reader) string {
		stdOut.Close()
		outputBytes, err := io.ReadAll(reader)
		Expect(err).NotTo(HaveOccurred())
		return string(outputBytes)
	}

	BeforeEach(func() {
		DeferCleanup(configtest.SetupConfig())
		tempDir = GinkgoT().TempDir()

		// Setup config
		conf.Server.Plugins.Enabled = true
		conf.Server.Plugins.Folder = tempDir

		// Create a command for testing
		cmd = &cobra.Command{Use: "test"}

		// Setup stdout capture
		origStdout = os.Stdout
		var err error
		outReader, stdOut, err = os.Pipe()
		Expect(err).NotTo(HaveOccurred())
		os.Stdout = stdOut

		DeferCleanup(func() {
			os.Stdout = origStdout
		})
	})

	AfterEach(func() {
		os.Stdout = origStdout
		if stdOut != nil {
			stdOut.Close()
		}
		if outReader != nil {
			outReader.Close()
		}
	})

	Describe("Plugin list command", func() {
		It("should list installed plugins", func() {
			// Create test plugins
			createTestPlugin("plugin1", "Test Author", "1.0.0", []string{"MetadataAgent"})
			createTestPlugin("plugin2", "Another Author", "2.1.0", []string{"Scrobbler"})

			// Execute command
			pluginList(cmd, []string{})

			// Verify output
			output := captureOutput(outReader)

			Expect(output).To(ContainSubstring("plugin1"))
			Expect(output).To(ContainSubstring("Test Author"))
			Expect(output).To(ContainSubstring("1.0.0"))
			Expect(output).To(ContainSubstring("MetadataAgent"))

			Expect(output).To(ContainSubstring("plugin2"))
			Expect(output).To(ContainSubstring("Another Author"))
			Expect(output).To(ContainSubstring("2.1.0"))
			Expect(output).To(ContainSubstring("Scrobbler"))
		})
	})

	Describe("Plugin info command", func() {
		It("should display information about an installed plugin", func() {
			// Create test plugin with multiple capabilities
			createTestPlugin("test-plugin", "Test Author", "1.0.0",
				[]string{"MetadataAgent", "Scrobbler"})

			// Execute command
			pluginInfo(cmd, []string{"test-plugin"})

			// Verify output
			output := captureOutput(outReader)

			Expect(output).To(ContainSubstring("Name: test-plugin"))
			Expect(output).To(ContainSubstring("Author: Test Author"))
			Expect(output).To(ContainSubstring("Version: 1.0.0"))
			Expect(output).To(ContainSubstring("Description: Plugin for testing"))
			Expect(output).To(ContainSubstring("Capabilities: MetadataAgent, Scrobbler"))
		})
	})

	Describe("Plugin remove command", func() {
		It("should remove a regular plugin directory", func() {
			// Create test plugin
			pluginDir := createTestPlugin("regular-plugin", "Test Author", "1.0.0",
				[]string{"MetadataAgent"})

			// Execute command
			pluginRemove(cmd, []string{"regular-plugin"})

			// Verify output
			output := captureOutput(outReader)
			Expect(output).To(ContainSubstring("Plugin 'regular-plugin' removed successfully"))

			// Verify directory is actually removed
			_, err := os.Stat(pluginDir)
			Expect(os.IsNotExist(err)).To(BeTrue())
		})

		It("should remove only the symlink for a development plugin", func() {
			// Create a real source directory
			sourceDir := filepath.Join(GinkgoT().TempDir(), "dev-plugin-source")
			Expect(os.MkdirAll(sourceDir, 0755)).To(Succeed())

			manifest := `{
				"name": "dev-plugin",
				"author": "Dev Author",
				"version": "0.1.0",
				"description": "Development plugin for testing",
				"website": "https://test.navidrome.org/dev-plugin",
				"capabilities": ["Scrobbler"],
				"permissions": {}
			}`
			Expect(os.WriteFile(filepath.Join(sourceDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())

			// Create a dummy WASM file
			wasmContent := []byte("dummy wasm content for testing")
			Expect(os.WriteFile(filepath.Join(sourceDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())

			// Create a symlink in the plugins directory
			symlinkPath := filepath.Join(tempDir, "dev-plugin")
			Expect(os.Symlink(sourceDir, symlinkPath)).To(Succeed())

			// Execute command
			pluginRemove(cmd, []string{"dev-plugin"})

			// Verify output
			output := captureOutput(outReader)
			Expect(output).To(ContainSubstring("Development plugin symlink 'dev-plugin' removed successfully"))
			Expect(output).To(ContainSubstring("target directory preserved"))

			// Verify the symlink is removed but source directory exists
			_, err := os.Lstat(symlinkPath)
			Expect(os.IsNotExist(err)).To(BeTrue())

			_, err = os.Stat(sourceDir)
			Expect(err).NotTo(HaveOccurred())
		})
	})
})
|
||||||
404
cmd/root.go
Normal file
404
cmd/root.go
Normal file
@@ -0,0 +1,404 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"strings"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5/middleware"
|
||||||
|
_ "github.com/navidrome/navidrome/adapters/taglib"
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/consts"
|
||||||
|
"github.com/navidrome/navidrome/db"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/resources"
|
||||||
|
"github.com/navidrome/navidrome/scanner"
|
||||||
|
"github.com/navidrome/navidrome/scheduler"
|
||||||
|
"github.com/navidrome/navidrome/server/backgrounds"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"github.com/spf13/viper"
|
||||||
|
"golang.org/x/sync/errgroup"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	// cfgFile and noBanner are populated from command-line flags (the flag
	// wiring is not visible in this chunk).
	cfgFile  string
	noBanner bool

	// rootCmd is the top-level cobra command: running "navidrome" with no
	// subcommand starts the full server via runNavidrome.
	rootCmd = &cobra.Command{
		Use:   "navidrome",
		Short: "Navidrome is a self-hosted music server and streamer",
		Long: `Navidrome is a self-hosted music server and streamer.
Complete documentation is available at https://www.navidrome.org/docs`,
		PersistentPreRun: func(cmd *cobra.Command, args []string) {
			preRun()
		},
		Run: func(cmd *cobra.Command, args []string) {
			runNavidrome(cmd.Context())
		},
		PostRun: func(cmd *cobra.Command, args []string) {
			postRun()
		},
		Version: consts.Version,
	}
)
|
||||||
|
|
||||||
|
// Execute runs the root cobra command, which will start the Navidrome server by calling the runNavidrome function.
|
||||||
|
func Execute() {
	// Root context is cancelled when a shutdown signal arrives (see mainContext).
	ctx, cancel := mainContext(context.Background())
	defer cancel()

	// --version prints only the bare version string.
	rootCmd.SetVersionTemplate(`{{println .Version}}`)
	if err := rootCmd.ExecuteContext(ctx); err != nil {
		log.Fatal(err)
	}
}
|
||||||
|
|
||||||
|
// preRun runs before every command: prints the startup banner (unless
// suppressed with noBanner) and loads the configuration.
func preRun() {
	if !noBanner {
		println(resources.Banner())
	}
	conf.Load(noBanner)
}
|
||||||
|
|
||||||
|
// postRun runs after the root command finishes, logging the farewell message.
func postRun() {
	log.Info("Navidrome stopped, bye.")
}
|
||||||
|
|
||||||
|
// runNavidrome is the main entry point for the Navidrome server. It starts all the services and blocks.
|
||||||
|
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
|
||||||
|
// it will cancel the context and exit gracefully.
|
||||||
|
// runNavidrome is the main entry point for the Navidrome server. It starts all the services and blocks.
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
// it will cancel the context and exit gracefully.
func runNavidrome(ctx context.Context) {
	defer db.Init(ctx)() // open the DB now; the returned func closes it on exit

	// All long-running services share one errgroup: the first error (or the
	// cancellation of ctx) tears everything down together.
	g, ctx := errgroup.WithContext(ctx)
	g.Go(startServer(ctx))
	g.Go(startSignaller(ctx))
	g.Go(startScheduler(ctx))
	g.Go(startPlaybackServer(ctx))
	g.Go(schedulePeriodicBackup(ctx))
	g.Go(startInsightsCollector(ctx))
	g.Go(scheduleDBOptimizer(ctx))
	g.Go(startPluginManager(ctx))
	g.Go(runInitialScan(ctx))
	if conf.Server.Scanner.Enabled {
		g.Go(startScanWatcher(ctx))
		g.Go(schedulePeriodicScan(ctx))
	} else {
		log.Warn(ctx, "Automatic Scanning is DISABLED")
	}

	if err := g.Wait(); err != nil {
		log.Error("Fatal error in Navidrome. Aborting", err)
	}
}
|
||||||
|
|
||||||
|
// mainContext returns a context that is cancelled when the process receives a signal to exit.
|
||||||
|
func mainContext(ctx context.Context) (context.Context, context.CancelFunc) {
|
||||||
|
return signal.NotifyContext(ctx,
|
||||||
|
os.Interrupt,
|
||||||
|
syscall.SIGHUP,
|
||||||
|
syscall.SIGTERM,
|
||||||
|
syscall.SIGABRT,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// startServer starts the Navidrome web server, adding all the necessary routers.
|
||||||
|
// startServer starts the Navidrome web server, adding all the necessary routers.
// It returns a closure suitable for errgroup.Go that blocks until the server
// stops or ctx is cancelled.
func startServer(ctx context.Context) func() error {
	return func() error {
		a := CreateServer()
		a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter(ctx))
		a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter(ctx))
		a.MountRouter("Public Endpoints", consts.URLPathPublic, CreatePublicRouter())
		if conf.Server.LastFM.Enabled {
			a.MountRouter("LastFM Auth", consts.URLPathNativeAPI+"/lastfm", CreateLastFMRouter())
		}
		if conf.Server.ListenBrainz.Enabled {
			a.MountRouter("ListenBrainz Auth", consts.URLPathNativeAPI+"/listenbrainz", CreateListenBrainzRouter())
		}
		if conf.Server.Prometheus.Enabled {
			p := CreatePrometheus()
			// blocking call because takes <100ms but useful if fails
			p.WriteInitialMetrics(ctx)
			a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, p.GetHandler())
		}
		if conf.Server.DevEnableProfiler {
			a.MountRouter("Profiling", "/debug", middleware.Profiler())
		}
		// Only serve background images locally when the URL is a local path.
		if strings.HasPrefix(conf.Server.UILoginBackgroundURL, "/") {
			a.MountRouter("Background images", conf.Server.UILoginBackgroundURL, backgrounds.NewHandler())
		}
		return a.Run(ctx, conf.Server.Address, conf.Server.Port, conf.Server.TLSCert, conf.Server.TLSKey)
	}
}
|
||||||
|
|
||||||
|
// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
|
||||||
|
// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
// An empty schedule disables the feature. Scheduling errors are logged, never
// propagated, so they do not tear down the errgroup.
func schedulePeriodicScan(ctx context.Context) func() error {
	return func() error {
		schedule := conf.Server.Scanner.Schedule
		if schedule == "" {
			log.Info(ctx, "Periodic scan is DISABLED")
			return nil
		}

		s := CreateScanner(ctx)
		schedulerInstance := scheduler.GetInstance()

		log.Info("Scheduling periodic scan", "schedule", schedule)
		_, err := schedulerInstance.Add(schedule, func() {
			// Incremental (non-full) scan on each trigger.
			_, err := s.ScanAll(ctx, false)
			if err != nil {
				log.Error(ctx, "Error executing periodic scan", err)
			}
		})
		if err != nil {
			log.Error(ctx, "Error scheduling periodic scan", err)
		}
		return nil
	}
}
|
||||||
|
|
||||||
|
// pidHashChanged reports whether the configured Persistent ID formats for
// albums or tracks differ (case-insensitively) from the values previously
// stored in the properties table — a change requires a full rescan.
func pidHashChanged(ds model.DataStore) (bool, error) {
	pidAlbum, err := ds.Property(context.Background()).DefaultGet(consts.PIDAlbumKey, "")
	if err != nil {
		return false, err
	}
	pidTrack, err := ds.Property(context.Background()).DefaultGet(consts.PIDTrackKey, "")
	if err != nil {
		return false, err
	}
	return !strings.EqualFold(pidAlbum, conf.Server.PID.Album) || !strings.EqualFold(pidTrack, conf.Server.PID.Track), nil
}
|
||||||
|
|
||||||
|
// runInitialScan runs an initial scan of the music library if needed.
|
||||||
|
// runInitialScan runs an initial scan of the music library if needed: when
// configured to scan on startup, when a previous scan was interrupted, when a
// post-migration full scan is pending, or when the PID config changed.
func runInitialScan(ctx context.Context) func() error {
	return func() error {
		ds := CreateDataStore()
		fullScanRequired, err := ds.Property(ctx).DefaultGet(consts.FullScanAfterMigrationFlagKey, "0")
		if err != nil {
			return err
		}
		inProgress, err := ds.Library(ctx).ScanInProgress()
		if err != nil {
			return err
		}
		pidHasChanged, err := pidHashChanged(ds)
		if err != nil {
			return err
		}
		scanNeeded := conf.Server.Scanner.ScanOnStartup || inProgress || fullScanRequired == "1" || pidHasChanged
		time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
		if scanNeeded {
			s := CreateScanner(ctx)
			// Log why the scan is happening; a PID change upgrades it to a
			// full scan by setting the same flag the migration path uses.
			switch {
			case fullScanRequired == "1":
				log.Warn(ctx, "Full scan required after migration")
				_ = ds.Property(ctx).Delete(consts.FullScanAfterMigrationFlagKey)
			case pidHasChanged:
				log.Warn(ctx, "PID config changed, performing full scan")
				fullScanRequired = "1"
			case inProgress:
				log.Warn(ctx, "Resuming interrupted scan")
			default:
				log.Info("Executing initial scan")
			}

			_, err = s.ScanAll(ctx, fullScanRequired == "1")
			if err != nil {
				log.Error(ctx, "Scan failed", err)
			} else {
				log.Info(ctx, "Scan completed")
			}
		} else {
			log.Debug(ctx, "Initial scan not needed")
		}
		return nil
	}
}
|
||||||
|
|
||||||
|
// startScanWatcher starts the filesystem watcher that triggers scans on
// library changes. Disabled when WatcherWait is zero; watcher errors are
// logged but not propagated.
func startScanWatcher(ctx context.Context) func() error {
	return func() error {
		if conf.Server.Scanner.WatcherWait == 0 {
			log.Debug("Folder watcher is DISABLED")
			return nil
		}
		w := CreateScanWatcher(ctx)
		err := w.Run(ctx)
		if err != nil {
			log.Error("Error starting watcher", err)
		}
		return nil
	}
}
|
||||||
|
|
||||||
|
func schedulePeriodicBackup(ctx context.Context) func() error {
|
||||||
|
return func() error {
|
||||||
|
schedule := conf.Server.Backup.Schedule
|
||||||
|
if schedule == "" {
|
||||||
|
log.Info(ctx, "Periodic backup is DISABLED")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
schedulerInstance := scheduler.GetInstance()
|
||||||
|
|
||||||
|
log.Info("Scheduling periodic backup", "schedule", schedule)
|
||||||
|
_, err := schedulerInstance.Add(schedule, func() {
|
||||||
|
start := time.Now()
|
||||||
|
path, err := db.Backup(ctx)
|
||||||
|
elapsed := time.Since(start)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "Error backing up database", "elapsed", elapsed, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
log.Info(ctx, "Backup complete", "elapsed", elapsed, "path", path)
|
||||||
|
|
||||||
|
count, err := db.Prune(ctx)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "Error pruning database", "error", err)
|
||||||
|
} else if count > 0 {
|
||||||
|
log.Info(ctx, "Successfully pruned old files", "count", count)
|
||||||
|
} else {
|
||||||
|
log.Info(ctx, "No backups pruned")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// scheduleDBOptimizer schedules the periodic database optimization, skipping
// runs that would overlap with an active library scan.
func scheduleDBOptimizer(ctx context.Context) func() error {
	return func() error {
		log.Info(ctx, "Scheduling DB optimizer", "schedule", consts.OptimizeDBSchedule)
		schedulerInstance := scheduler.GetInstance()
		_, err := schedulerInstance.Add(consts.OptimizeDBSchedule, func() {
			if scanner.IsScanning() {
				log.Debug(ctx, "Skipping DB optimization because a scan is in progress")
				return
			}
			db.Optimize(ctx)
		})
		return err
	}
}
|
||||||
|
|
||||||
|
// startScheduler starts the Navidrome scheduler, which is used to run periodic tasks.
|
||||||
|
func startScheduler(ctx context.Context) func() error {
|
||||||
|
return func() error {
|
||||||
|
log.Info(ctx, "Starting scheduler")
|
||||||
|
schedulerInstance := scheduler.GetInstance()
|
||||||
|
schedulerInstance.Run(ctx)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// startInsightsCollector starts the Navidrome Insight Collector, if configured.
|
||||||
|
func startInsightsCollector(ctx context.Context) func() error {
|
||||||
|
return func() error {
|
||||||
|
if !conf.Server.EnableInsightsCollector {
|
||||||
|
log.Info(ctx, "Insight Collector is DISABLED")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
log.Info(ctx, "Starting Insight Collector")
|
||||||
|
select {
|
||||||
|
case <-time.After(conf.Server.DevInsightsInitialDelay):
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
ic := CreateInsights()
|
||||||
|
ic.Run(ctx)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// startPlaybackServer starts the Navidrome playback server, if configured.
|
||||||
|
// It is responsible for the Jukebox functionality
|
||||||
|
func startPlaybackServer(ctx context.Context) func() error {
|
||||||
|
return func() error {
|
||||||
|
if !conf.Server.Jukebox.Enabled {
|
||||||
|
log.Debug("Jukebox is DISABLED")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
log.Info(ctx, "Starting Jukebox service")
|
||||||
|
playbackInstance := GetPlaybackServer()
|
||||||
|
return playbackInstance.Run(ctx)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// startPluginManager starts the plugin manager, if configured.
|
||||||
|
func startPluginManager(ctx context.Context) func() error {
|
||||||
|
return func() error {
|
||||||
|
if !conf.Server.Plugins.Enabled {
|
||||||
|
log.Debug("Plugins are DISABLED")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
log.Info(ctx, "Starting plugin manager")
|
||||||
|
// Get the manager instance and scan for plugins
|
||||||
|
manager := GetPluginManager(ctx)
|
||||||
|
manager.ScanPlugins()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Implement some struct tags to map flags to viper
|
||||||
|
func init() {
|
||||||
|
cobra.OnInitialize(func() {
|
||||||
|
conf.InitConfig(cfgFile, true)
|
||||||
|
})
|
||||||
|
|
||||||
|
rootCmd.PersistentFlags().StringVarP(&cfgFile, "configfile", "c", "", `config file (default "./navidrome.toml")`)
|
||||||
|
rootCmd.PersistentFlags().BoolVarP(&noBanner, "nobanner", "n", false, `don't show banner`)
|
||||||
|
rootCmd.PersistentFlags().String("musicfolder", viper.GetString("musicfolder"), "folder where your music is stored")
|
||||||
|
rootCmd.PersistentFlags().String("datafolder", viper.GetString("datafolder"), "folder to store application data (DB), needs write access")
|
||||||
|
rootCmd.PersistentFlags().String("cachefolder", viper.GetString("cachefolder"), "folder to store cache data (transcoding, images...), needs write access")
|
||||||
|
rootCmd.PersistentFlags().StringP("loglevel", "l", viper.GetString("loglevel"), "log level, possible values: error, info, debug, trace")
|
||||||
|
rootCmd.PersistentFlags().String("logfile", viper.GetString("logfile"), "log file path, if not set logs will be printed to stderr")
|
||||||
|
|
||||||
|
_ = viper.BindPFlag("musicfolder", rootCmd.PersistentFlags().Lookup("musicfolder"))
|
||||||
|
_ = viper.BindPFlag("datafolder", rootCmd.PersistentFlags().Lookup("datafolder"))
|
||||||
|
_ = viper.BindPFlag("cachefolder", rootCmd.PersistentFlags().Lookup("cachefolder"))
|
||||||
|
_ = viper.BindPFlag("loglevel", rootCmd.PersistentFlags().Lookup("loglevel"))
|
||||||
|
_ = viper.BindPFlag("logfile", rootCmd.PersistentFlags().Lookup("logfile"))
|
||||||
|
|
||||||
|
rootCmd.Flags().StringP("address", "a", viper.GetString("address"), "IP address to bind to")
|
||||||
|
rootCmd.Flags().IntP("port", "p", viper.GetInt("port"), "HTTP port Navidrome will listen to")
|
||||||
|
rootCmd.Flags().String("baseurl", viper.GetString("baseurl"), "base URL to configure Navidrome behind a proxy (ex: /music or http://my.server.com)")
|
||||||
|
rootCmd.Flags().String("tlscert", viper.GetString("tlscert"), "optional path to a TLS cert file (enables HTTPS listening)")
|
||||||
|
rootCmd.Flags().String("unixsocketperm", viper.GetString("unixsocketperm"), "optional file permission for the unix socket")
|
||||||
|
rootCmd.Flags().String("tlskey", viper.GetString("tlskey"), "optional path to a TLS key file (enables HTTPS listening)")
|
||||||
|
|
||||||
|
rootCmd.Flags().Duration("sessiontimeout", viper.GetDuration("sessiontimeout"), "how long Navidrome will wait before closing web ui idle sessions")
|
||||||
|
rootCmd.Flags().Duration("scaninterval", viper.GetDuration("scaninterval"), "how frequently to scan for changes in your music library")
|
||||||
|
rootCmd.Flags().String("uiloginbackgroundurl", viper.GetString("uiloginbackgroundurl"), "URL to a backaground image used in the Login page")
|
||||||
|
rootCmd.Flags().Bool("enabletranscodingconfig", viper.GetBool("enabletranscodingconfig"), "enables transcoding configuration in the UI")
|
||||||
|
rootCmd.Flags().Bool("enabletranscodingcancellation", viper.GetBool("enabletranscodingcancellation"), "enables transcoding context cancellation")
|
||||||
|
rootCmd.Flags().String("transcodingcachesize", viper.GetString("transcodingcachesize"), "size of transcoding cache")
|
||||||
|
rootCmd.Flags().String("imagecachesize", viper.GetString("imagecachesize"), "size of image (art work) cache. set to 0 to disable cache")
|
||||||
|
rootCmd.Flags().String("albumplaycountmode", viper.GetString("albumplaycountmode"), "how to compute playcount for albums. absolute (default) or normalized")
|
||||||
|
rootCmd.Flags().Bool("autoimportplaylists", viper.GetBool("autoimportplaylists"), "enable/disable .m3u playlist auto-import`")
|
||||||
|
|
||||||
|
rootCmd.Flags().Bool("prometheus.enabled", viper.GetBool("prometheus.enabled"), "enable/disable prometheus metrics endpoint`")
|
||||||
|
rootCmd.Flags().String("prometheus.metricspath", viper.GetString("prometheus.metricspath"), "http endpoint for prometheus metrics")
|
||||||
|
|
||||||
|
_ = viper.BindPFlag("address", rootCmd.Flags().Lookup("address"))
|
||||||
|
_ = viper.BindPFlag("port", rootCmd.Flags().Lookup("port"))
|
||||||
|
_ = viper.BindPFlag("tlscert", rootCmd.Flags().Lookup("tlscert"))
|
||||||
|
_ = viper.BindPFlag("unixsocketperm", rootCmd.Flags().Lookup("unixsocketperm"))
|
||||||
|
_ = viper.BindPFlag("tlskey", rootCmd.Flags().Lookup("tlskey"))
|
||||||
|
_ = viper.BindPFlag("baseurl", rootCmd.Flags().Lookup("baseurl"))
|
||||||
|
|
||||||
|
_ = viper.BindPFlag("sessiontimeout", rootCmd.Flags().Lookup("sessiontimeout"))
|
||||||
|
_ = viper.BindPFlag("scaninterval", rootCmd.Flags().Lookup("scaninterval"))
|
||||||
|
_ = viper.BindPFlag("uiloginbackgroundurl", rootCmd.Flags().Lookup("uiloginbackgroundurl"))
|
||||||
|
|
||||||
|
_ = viper.BindPFlag("prometheus.enabled", rootCmd.Flags().Lookup("prometheus.enabled"))
|
||||||
|
_ = viper.BindPFlag("prometheus.metricspath", rootCmd.Flags().Lookup("prometheus.metricspath"))
|
||||||
|
|
||||||
|
_ = viper.BindPFlag("enabletranscodingconfig", rootCmd.Flags().Lookup("enabletranscodingconfig"))
|
||||||
|
_ = viper.BindPFlag("enabletranscodingcancellation", rootCmd.Flags().Lookup("enabletranscodingcancellation"))
|
||||||
|
_ = viper.BindPFlag("transcodingcachesize", rootCmd.Flags().Lookup("transcodingcachesize"))
|
||||||
|
_ = viper.BindPFlag("imagecachesize", rootCmd.Flags().Lookup("imagecachesize"))
|
||||||
|
}
|
||||||
96
cmd/scan.go
Normal file
96
cmd/scan.go
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/gob"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/core"
|
||||||
|
"github.com/navidrome/navidrome/db"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/persistence"
|
||||||
|
"github.com/navidrome/navidrome/scanner"
|
||||||
|
"github.com/navidrome/navidrome/utils/pl"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Flags for the scan command.
var (
	fullScan   bool     // check all subfolders, ignoring timestamps
	subprocess bool     // internal: emit gob-encoded progress on stdout
	targets    []string // optional libraryID:folderPath pairs limiting the scan
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
|
||||||
|
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
|
||||||
|
scanCmd.Flags().StringArrayVarP(&targets, "target", "t", []string{}, "list of libraryID:folderPath pairs, can be repeated (e.g., \"-t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical\")")
|
||||||
|
rootCmd.AddCommand(scanCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
var scanCmd = &cobra.Command{
|
||||||
|
Use: "scan",
|
||||||
|
Short: "Scan music folder",
|
||||||
|
Long: "Scan music folder for updates",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
runScanner(cmd.Context())
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func trackScanInteractively(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
|
||||||
|
for status := range pl.ReadOrDone(ctx, progress) {
|
||||||
|
if status.Warning != "" {
|
||||||
|
log.Warn(ctx, "Scan warning", "error", status.Warning)
|
||||||
|
}
|
||||||
|
if status.Error != "" {
|
||||||
|
log.Error(ctx, "Scan error", "error", status.Error)
|
||||||
|
}
|
||||||
|
// Discard the progress status, we only care about errors
|
||||||
|
}
|
||||||
|
|
||||||
|
if fullScan {
|
||||||
|
log.Info("Finished full rescan")
|
||||||
|
} else {
|
||||||
|
log.Info("Finished rescan")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func trackScanAsSubprocess(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
|
||||||
|
encoder := gob.NewEncoder(os.Stdout)
|
||||||
|
for status := range pl.ReadOrDone(ctx, progress) {
|
||||||
|
err := encoder.Encode(status)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "Failed to encode status", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func runScanner(ctx context.Context) {
|
||||||
|
sqlDB := db.Db()
|
||||||
|
defer db.Db().Close()
|
||||||
|
ds := persistence.New(sqlDB)
|
||||||
|
pls := core.NewPlaylists(ds)
|
||||||
|
|
||||||
|
// Parse targets if provided
|
||||||
|
var scanTargets []model.ScanTarget
|
||||||
|
if len(targets) > 0 {
|
||||||
|
var err error
|
||||||
|
scanTargets, err = model.ParseTargets(targets)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(ctx, "Failed to parse targets", err)
|
||||||
|
}
|
||||||
|
log.Info(ctx, "Scanning specific folders", "numTargets", len(scanTargets))
|
||||||
|
}
|
||||||
|
|
||||||
|
progress, err := scanner.CallScan(ctx, ds, pls, fullScan, scanTargets)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(ctx, "Failed to scan", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for the scanner to finish
|
||||||
|
if subprocess {
|
||||||
|
trackScanAsSubprocess(ctx, progress)
|
||||||
|
} else {
|
||||||
|
trackScanInteractively(ctx, progress)
|
||||||
|
}
|
||||||
|
}
|
||||||
14
cmd/signaller_nounix.go
Normal file
14
cmd/signaller_nounix.go
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
//go:build windows || plan9
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Windows and Plan9 don't support SIGUSR1, so we don't need to start a signaler.
// The returned function is a no-op, keeping the run-group contract satisfied.
func startSignaller(_ context.Context) func() error {
	return func() error { return nil }
}
|
||||||
40
cmd/signaller_unix.go
Normal file
40
cmd/signaller_unix.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
//go:build !windows && !plan9
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
const triggerScanSignal = syscall.SIGUSR1
|
||||||
|
|
||||||
|
func startSignaller(ctx context.Context) func() error {
|
||||||
|
log.Info(ctx, "Starting signaler")
|
||||||
|
scanner := CreateScanner(ctx)
|
||||||
|
|
||||||
|
return func() error {
|
||||||
|
var sigChan = make(chan os.Signal, 1)
|
||||||
|
signal.Notify(sigChan, triggerScanSignal)
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case sig := <-sigChan:
|
||||||
|
log.Info(ctx, "Received signal, triggering a new scan", "signal", sig)
|
||||||
|
start := time.Now()
|
||||||
|
_, err := scanner.ScanAll(ctx, false)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "Error scanning", err)
|
||||||
|
}
|
||||||
|
log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start))
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
267
cmd/svc.go
Normal file
267
cmd/svc.go
Normal file
@@ -0,0 +1,267 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/kardianos/service"
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
svcStatusLabels = map[service.Status]string{
|
||||||
|
service.StatusUnknown: "Unknown",
|
||||||
|
service.StatusStopped: "Stopped",
|
||||||
|
service.StatusRunning: "Running",
|
||||||
|
}
|
||||||
|
|
||||||
|
installUser string
|
||||||
|
workingDirectory string
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
svcCmd.AddCommand(buildInstallCmd())
|
||||||
|
svcCmd.AddCommand(buildUninstallCmd())
|
||||||
|
svcCmd.AddCommand(buildStartCmd())
|
||||||
|
svcCmd.AddCommand(buildStopCmd())
|
||||||
|
svcCmd.AddCommand(buildStatusCmd())
|
||||||
|
svcCmd.AddCommand(buildExecuteCmd())
|
||||||
|
rootCmd.AddCommand(svcCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
var svcCmd = &cobra.Command{
|
||||||
|
Use: "service",
|
||||||
|
Aliases: []string{"svc"},
|
||||||
|
Short: "Manage Navidrome as a service",
|
||||||
|
Long: fmt.Sprintf("Manage Navidrome as a service, using the OS service manager (%s)", service.Platform()),
|
||||||
|
Run: runServiceCmd,
|
||||||
|
}
|
||||||
|
|
||||||
|
// svcControl bridges the OS service manager's start/stop requests to the
// Navidrome run loop (implements the kardianos/service interface).
type svcControl struct {
	ctx    context.Context
	cancel context.CancelFunc
	done   chan struct{} // closed once runNavidrome returns
}
|
||||||
|
|
||||||
|
func (p *svcControl) Start(service.Service) error {
|
||||||
|
p.done = make(chan struct{})
|
||||||
|
p.ctx, p.cancel = context.WithCancel(context.Background())
|
||||||
|
go func() {
|
||||||
|
runNavidrome(p.ctx)
|
||||||
|
close(p.done)
|
||||||
|
}()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *svcControl) Stop(service.Service) error {
|
||||||
|
log.Info("Stopping service")
|
||||||
|
p.cancel()
|
||||||
|
select {
|
||||||
|
case <-p.done:
|
||||||
|
log.Info("Service stopped gracefully")
|
||||||
|
case <-time.After(10 * time.Second):
|
||||||
|
log.Error("Service did not stop in time. Killing it.")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var svcInstance = sync.OnceValue(func() service.Service {
|
||||||
|
options := make(service.KeyValue)
|
||||||
|
options["Restart"] = "on-failure"
|
||||||
|
options["SuccessExitStatus"] = "1 2 8 SIGKILL"
|
||||||
|
options["UserService"] = false
|
||||||
|
options["LogDirectory"] = conf.Server.DataFolder
|
||||||
|
options["SystemdScript"] = systemdScript
|
||||||
|
if conf.Server.LogFile != "" {
|
||||||
|
options["LogOutput"] = false
|
||||||
|
} else {
|
||||||
|
options["LogOutput"] = true
|
||||||
|
options["LogDirectory"] = conf.Server.DataFolder
|
||||||
|
}
|
||||||
|
svcConfig := &service.Config{
|
||||||
|
UserName: installUser,
|
||||||
|
Name: "navidrome",
|
||||||
|
DisplayName: "Navidrome",
|
||||||
|
Description: "Your Personal Streaming Service",
|
||||||
|
Dependencies: []string{
|
||||||
|
"After=remote-fs.target network.target",
|
||||||
|
},
|
||||||
|
WorkingDirectory: executablePath(),
|
||||||
|
Option: options,
|
||||||
|
}
|
||||||
|
arguments := []string{"service", "execute"}
|
||||||
|
if conf.Server.ConfigFile != "" {
|
||||||
|
arguments = append(arguments, "-c", conf.Server.ConfigFile)
|
||||||
|
}
|
||||||
|
svcConfig.Arguments = arguments
|
||||||
|
|
||||||
|
prg := &svcControl{}
|
||||||
|
svc, err := service.New(prg, svcConfig)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
return svc
|
||||||
|
})
|
||||||
|
|
||||||
|
func runServiceCmd(cmd *cobra.Command, _ []string) {
|
||||||
|
_ = cmd.Help()
|
||||||
|
}
|
||||||
|
|
||||||
|
func executablePath() string {
|
||||||
|
if workingDirectory != "" {
|
||||||
|
return workingDirectory
|
||||||
|
}
|
||||||
|
|
||||||
|
ex, err := os.Executable()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
return filepath.Dir(ex)
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildInstallCmd() *cobra.Command {
|
||||||
|
runInstallCmd := func(_ *cobra.Command, _ []string) {
|
||||||
|
var err error
|
||||||
|
println("Installing service with:")
|
||||||
|
println(" working directory: " + executablePath())
|
||||||
|
println(" music folder: " + conf.Server.MusicFolder)
|
||||||
|
println(" data folder: " + conf.Server.DataFolder)
|
||||||
|
if conf.Server.LogFile != "" {
|
||||||
|
println(" log file: " + conf.Server.LogFile)
|
||||||
|
} else {
|
||||||
|
println(" logs folder: " + conf.Server.DataFolder)
|
||||||
|
}
|
||||||
|
if cfgFile != "" {
|
||||||
|
conf.Server.ConfigFile, err = filepath.Abs(cfgFile)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
println(" config file: " + conf.Server.ConfigFile)
|
||||||
|
}
|
||||||
|
err = svcInstance().Install()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
println("Service installed. Use 'navidrome svc start' to start it.")
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := &cobra.Command{
|
||||||
|
Use: "install",
|
||||||
|
Short: "Install Navidrome service.",
|
||||||
|
Run: runInstallCmd,
|
||||||
|
}
|
||||||
|
cmd.Flags().StringVarP(&installUser, "user", "u", "", "user to run service")
|
||||||
|
cmd.Flags().StringVarP(&workingDirectory, "working-directory", "w", "", "working directory of service")
|
||||||
|
|
||||||
|
return cmd
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildUninstallCmd() *cobra.Command {
|
||||||
|
return &cobra.Command{
|
||||||
|
Use: "uninstall",
|
||||||
|
Short: "Uninstall Navidrome service. Does not delete the music or data folders",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
err := svcInstance().Uninstall()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
println("Service uninstalled. Music and data folders are still intact.")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildStartCmd() *cobra.Command {
|
||||||
|
return &cobra.Command{
|
||||||
|
Use: "start",
|
||||||
|
Short: "Start Navidrome service",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
err := svcInstance().Start()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
println("Service started. Use 'navidrome svc status' to check its status.")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildStopCmd() *cobra.Command {
|
||||||
|
return &cobra.Command{
|
||||||
|
Use: "stop",
|
||||||
|
Short: "Stop Navidrome service",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
err := svcInstance().Stop()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
println("Service stopped. Use 'navidrome svc status' to check its status.")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildStatusCmd() *cobra.Command {
|
||||||
|
return &cobra.Command{
|
||||||
|
Use: "status",
|
||||||
|
Short: "Show Navidrome service status",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
status, err := svcInstance().Status()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
fmt.Printf("Navidrome is %s.\n", svcStatusLabels[status])
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildExecuteCmd() *cobra.Command {
|
||||||
|
return &cobra.Command{
|
||||||
|
Use: "execute",
|
||||||
|
Short: "Run navidrome as a service in the foreground (it is very unlikely you want to run this, you are better off running just navidrome)",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
err := svcInstance().Run()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// systemdScript is the systemd unit template passed to kardianos/service,
// adding hardening directives on top of its default template.
const systemdScript = `[Unit]
Description={{.Description}}
ConditionFileIsExecutable={{.Path|cmdEscape}}
{{range $i, $dep := .Dependencies}}
{{$dep}} {{end}}

[Service]
StartLimitInterval=5
StartLimitBurst=10
ExecStart={{.Path|cmdEscape}}{{range .Arguments}} {{.|cmd}}{{end}}
{{if .WorkingDirectory}}WorkingDirectory={{.WorkingDirectory|cmdEscape}}{{end}}
{{if .UserName}}User={{.UserName}}{{end}}
{{if .Restart}}Restart={{.Restart}}{{end}}
{{if .SuccessExitStatus}}SuccessExitStatus={{.SuccessExitStatus}}{{end}}
TimeoutStopSec=20
RestartSec=120
EnvironmentFile=-/etc/sysconfig/{{.Name}}

DevicePolicy=closed
NoNewPrivileges=yes
PrivateTmp=yes
ProtectControlGroups=yes
ProtectKernelModules=yes
ProtectKernelTunables=yes
RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6
RestrictNamespaces=yes
RestrictRealtime=yes
SystemCallFilter=~@clock @debug @module @mount @obsolete @reboot @setuid @swap
{{if .WorkingDirectory}}ReadWritePaths={{.WorkingDirectory|cmdEscape}}{{end}}
ProtectSystem=full

[Install]
WantedBy=multi-user.target
`
|
||||||
477
cmd/user.go
Normal file
477
cmd/user.go
Normal file
@@ -0,0 +1,477 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/csv"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Masterminds/squirrel"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"golang.org/x/term"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Flags shared by the user subcommands.
var (
	email      string // user email (create/edit)
	libraryIds []int  // accessible library IDs (create/edit)
	name       string // display name, separate from the login username

	removeEmail    bool // edit: clear the user's email
	removeName     bool // edit: clear the user's name
	setAdmin       bool // create/edit: grant admin privileges
	setPassword    bool // edit: prompt for a new password on the CLI
	setRegularUser bool // edit: revoke admin privileges
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
rootCmd.AddCommand(userRoot)
|
||||||
|
|
||||||
|
userCreateCommand.Flags().StringVarP(&userID, "username", "u", "", "username")
|
||||||
|
|
||||||
|
userCreateCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
|
||||||
|
userCreateCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries. If empty, the user can access all libraries. This is incompatible with admin, as admin can always access all libraries")
|
||||||
|
|
||||||
|
userCreateCommand.Flags().BoolVarP(&setAdmin, "admin", "a", false, "If set, make the user an admin. This user will have access to every library")
|
||||||
|
userCreateCommand.Flags().StringVar(&name, "name", "", "New user's name (this is separate from username used to log in)")
|
||||||
|
|
||||||
|
_ = userCreateCommand.MarkFlagRequired("username")
|
||||||
|
|
||||||
|
userRoot.AddCommand(userCreateCommand)
|
||||||
|
|
||||||
|
userDeleteCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")
|
||||||
|
_ = userDeleteCommand.MarkFlagRequired("user")
|
||||||
|
userRoot.AddCommand(userDeleteCommand)
|
||||||
|
|
||||||
|
userEditCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")
|
||||||
|
|
||||||
|
userEditCommand.Flags().BoolVar(&setAdmin, "set-admin", false, "If set, make the user an admin")
|
||||||
|
userEditCommand.Flags().BoolVar(&setRegularUser, "set-regular", false, "If set, make the user a non-admin")
|
||||||
|
userEditCommand.MarkFlagsMutuallyExclusive("set-admin", "set-regular")
|
||||||
|
|
||||||
|
userEditCommand.Flags().BoolVar(&removeEmail, "remove-email", false, "If set, clear the user's email")
|
||||||
|
userEditCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
|
||||||
|
userEditCommand.MarkFlagsMutuallyExclusive("email", "remove-email")
|
||||||
|
|
||||||
|
userEditCommand.Flags().BoolVar(&removeName, "remove-name", false, "If set, clear the user's name")
|
||||||
|
userEditCommand.Flags().StringVar(&name, "name", "", "New user name (this is separate from username used to log in)")
|
||||||
|
userEditCommand.MarkFlagsMutuallyExclusive("name", "remove-name")
|
||||||
|
|
||||||
|
userEditCommand.Flags().BoolVar(&setPassword, "set-password", false, "If set, the user's new password will be prompted on the CLI")
|
||||||
|
|
||||||
|
userEditCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries by id")
|
||||||
|
|
||||||
|
_ = userEditCommand.MarkFlagRequired("user")
|
||||||
|
userRoot.AddCommand(userEditCommand)
|
||||||
|
|
||||||
|
userListCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
|
||||||
|
userRoot.AddCommand(userListCommand)
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
userRoot = &cobra.Command{
|
||||||
|
Use: "user",
|
||||||
|
Short: "Administer users",
|
||||||
|
Long: "Create, delete, list, or update users",
|
||||||
|
}
|
||||||
|
|
||||||
|
userCreateCommand = &cobra.Command{
|
||||||
|
Use: "create",
|
||||||
|
Aliases: []string{"c"},
|
||||||
|
Short: "Create a new user",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
runCreateUser(cmd.Context())
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
userDeleteCommand = &cobra.Command{
|
||||||
|
Use: "delete",
|
||||||
|
Aliases: []string{"d"},
|
||||||
|
Short: "Deletes an existing user",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
runDeleteUser(cmd.Context())
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
userEditCommand = &cobra.Command{
|
||||||
|
Use: "edit",
|
||||||
|
Aliases: []string{"e"},
|
||||||
|
Short: "Edit a user",
|
||||||
|
Long: "Edit the password, admin status, and/or library access",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
runUserEdit(cmd.Context())
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
userListCommand = &cobra.Command{
|
||||||
|
Use: "list",
|
||||||
|
Short: "List users",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
runUserList(cmd.Context())
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
func promptPassword() string {
|
||||||
|
for {
|
||||||
|
fmt.Print("Enter new password (press enter with no password to cancel): ")
|
||||||
|
// This cast is necessary for some platforms
|
||||||
|
password, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error getting password", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Print("\nConfirm new password (press enter with no password to cancel): ")
|
||||||
|
confirmation, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Error getting password confirmation", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// clear the line.
|
||||||
|
fmt.Println()
|
||||||
|
|
||||||
|
pass := string(password)
|
||||||
|
confirm := string(confirmation)
|
||||||
|
|
||||||
|
if pass == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if pass == confirm {
|
||||||
|
return pass
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("Password and password confirmation do not match")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func libraryError(libraries model.Libraries) error {
|
||||||
|
ids := make([]int, len(libraries))
|
||||||
|
for idx, library := range libraries {
|
||||||
|
ids[idx] = library.ID
|
||||||
|
}
|
||||||
|
return fmt.Errorf("not all available libraries found. Requested ids: %v, Found libraries: %v", libraryIds, ids)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runCreateUser(ctx context.Context) {
|
||||||
|
password := promptPassword()
|
||||||
|
if password == "" {
|
||||||
|
log.Fatal("Empty password provided, user creation cancelled")
|
||||||
|
}
|
||||||
|
|
||||||
|
user := model.User{
|
||||||
|
UserName: userID,
|
||||||
|
Email: email,
|
||||||
|
Name: name,
|
||||||
|
IsAdmin: setAdmin,
|
||||||
|
NewPassword: password,
|
||||||
|
}
|
||||||
|
|
||||||
|
if user.Name == "" {
|
||||||
|
user.Name = userID
|
||||||
|
}
|
||||||
|
|
||||||
|
ds, ctx := getAdminContext(ctx)
|
||||||
|
|
||||||
|
err := ds.WithTx(func(tx model.DataStore) error {
|
||||||
|
existingUser, err := tx.User(ctx).FindByUsername(userID)
|
||||||
|
if existingUser != nil {
|
||||||
|
return fmt.Errorf("existing user '%s'", userID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil && !errors.Is(err, model.ErrNotFound) {
|
||||||
|
return fmt.Errorf("failed to check existing username: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(libraryIds) > 0 && !setAdmin {
|
||||||
|
user.Libraries, err = tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(user.Libraries) != len(libraryIds) {
|
||||||
|
return libraryError(user.Libraries)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
user.Libraries, err = tx.Library(ctx).GetAll()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
err = tx.User(ctx).Put(&user)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
updatedIds := make([]int, len(user.Libraries))
|
||||||
|
for idx, lib := range user.Libraries {
|
||||||
|
updatedIds[idx] = lib.ID
|
||||||
|
}
|
||||||
|
|
||||||
|
err = tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
|
||||||
|
return err
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(ctx, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info(ctx, "Successfully created user", "id", user.ID, "username", user.UserName)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runDeleteUser(ctx context.Context) {
|
||||||
|
ds, ctx := getAdminContext(ctx)
|
||||||
|
|
||||||
|
var err error
|
||||||
|
var user *model.User
|
||||||
|
|
||||||
|
err = ds.WithTx(func(tx model.DataStore) error {
|
||||||
|
count, err := tx.User(ctx).CountAll()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if count == 1 {
|
||||||
|
return errors.New("refusing to delete the last user")
|
||||||
|
}
|
||||||
|
|
||||||
|
user, err = getUser(ctx, userID, tx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return tx.User(ctx).Delete(user.ID)
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(ctx, "Failed to delete user", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info(ctx, "Deleted user", "username", user.UserName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// runUserEdit applies the command-line flags (libraryIds, setAdmin,
// setRegularUser, setPassword, email/removeEmail, name/removeName) to the
// user identified by the package-level userID, inside a single transaction.
// Nothing is written unless at least one flag produced an actual change.
func runUserEdit(ctx context.Context) {
	ds, ctx := getAdminContext(ctx)

	var err error
	var user *model.User
	changes := []string{} // human-readable summary of what was modified

	err = ds.WithTx(func(tx model.DataStore) error {
		// Libraries to persist via SetUserLibraries after the Put, if any.
		var newLibraries model.Libraries

		user, err = getUser(ctx, userID, tx)
		if err != nil {
			return err
		}

		// Explicit library IDs are honored only when not promoting to admin:
		// an admin is granted all libraries in the setAdmin branch below.
		if len(libraryIds) > 0 && !setAdmin {
			libraries, err := tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})

			if err != nil {
				return err
			}

			// A count mismatch means at least one requested ID is invalid.
			if len(libraries) != len(libraryIds) {
				return libraryError(libraries)
			}

			newLibraries = libraries
			changes = append(changes, "updated library ids")
		}

		// Promotion to admin implies access to every library.
		if setAdmin && !user.IsAdmin {
			libraries, err := tx.Library(ctx).GetAll()
			if err != nil {
				return err
			}

			user.IsAdmin = true
			user.Libraries = libraries
			changes = append(changes, "set admin")

			newLibraries = libraries
		}

		// NOTE(review): demotion only flips the flag; the user keeps whatever
		// library set they had, unless library IDs were also given above.
		if setRegularUser && user.IsAdmin {
			user.IsAdmin = false
			changes = append(changes, "set regular user")
		}

		if setPassword {
			password := promptPassword()

			// An empty prompt response leaves the password untouched.
			if password != "" {
				user.NewPassword = password
				changes = append(changes, "updated password")
			}
		}

		// Setting a new value takes precedence over removal when both flags
		// are supplied (else-if ordering).
		if email != "" && email != user.Email {
			user.Email = email
			changes = append(changes, "updated email")
		} else if removeEmail && user.Email != "" {
			user.Email = ""
			changes = append(changes, "removed email")
		}

		if name != "" && name != user.Name {
			user.Name = name
			changes = append(changes, "updated name")
		} else if removeName && user.Name != "" {
			user.Name = ""
			changes = append(changes, "removed name")
		}

		// No-op edit: skip the write entirely.
		if len(changes) == 0 {
			return nil
		}

		err := tx.User(ctx).Put(user)
		if err != nil {
			return err
		}

		// The user/library association is stored separately from the user
		// record, so it gets its own write after a successful Put.
		if len(newLibraries) > 0 {
			updatedIds := make([]int, len(newLibraries))
			for idx, lib := range newLibraries {
				updatedIds[idx] = lib.ID
			}

			err := tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
			if err != nil {
				return err
			}
		}

		return nil
	})

	if err != nil {
		log.Fatal(ctx, "Failed to update user", err)
	}

	if len(changes) == 0 {
		log.Info(ctx, "No changes for user", "user", user.UserName)
	} else {
		log.Info(ctx, "Updated user", "user", user.UserName, "changes", strings.Join(changes, ", "))
	}
}
|
||||||
|
|
||||||
|
// displayLibrary is the JSON projection of a library used by the
// `user list` command's JSON output. Field order matters: encoding/json
// emits keys in declaration order.
type displayLibrary struct {
	ID   int    `json:"id"`
	Path string `json:"path"`
}
|
||||||
|
|
||||||
|
// displayUser is the JSON projection of a user used by the `user list`
// command's JSON output. LastAccess/LastLogin are pointers so a missing
// timestamp serializes as null rather than the zero time.
type displayUser struct {
	Id         string           `json:"id"`
	Username   string           `json:"username"`
	Name       string           `json:"name"`
	Email      string           `json:"email"`
	Admin      bool             `json:"admin"`
	CreatedAt  time.Time        `json:"createdAt"`
	UpdatedAt  time.Time        `json:"updatedAt"`
	LastAccess *time.Time       `json:"lastAccess"`
	LastLogin  *time.Time       `json:"lastLogin"`
	Libraries  []displayLibrary `json:"libraries"`
}
|
||||||
|
|
||||||
|
func runUserList(ctx context.Context) {
|
||||||
|
if outputFormat != "csv" && outputFormat != "json" {
|
||||||
|
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
ds, ctx := getAdminContext(ctx)
|
||||||
|
|
||||||
|
users, err := ds.User(ctx).ReadAll()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(ctx, "Failed to retrieve users", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
userList := users.(model.Users)
|
||||||
|
|
||||||
|
if outputFormat == "csv" {
|
||||||
|
w := csv.NewWriter(os.Stdout)
|
||||||
|
_ = w.Write([]string{
|
||||||
|
"user id",
|
||||||
|
"username",
|
||||||
|
"user's name",
|
||||||
|
"user email",
|
||||||
|
"admin",
|
||||||
|
"created at",
|
||||||
|
"updated at",
|
||||||
|
"last access",
|
||||||
|
"last login",
|
||||||
|
"libraries",
|
||||||
|
})
|
||||||
|
for _, user := range userList {
|
||||||
|
paths := make([]string, len(user.Libraries))
|
||||||
|
|
||||||
|
for idx, library := range user.Libraries {
|
||||||
|
paths[idx] = fmt.Sprintf("%d:%s", library.ID, library.Path)
|
||||||
|
}
|
||||||
|
|
||||||
|
var lastAccess, lastLogin string
|
||||||
|
|
||||||
|
if user.LastAccessAt != nil {
|
||||||
|
lastAccess = user.LastAccessAt.Format(time.RFC3339Nano)
|
||||||
|
} else {
|
||||||
|
lastAccess = "never"
|
||||||
|
}
|
||||||
|
|
||||||
|
if user.LastLoginAt != nil {
|
||||||
|
lastLogin = user.LastLoginAt.Format(time.RFC3339Nano)
|
||||||
|
} else {
|
||||||
|
lastLogin = "never"
|
||||||
|
}
|
||||||
|
|
||||||
|
_ = w.Write([]string{
|
||||||
|
user.ID,
|
||||||
|
user.UserName,
|
||||||
|
user.Name,
|
||||||
|
user.Email,
|
||||||
|
strconv.FormatBool(user.IsAdmin),
|
||||||
|
user.CreatedAt.Format(time.RFC3339Nano),
|
||||||
|
user.UpdatedAt.Format(time.RFC3339Nano),
|
||||||
|
lastAccess,
|
||||||
|
lastLogin,
|
||||||
|
fmt.Sprintf("'%s'", strings.Join(paths, "|")),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
w.Flush()
|
||||||
|
} else {
|
||||||
|
users := make([]displayUser, len(userList))
|
||||||
|
for idx, user := range userList {
|
||||||
|
paths := make([]displayLibrary, len(user.Libraries))
|
||||||
|
|
||||||
|
for idx, library := range user.Libraries {
|
||||||
|
paths[idx].ID = library.ID
|
||||||
|
paths[idx].Path = library.Path
|
||||||
|
}
|
||||||
|
|
||||||
|
users[idx].Id = user.ID
|
||||||
|
users[idx].Username = user.UserName
|
||||||
|
users[idx].Name = user.Name
|
||||||
|
users[idx].Email = user.Email
|
||||||
|
users[idx].Admin = user.IsAdmin
|
||||||
|
users[idx].CreatedAt = user.CreatedAt
|
||||||
|
users[idx].UpdatedAt = user.UpdatedAt
|
||||||
|
users[idx].LastAccess = user.LastAccessAt
|
||||||
|
users[idx].LastLogin = user.LastLoginAt
|
||||||
|
users[idx].Libraries = paths
|
||||||
|
}
|
||||||
|
|
||||||
|
j, _ := json.Marshal(users)
|
||||||
|
fmt.Printf("%s\n", j)
|
||||||
|
}
|
||||||
|
}
|
||||||
42
cmd/utils.go
Normal file
42
cmd/utils.go
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/core/auth"
|
||||||
|
"github.com/navidrome/navidrome/db"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/model/request"
|
||||||
|
"github.com/navidrome/navidrome/persistence"
|
||||||
|
)
|
||||||
|
|
||||||
|
func getAdminContext(ctx context.Context) (model.DataStore, context.Context) {
|
||||||
|
sqlDB := db.Db()
|
||||||
|
ds := persistence.New(sqlDB)
|
||||||
|
ctx = auth.WithAdminUser(ctx, ds)
|
||||||
|
u, _ := request.UserFrom(ctx)
|
||||||
|
if !u.IsAdmin {
|
||||||
|
log.Fatal(ctx, "There must be at least one admin user to run this command.")
|
||||||
|
}
|
||||||
|
return ds, ctx
|
||||||
|
}
|
||||||
|
|
||||||
|
func getUser(ctx context.Context, id string, ds model.DataStore) (*model.User, error) {
|
||||||
|
user, err := ds.User(ctx).FindByUsername(id)
|
||||||
|
|
||||||
|
if err != nil && !errors.Is(err, model.ErrNotFound) {
|
||||||
|
return nil, fmt.Errorf("finding user by name: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if errors.Is(err, model.ErrNotFound) {
|
||||||
|
user, err = ds.User(ctx).Get(id)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("finding user by id: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return user, nil
|
||||||
|
}
|
||||||
211
cmd/wire_gen.go
Normal file
211
cmd/wire_gen.go
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
// Code generated by Wire. DO NOT EDIT.
|
||||||
|
|
||||||
|
//go:generate go run -mod=mod github.com/google/wire/cmd/wire gen -tags "netgo"
|
||||||
|
//go:build !wireinject
|
||||||
|
// +build !wireinject
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/google/wire"
|
||||||
|
"github.com/navidrome/navidrome/core"
|
||||||
|
"github.com/navidrome/navidrome/core/agents"
|
||||||
|
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||||
|
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||||
|
"github.com/navidrome/navidrome/core/artwork"
|
||||||
|
"github.com/navidrome/navidrome/core/external"
|
||||||
|
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||||
|
"github.com/navidrome/navidrome/core/metrics"
|
||||||
|
"github.com/navidrome/navidrome/core/playback"
|
||||||
|
"github.com/navidrome/navidrome/core/scrobbler"
|
||||||
|
"github.com/navidrome/navidrome/db"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/persistence"
|
||||||
|
"github.com/navidrome/navidrome/plugins"
|
||||||
|
"github.com/navidrome/navidrome/scanner"
|
||||||
|
"github.com/navidrome/navidrome/server"
|
||||||
|
"github.com/navidrome/navidrome/server/events"
|
||||||
|
"github.com/navidrome/navidrome/server/nativeapi"
|
||||||
|
"github.com/navidrome/navidrome/server/public"
|
||||||
|
"github.com/navidrome/navidrome/server/subsonic"
|
||||||
|
)
|
||||||
|
|
||||||
|
import (
|
||||||
|
_ "github.com/navidrome/navidrome/adapters/taglib"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Injectors from wire_injectors.go:
|
||||||
|
|
||||||
|
// CreateDataStore builds a persistence-backed DataStore from the global DB
// handle. (Wire-generated; hand edits are lost on regeneration.)
func CreateDataStore() model.DataStore {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	return dataStore
}
|
||||||
|
|
||||||
|
// CreateServer assembles the HTTP server with its datastore, event broker,
// and insights collector. (Wire-generated.)
func CreateServer() *server.Server {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	broker := events.GetBroker()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	insights := metrics.GetInstance(dataStore, manager)
	serverServer := server.New(dataStore, broker, insights)
	return serverServer
}
|
||||||
|
|
||||||
|
// CreateNativeAPIRouter assembles the native (UI) API router and its full
// dependency chain: share/playlist services, insights, artwork pipeline,
// scanner, watcher, library, and maintenance services. (Wire-generated.)
func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	share := core.NewShare(dataStore)
	playlists := core.NewPlaylists(dataStore)
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	insights := metrics.GetInstance(dataStore, manager)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	watcher := scanner.GetWatcher(dataStore, modelScanner)
	library := core.NewLibrary(dataStore, modelScanner, watcher, broker)
	maintenance := core.NewMaintenance(dataStore)
	router := nativeapi.New(dataStore, share, playlists, insights, library, maintenance)
	return router
}
|
||||||
|
|
||||||
|
// CreateSubsonicAPIRouter assembles the Subsonic-compatible API router with
// streaming, artwork, archiving, scrobbling, and jukebox dependencies.
// (Wire-generated.)
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	transcodingCache := core.GetTranscodingCache()
	mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
	share := core.NewShare(dataStore)
	archiver := core.NewArchiver(mediaStreamer, dataStore, share)
	players := core.NewPlayers(dataStore)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	playlists := core.NewPlaylists(dataStore)
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager)
	playbackServer := playback.GetInstance(dataStore)
	router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, modelScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics)
	return router
}
|
||||||
|
|
||||||
|
// CreatePublicRouter assembles the router for public (unauthenticated share)
// endpoints: artwork, streaming, shares, and archive downloads.
// (Wire-generated.)
func CreatePublicRouter() *public.Router {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	transcodingCache := core.GetTranscodingCache()
	mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
	share := core.NewShare(dataStore)
	archiver := core.NewArchiver(mediaStreamer, dataStore, share)
	router := public.New(dataStore, artworkArtwork, mediaStreamer, share, archiver)
	return router
}
|
||||||
|
|
||||||
|
// CreateLastFMRouter builds the Last.fm integration router. (Wire-generated.)
func CreateLastFMRouter() *lastfm.Router {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	router := lastfm.NewRouter(dataStore)
	return router
}
|
||||||
|
|
||||||
|
// CreateListenBrainzRouter builds the ListenBrainz integration router.
// (Wire-generated.)
func CreateListenBrainzRouter() *listenbrainz.Router {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	router := listenbrainz.NewRouter(dataStore)
	return router
}
|
||||||
|
|
||||||
|
// CreateInsights builds the insights collector on top of the datastore and
// plugin manager. (Wire-generated.)
func CreateInsights() metrics.Insights {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	insights := metrics.GetInstance(dataStore, manager)
	return insights
}
|
||||||
|
|
||||||
|
// CreatePrometheus builds the Prometheus metrics instance. (Wire-generated.)
func CreatePrometheus() metrics.Metrics {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	return metricsMetrics
}
|
||||||
|
|
||||||
|
// CreateScanner assembles the media scanner with its artwork cache warmer,
// event broker, and playlist dependencies. (Wire-generated.)
func CreateScanner(ctx context.Context) model.Scanner {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	playlists := core.NewPlaylists(dataStore)
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	return modelScanner
}
|
||||||
|
|
||||||
|
// CreateScanWatcher assembles a filesystem watcher bound to a freshly built
// scanner instance. (Wire-generated.)
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	playlists := core.NewPlaylists(dataStore)
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	watcher := scanner.GetWatcher(dataStore, modelScanner)
	return watcher
}
|
||||||
|
|
||||||
|
// GetPlaybackServer builds the jukebox playback server via
// playback.GetInstance. (Wire-generated.)
func GetPlaybackServer() playback.PlaybackServer {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	playbackServer := playback.GetInstance(dataStore)
	return playbackServer
}
|
||||||
|
|
||||||
|
// getPluginManager builds the plugin manager via plugins.GetManager, without
// the Subsonic router attached (see GetPluginManager). (Wire-generated.)
func getPluginManager() plugins.Manager {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	return manager
}
|
||||||
|
|
||||||
|
// wire_injectors.go:
|
||||||
|
|
||||||
|
// allProviders is the provider set copied verbatim from wire_injectors.go by
// the Wire generator; keep it in sync by regenerating, not by hand-editing.
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, plugins.GetManager, metrics.GetPrometheusInstance, db.Db, wire.Bind(new(agents.PluginLoader), new(plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)), wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
|
||||||
|
|
||||||
|
// GetPluginManager returns the plugin manager with the Subsonic router
// attached before it is handed to callers.
func GetPluginManager(ctx context.Context) plugins.Manager {
	manager := getPluginManager()
	manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
	return manager
}
|
||||||
133
cmd/wire_injectors.go
Normal file
133
cmd/wire_injectors.go
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
//go:build wireinject
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/google/wire"
|
||||||
|
"github.com/navidrome/navidrome/core"
|
||||||
|
"github.com/navidrome/navidrome/core/agents"
|
||||||
|
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||||
|
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||||
|
"github.com/navidrome/navidrome/core/artwork"
|
||||||
|
"github.com/navidrome/navidrome/core/metrics"
|
||||||
|
"github.com/navidrome/navidrome/core/playback"
|
||||||
|
"github.com/navidrome/navidrome/core/scrobbler"
|
||||||
|
"github.com/navidrome/navidrome/db"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/persistence"
|
||||||
|
"github.com/navidrome/navidrome/plugins"
|
||||||
|
"github.com/navidrome/navidrome/scanner"
|
||||||
|
"github.com/navidrome/navidrome/server"
|
||||||
|
"github.com/navidrome/navidrome/server/events"
|
||||||
|
"github.com/navidrome/navidrome/server/nativeapi"
|
||||||
|
"github.com/navidrome/navidrome/server/public"
|
||||||
|
"github.com/navidrome/navidrome/server/subsonic"
|
||||||
|
)
|
||||||
|
|
||||||
|
// allProviders is the single Wire provider set shared by every injector in
// this file. It registers all constructors plus the interface bindings that
// resolve plugins.Manager and scanner.Watcher to their consumer interfaces.
var allProviders = wire.NewSet(
	core.Set,
	artwork.Set,
	server.New,
	subsonic.New,
	nativeapi.New,
	public.New,
	persistence.New,
	lastfm.NewRouter,
	listenbrainz.NewRouter,
	events.GetBroker,
	scanner.New,
	scanner.GetWatcher,
	plugins.GetManager,
	metrics.GetPrometheusInstance,
	db.Db,
	wire.Bind(new(agents.PluginLoader), new(plugins.Manager)),
	wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)),
	wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)),
	wire.Bind(new(core.Watcher), new(scanner.Watcher)),
)
|
||||||
|
|
||||||
|
// CreateDataStore is an injector declaration; Wire generates the real body
// into wire_gen.go, and this placeholder is excluded from normal builds by
// the wireinject build tag.
func CreateDataStore() model.DataStore {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateServer is an injector declaration for the HTTP server (generated
// implementation lives in wire_gen.go).
func CreateServer() *server.Server {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateNativeAPIRouter is an injector declaration for the native API router
// (generated implementation lives in wire_gen.go).
func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateSubsonicAPIRouter is an injector declaration for the Subsonic API
// router (generated implementation lives in wire_gen.go).
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreatePublicRouter is an injector declaration for the public endpoints
// router (generated implementation lives in wire_gen.go).
func CreatePublicRouter() *public.Router {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateLastFMRouter is an injector declaration for the Last.fm router
// (generated implementation lives in wire_gen.go).
func CreateLastFMRouter() *lastfm.Router {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateListenBrainzRouter is an injector declaration for the ListenBrainz
// router (generated implementation lives in wire_gen.go).
func CreateListenBrainzRouter() *listenbrainz.Router {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateInsights is an injector declaration for the insights collector
// (generated implementation lives in wire_gen.go).
func CreateInsights() metrics.Insights {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreatePrometheus is an injector declaration for the Prometheus metrics
// instance (generated implementation lives in wire_gen.go).
func CreatePrometheus() metrics.Metrics {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateScanner is an injector declaration for the media scanner (generated
// implementation lives in wire_gen.go).
func CreateScanner(ctx context.Context) model.Scanner {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// CreateScanWatcher is an injector declaration for the filesystem scan
// watcher (generated implementation lives in wire_gen.go).
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// GetPlaybackServer is an injector declaration for the jukebox playback
// server (generated implementation lives in wire_gen.go).
func GetPlaybackServer() playback.PlaybackServer {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// getPluginManager is an injector declaration for the bare plugin manager
// (generated implementation lives in wire_gen.go).
func getPluginManager() plugins.Manager {
	panic(wire.Build(
		allProviders,
	))
}
|
||||||
|
|
||||||
|
// GetPluginManager is hand-written (not a Wire injector): it wraps
// getPluginManager and attaches the Subsonic router before returning.
func GetPluginManager(ctx context.Context) plugins.Manager {
	manager := getPluginManager()
	manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
	return manager
}
|
||||||
4
conf/buildtags/buildtags.go
Normal file
4
conf/buildtags/buildtags.go
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
package buildtags
|
||||||
|
|
||||||
|
// This file is left intentionally empty. It is used to make sure the package is not empty, in the case all
|
||||||
|
// required build tags are disabled.
|
||||||
11
conf/buildtags/netgo.go
Normal file
11
conf/buildtags/netgo.go
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
//go:build netgo
|
||||||
|
|
||||||
|
package buildtags
|
||||||
|
|
||||||
|
// NOTICE: This file was created to force the inclusion of the `netgo` tag when compiling the project.
|
||||||
|
// If the tag is not included, the compilation will fail because this variable won't be defined, and the `main.go`
|
||||||
|
// file requires it.
|
||||||
|
|
||||||
|
// Why this tag is required? See https://github.com/navidrome/navidrome/issues/700
|
||||||
|
|
||||||
|
// NETGO exists solely so builds fail loudly when the `netgo` build tag is
// missing (main.go references this variable; see the notes above).
var NETGO = true
|
||||||
10
conf/configtest/configtest.go
Normal file
10
conf/configtest/configtest.go
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
package configtest
|
||||||
|
|
||||||
|
import "github.com/navidrome/navidrome/conf"
|
||||||
|
|
||||||
|
// SetupConfig snapshots the current global configuration and returns a
// restore function, letting tests mutate conf.Server freely and defer the
// cleanup.
//
// NOTE(review): the snapshot is a shallow copy — map/slice fields inside the
// config struct remain shared with the original value.
func SetupConfig() func() {
	oldValues := *conf.Server
	return func() {
		conf.Server = &oldValues
	}
}
|
||||||
709
conf/configuration.go
Normal file
709
conf/configuration.go
Normal file
@@ -0,0 +1,709 @@
|
|||||||
|
package conf
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/bmatcuk/doublestar/v4"
|
||||||
|
"github.com/go-viper/encoding/ini"
|
||||||
|
"github.com/kr/pretty"
|
||||||
|
"github.com/navidrome/navidrome/consts"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/utils/run"
|
||||||
|
"github.com/robfig/cron/v3"
|
||||||
|
"github.com/spf13/viper"
|
||||||
|
)
|
||||||
|
|
||||||
|
// configOptions is the full server configuration, populated by viper from
// config file, environment, and flags (see Load). Field names double as the
// configuration keys. Fields tagged `json:",omitzero"`/`json:",omitempty"`
// are suppressed from the serialized config dump when unset.
type configOptions struct {
	// Paths, network binding, and logging.
	ConfigFile     string
	Address        string
	Port           int
	UnixSocketPerm string
	MusicFolder    string
	DataFolder     string
	CacheFolder    string
	DbPath         string
	LogLevel       string
	LogFile        string
	SessionTimeout time.Duration
	BaseURL        string
	BasePath       string
	BaseHost       string
	BaseScheme     string
	TLSCert        string
	TLSKey         string
	// UI and feature toggles.
	UILoginBackgroundURL            string
	UIWelcomeMessage                string
	MaxSidebarPlaylists             int
	EnableTranscodingConfig         bool
	EnableTranscodingCancellation   bool
	EnableDownloads                 bool
	EnableExternalServices          bool
	EnableInsightsCollector         bool
	EnableMediaFileCoverArt         bool
	TranscodingCacheSize            string
	ImageCacheSize                  string
	AlbumPlayCountMode              string
	EnableArtworkPrecache           bool
	AutoImportPlaylists             bool
	DefaultPlaylistPublicVisibility bool
	PlaylistsPath                   string
	SmartPlaylistRefreshDelay       time.Duration
	AutoTranscodeDownload           bool
	DefaultDownsamplingFormat       string
	SearchFullString                bool
	RecentlyAddedByModTime          bool
	PreferSortTags                  bool
	IgnoredArticles                 string
	IndexGroups                     string
	// External tooling and artwork resolution.
	FFmpegPath        string
	MPVPath           string
	MPVCmdTemplate    string
	CoverArtPriority  string
	CoverJpegQuality  int
	ArtistArtPriority string
	LyricsPriority    string
	EnableGravatar    bool
	EnableFavourites  bool
	EnableStarRating  bool
	EnableUserEditing bool
	// Sharing, defaults, and auth.
	EnableSharing            bool
	ShareURL                 string
	DefaultShareExpiration   time.Duration
	DefaultDownloadableShare bool
	DefaultTheme             string
	DefaultLanguage          string
	DefaultUIVolume          int
	EnableReplayGain         bool
	EnableCoverAnimation     bool
	EnableNowPlaying         bool
	GATrackingID             string
	EnableLogRedacting       bool
	AuthRequestLimit         int
	AuthWindowLength         time.Duration
	PasswordEncryptionKey    string
	// Nested option groups.
	ExtAuth               extAuthOptions
	Plugins               pluginsOptions
	PluginConfig          map[string]map[string]string
	HTTPSecurityHeaders   secureOptions       `json:",omitzero"`
	Prometheus            prometheusOptions   `json:",omitzero"`
	Scanner               scannerOptions      `json:",omitzero"`
	Jukebox               jukeboxOptions      `json:",omitzero"`
	Backup                backupOptions       `json:",omitzero"`
	PID                   pidOptions          `json:",omitzero"`
	Inspect               inspectOptions      `json:",omitzero"`
	Subsonic              subsonicOptions     `json:",omitzero"`
	LastFM                lastfmOptions       `json:",omitzero"`
	Spotify               spotifyOptions      `json:",omitzero"`
	Deezer                deezerOptions       `json:",omitzero"`
	ListenBrainz          listenBrainzOptions `json:",omitzero"`
	EnableScrobbleHistory bool
	Tags                  map[string]TagConf `json:",omitempty"`
	Agents                string
	Meilisearch           meilisearchOptions `json:",omitzero"`

	// DevFlags. These are used to enable/disable debugging and incomplete features
	DevLogLevels                      map[string]string `json:",omitempty"`
	DevLogSourceLine                  bool
	DevEnableProfiler                 bool
	DevAutoCreateAdminPassword        string
	DevAutoLoginUsername              string
	DevActivityPanel                  bool
	DevActivityPanelUpdateRate        time.Duration
	DevSidebarPlaylists               bool
	DevShowArtistPage                 bool
	DevUIShowConfig                   bool
	DevNewEventStream                 bool
	DevOffsetOptimize                 int
	DevArtworkMaxRequests             int
	DevArtworkThrottleBacklogLimit    int
	DevArtworkThrottleBacklogTimeout  time.Duration
	DevArtistInfoTimeToLive           time.Duration
	DevAlbumInfoTimeToLive            time.Duration
	DevExternalScanner                bool
	DevScannerThreads                 uint
	DevSelectiveWatcher               bool
	DevInsightsInitialDelay           time.Duration
	DevEnablePlayerInsights           bool
	DevEnablePluginsInsights          bool
	DevPluginCompilationTimeout       time.Duration
	DevExternalArtistFetchMultiplier  float64
	DevOptimizeDB                     bool
	DevPreserveUnicodeInExternalCalls bool
}
|
||||||
|
|
||||||
|
// meilisearchOptions configures the optional Meilisearch search backend.
type meilisearchOptions struct {
	Enabled bool
	Host    string
	ApiKey  string
}
|
||||||
|
|
||||||
|
// scannerOptions configures the library scanner: scheduling, watcher
// behavior, tag extraction, and handling of missing files.
type scannerOptions struct {
	Enabled            bool
	Schedule           string
	WatcherWait        time.Duration
	ScanOnStartup      bool
	Extractor          string
	ArtistJoiner       string
	GenreSeparators    string // Deprecated: Use Tags.genre.Split instead
	GroupAlbumReleases bool   // Deprecated: Use PID.Album instead
	FollowSymlinks     bool   // Whether to follow symlinks when scanning directories
	PurgeMissing       string // Values: "never", "always", "full"
}
|
||||||
|
|
||||||
|
// subsonicOptions tweaks Subsonic API responses for compatibility with
// different client applications.
type subsonicOptions struct {
	AppendSubtitle        bool
	ArtistParticipations  bool
	DefaultReportRealPath bool
	LegacyClients         string
}
|
||||||
|
|
||||||
|
// TagConf customizes how a single metadata tag is mapped during scanning
// (keyed by tag name in configOptions.Tags).
type TagConf struct {
	Ignore    bool     `yaml:"ignore" json:",omitempty"`
	Aliases   []string `yaml:"aliases" json:",omitempty"`
	Type      string   `yaml:"type" json:",omitempty"`
	MaxLength int      `yaml:"maxLength" json:",omitempty"`
	Split     []string `yaml:"split" json:",omitempty"`
	Album     bool     `yaml:"album" json:",omitempty"`
}
|
||||||
|
|
||||||
|
// lastfmOptions holds Last.fm API credentials and scrobbling behavior.
type lastfmOptions struct {
	Enabled                 bool
	ApiKey                  string
	Secret                  string
	Language                string
	ScrobbleFirstArtistOnly bool
}
|
||||||
|
|
||||||
|
// spotifyOptions holds Spotify client credentials.
type spotifyOptions struct {
	ID     string
	Secret string
}
|
||||||
|
|
||||||
|
// deezerOptions configures the Deezer integration.
type deezerOptions struct {
	Enabled  bool
	Language string
}
|
||||||
|
|
||||||
|
// listenBrainzOptions configures the ListenBrainz scrobbling endpoint.
type listenBrainzOptions struct {
	Enabled bool
	BaseURL string
}
|
||||||
|
|
||||||
|
// secureOptions holds HTTP security header overrides.
type secureOptions struct {
	CustomFrameOptionsValue string
}
|
||||||
|
|
||||||
|
// prometheusOptions configures the optional Prometheus metrics endpoint.
type prometheusOptions struct {
	Enabled     bool
	MetricsPath string
	Password    string
}
|
||||||
|
|
||||||
|
// AudioDeviceDefinition describes one audio output device as a list of
// strings (used by jukeboxOptions.Devices).
type AudioDeviceDefinition []string
|
||||||
|
|
||||||
|
// jukeboxOptions configures server-side (jukebox) playback.
type jukeboxOptions struct {
	Enabled   bool
	Devices   []AudioDeviceDefinition
	Default   string
	AdminOnly bool
}
|
||||||
|
|
||||||
|
// backupOptions configures automatic database backups. Backups are effectively
// disabled unless Path, Schedule and Count are all set — see validateBackupSchedule.
type backupOptions struct {
	Count    int    // Number of backups to keep; default: 0
	Path     string // Destination folder; created by Load when non-empty
	Schedule string // Cron expression or plain duration; validated by validateBackupSchedule
}
|
||||||
|
|
||||||
|
// pidOptions holds the persistent-ID specs used to identify tracks and albums.
// Defaults: consts.DefaultTrackPID and consts.DefaultAlbumPID (see setViperDefaults).
type pidOptions struct {
	Track string
	Album string // Replaces the deprecated Scanner.GroupAlbumReleases option
}
|
||||||
|
|
||||||
|
// inspectOptions configures throttling of the inspect endpoint.
type inspectOptions struct {
	Enabled        bool // Default: true
	MaxRequests    int  // Default: 1
	BacklogLimit   int  // Default: consts.RequestThrottleBacklogLimit
	BacklogTimeout int  // Default: consts.RequestThrottleBacklogTimeout — NOTE(review): default constant is a time.Duration while the field is int; confirm viper coercion/units
}
|
||||||
|
|
||||||
|
// pluginsOptions configures the plugin subsystem.
type pluginsOptions struct {
	Enabled   bool   // Default: false
	Folder    string // Defaults to <DataFolder>/plugins when empty (set in Load)
	CacheSize string // Default: "100MB"
}
|
||||||
|
|
||||||
|
// extAuthOptions configures external (reverse-proxy) authentication. The
// deprecated ReverseProxyWhitelist/ReverseProxyUserHeader options are mapped
// onto these fields by Load via mapDeprecatedOption.
type extAuthOptions struct {
	TrustedSources string // Default: ""
	UserHeader     string // Default: "Remote-User"
}
|
||||||
|
|
||||||
|
var (
	// Server holds the active configuration, populated by Load from viper.
	Server = &configOptions{}
	// hooks are the functions registered via AddHook; Load runs them, in
	// registration order, after the configuration has been loaded.
	hooks []func()
)
|
||||||
|
|
||||||
|
func LoadFromFile(confFile string) {
|
||||||
|
viper.SetConfigFile(confFile)
|
||||||
|
err := viper.ReadInConfig()
|
||||||
|
if err != nil {
|
||||||
|
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error reading config file:", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
Load(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load finalizes the configuration after viper has read its sources: it
// normalizes INI files, maps deprecated options, unmarshals into Server,
// creates required folders, configures logging, validates schedules/paths,
// derives the BaseURL parts, and finally runs the registered init hooks.
// Any fatal problem terminates the process. When noConfigDump is true, the
// debug-level configuration dump is suppressed.
func Load(noConfigDump bool) {
	parseIniFileConfiguration()

	// Map deprecated options to their new names for backwards compatibility
	mapDeprecatedOption("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
	mapDeprecatedOption("ReverseProxyUserHeader", "ExtAuth.UserHeader")

	err := viper.Unmarshal(&Server)
	if err != nil {
		_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
		os.Exit(1)
	}

	// Ensure the data folder exists before anything tries to use it.
	err = os.MkdirAll(Server.DataFolder, os.ModePerm)
	if err != nil {
		_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", err)
		os.Exit(1)
	}

	// Cache folder defaults to a subfolder of the data folder.
	if Server.CacheFolder == "" {
		Server.CacheFolder = filepath.Join(Server.DataFolder, "cache")
	}
	err = os.MkdirAll(Server.CacheFolder, os.ModePerm)
	if err != nil {
		_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", err)
		os.Exit(1)
	}

	// Plugins folder (0700 — plugin binaries are private to the server user)
	// is only created when the plugin subsystem is enabled.
	if Server.Plugins.Enabled {
		if Server.Plugins.Folder == "" {
			Server.Plugins.Folder = filepath.Join(Server.DataFolder, "plugins")
		}
		err = os.MkdirAll(Server.Plugins.Folder, 0700)
		if err != nil {
			_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating plugins path:", err)
			os.Exit(1)
		}
	}

	Server.ConfigFile = viper.GetViper().ConfigFileUsed()
	if Server.DbPath == "" {
		Server.DbPath = filepath.Join(Server.DataFolder, consts.DefaultDbPath)
	}

	if Server.Backup.Path != "" {
		err = os.MkdirAll(Server.Backup.Path, os.ModePerm)
		if err != nil {
			_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating backup path:", err)
			os.Exit(1)
		}
	}

	// Route log output to the configured file, if any; `out` is also used
	// below for the optional configuration dump.
	out := os.Stderr
	if Server.LogFile != "" {
		out, err = os.OpenFile(Server.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
		if err != nil {
			_, _ = fmt.Fprintf(os.Stderr, "FATAL: Error opening log file %s: %s\n", Server.LogFile, err.Error())
			os.Exit(1)
		}
		log.SetOutput(out)
	}

	log.SetLevelString(Server.LogLevel)
	log.SetLogLevels(Server.DevLogLevels)
	log.SetLogSourceLine(Server.DevLogSourceLine)
	log.SetRedacting(Server.EnableLogRedacting)

	// Validations run in order; the first failure aborts startup (the
	// individual validators log the reason).
	err = run.Sequentially(
		validateScanSchedule,
		validateBackupSchedule,
		validatePlaylistsPath,
		validatePurgeMissingOption,
	)
	if err != nil {
		os.Exit(1)
	}

	// Split BaseURL into its scheme/host/path components.
	if Server.BaseURL != "" {
		u, err := url.Parse(Server.BaseURL)
		if err != nil {
			_, _ = fmt.Fprintln(os.Stderr, "FATAL: Invalid BaseURL:", err)
			os.Exit(1)
		}
		Server.BasePath = u.Path
		u.Path = ""
		u.RawQuery = ""
		Server.BaseHost = u.Host
		Server.BaseScheme = u.Scheme
	}

	// Log configuration source
	if Server.ConfigFile != "" {
		log.Info("Loaded configuration", "file", Server.ConfigFile)
	} else {
		log.Warn("No configuration file found. Using default values. To specify a config file, use the --configfile flag or set the ND_CONFIGFILE environment variable.")
	}

	// Print current configuration if log level is Debug
	if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
		prettyConf := pretty.Sprintf("Configuration: %# v", Server)
		if Server.EnableLogRedacting {
			prettyConf = log.Redact(prettyConf)
		}
		_, _ = fmt.Fprintln(out, prettyConf)
	}

	if !Server.EnableExternalServices {
		disableExternalServices()
	}

	// Only the default extractor is supported; silently fix-up with a warning.
	if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
		log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
		Server.Scanner.Extractor = consts.DefaultScannerExtractor
	}
	logDeprecatedOptions("Scanner.GenreSeparators")
	logDeprecatedOptions("Scanner.GroupAlbumReleases")
	logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
	logDeprecatedOptions("ReverseProxyWhitelist", "ReverseProxyUserHeader")

	// Call init hooks
	for _, hook := range hooks {
		hook()
	}
}
|
||||||
|
|
||||||
|
func logDeprecatedOptions(options ...string) {
|
||||||
|
for _, option := range options {
|
||||||
|
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
|
||||||
|
if os.Getenv(envVar) != "" {
|
||||||
|
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
|
||||||
|
}
|
||||||
|
if viper.InConfig(option) {
|
||||||
|
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// mapDeprecatedOption is used to provide backwards compatibility for deprecated options. It should be called after
|
||||||
|
// the config has been read by viper, but before unmarshalling it into the Config struct.
|
||||||
|
func mapDeprecatedOption(legacyName, newName string) {
|
||||||
|
if viper.IsSet(legacyName) {
|
||||||
|
viper.Set(newName, viper.Get(legacyName))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseIniFileConfiguration is used to parse the config file when it is in INI format. For INI files, it
|
||||||
|
// would require a nested structure, so instead we unmarshal it to a map and then merge the nested [default]
|
||||||
|
// section into the root level.
|
||||||
|
func parseIniFileConfiguration() {
|
||||||
|
cfgFile := viper.ConfigFileUsed()
|
||||||
|
if strings.ToLower(filepath.Ext(cfgFile)) == ".ini" {
|
||||||
|
var iniConfig map[string]interface{}
|
||||||
|
err := viper.Unmarshal(&iniConfig)
|
||||||
|
if err != nil {
|
||||||
|
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
cfg, ok := iniConfig["default"].(map[string]any)
|
||||||
|
if !ok {
|
||||||
|
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config: missing [default] section:", iniConfig)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
err = viper.MergeConfigMap(cfg)
|
||||||
|
if err != nil {
|
||||||
|
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// disableExternalServices turns off every integration that reaches out to the
// network: insights collection, Last.fm, Spotify, Deezer, ListenBrainz and all
// metadata agents. If the login background URL is still at its online default,
// it is swapped for the embedded offline image.
func disableExternalServices() {
	log.Info("All external integrations are DISABLED!")
	Server.EnableInsightsCollector = false
	Server.LastFM.Enabled = false
	Server.Spotify.ID = "" // presumably an empty ID disables the Spotify agent — confirm
	Server.Deezer.Enabled = false
	Server.ListenBrainz.Enabled = false
	Server.Agents = ""
	if Server.UILoginBackgroundURL == consts.DefaultUILoginBackgroundURL {
		Server.UILoginBackgroundURL = consts.DefaultUILoginBackgroundURLOffline
	}
}
|
||||||
|
|
||||||
|
func validatePlaylistsPath() error {
|
||||||
|
for _, path := range strings.Split(Server.PlaylistsPath, string(filepath.ListSeparator)) {
|
||||||
|
_, err := doublestar.Match(path, "")
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Invalid PlaylistsPath", "path", path, err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validatePurgeMissingOption() error {
|
||||||
|
allowedValues := []string{consts.PurgeMissingNever, consts.PurgeMissingAlways, consts.PurgeMissingFull}
|
||||||
|
valid := false
|
||||||
|
for _, v := range allowedValues {
|
||||||
|
if v == Server.Scanner.PurgeMissing {
|
||||||
|
valid = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !valid {
|
||||||
|
err := fmt.Errorf("invalid Scanner.PurgeMissing value: '%s'. Must be one of: %v", Server.Scanner.PurgeMissing, allowedValues)
|
||||||
|
log.Error(err.Error())
|
||||||
|
Server.Scanner.PurgeMissing = consts.PurgeMissingNever
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateScanSchedule() error {
|
||||||
|
if Server.Scanner.Schedule == "0" || Server.Scanner.Schedule == "" {
|
||||||
|
Server.Scanner.Schedule = ""
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
Server.Scanner.Schedule, err = validateSchedule(Server.Scanner.Schedule, "Scanner.Schedule")
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateBackupSchedule() error {
|
||||||
|
if Server.Backup.Path == "" || Server.Backup.Schedule == "" || Server.Backup.Count == 0 {
|
||||||
|
Server.Backup.Schedule = ""
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
Server.Backup.Schedule, err = validateSchedule(Server.Backup.Schedule, "Backup.Schedule")
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateSchedule(schedule, field string) (string, error) {
|
||||||
|
if _, err := time.ParseDuration(schedule); err == nil {
|
||||||
|
schedule = "@every " + schedule
|
||||||
|
}
|
||||||
|
c := cron.New()
|
||||||
|
id, err := c.AddFunc(schedule, func() {})
|
||||||
|
if err != nil {
|
||||||
|
log.Error(fmt.Sprintf("Invalid %s. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", field), "schedule", schedule, err)
|
||||||
|
} else {
|
||||||
|
c.Remove(id)
|
||||||
|
}
|
||||||
|
return schedule, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddHook is used to register initialization code that should run as soon as
// the config is loaded. Hooks run in registration order at the end of Load.
func AddHook(hook func()) {
	hooks = append(hooks, hook)
}
|
||||||
|
|
||||||
|
// setViperDefaults registers the default value for every configuration key.
// It runs once from init(), before any config file or environment variable is
// read; tests re-run it through the exported SetViperDefaults alias after
// viper.Reset(). Keys are lowercase; nested options use dotted keys.
func setViperDefaults() {
	// Core paths and server binding
	viper.SetDefault("musicfolder", filepath.Join(".", "music"))
	viper.SetDefault("cachefolder", "")
	viper.SetDefault("datafolder", ".")
	viper.SetDefault("loglevel", "info")
	viper.SetDefault("logfile", "")
	viper.SetDefault("address", "0.0.0.0")
	viper.SetDefault("port", 4533)
	viper.SetDefault("unixsocketperm", "0660")
	viper.SetDefault("sessiontimeout", consts.DefaultSessionTimeout)
	viper.SetDefault("baseurl", "")
	viper.SetDefault("tlscert", "")
	viper.SetDefault("tlskey", "")
	// UI appearance and behavior
	viper.SetDefault("uiloginbackgroundurl", consts.DefaultUILoginBackgroundURL)
	viper.SetDefault("uiwelcomemessage", "")
	viper.SetDefault("maxsidebarplaylists", consts.DefaultMaxSidebarPlaylists)
	viper.SetDefault("enabletranscodingconfig", false)
	viper.SetDefault("enabletranscodingcancellation", false)
	viper.SetDefault("transcodingcachesize", "100MB")
	viper.SetDefault("imagecachesize", "100MB")
	viper.SetDefault("albumplaycountmode", consts.AlbumPlayCountModeAbsolute)
	viper.SetDefault("enableartworkprecache", true)
	viper.SetDefault("autoimportplaylists", true)
	viper.SetDefault("defaultplaylistpublicvisibility", false)
	viper.SetDefault("playlistspath", "")
	viper.SetDefault("smartPlaylistRefreshDelay", 5*time.Second)
	viper.SetDefault("enabledownloads", true)
	viper.SetDefault("enableexternalservices", true)
	viper.SetDefault("enablemediafilecoverart", true)
	viper.SetDefault("autotranscodedownload", false)
	viper.SetDefault("defaultdownsamplingformat", consts.DefaultDownsamplingFormat)
	viper.SetDefault("searchfullstring", false)
	viper.SetDefault("recentlyaddedbymodtime", false)
	viper.SetDefault("prefersorttags", false)
	viper.SetDefault("ignoredarticles", "The El La Los Las Le Les Os As O A")
	viper.SetDefault("indexgroups", "A B C D E F G H I J K L M N O P Q R S T U V W X-Z(XYZ) [Unknown]([)")
	// External tools and artwork resolution
	viper.SetDefault("ffmpegpath", "")
	viper.SetDefault("mpvcmdtemplate", "mpv --audio-device=%d --no-audio-display %f --input-ipc-server=%s")
	viper.SetDefault("coverartpriority", "cover.*, folder.*, front.*, embedded, external")
	viper.SetDefault("coverjpegquality", 75)
	viper.SetDefault("artistartpriority", "artist.*, album/artist.*, external")
	viper.SetDefault("lyricspriority", ".lrc,.txt,embedded")
	viper.SetDefault("enablegravatar", false)
	viper.SetDefault("enablefavourites", true)
	viper.SetDefault("enablestarrating", true)
	viper.SetDefault("enableuserediting", true)
	viper.SetDefault("defaulttheme", "Dark")
	viper.SetDefault("defaultlanguage", "")
	viper.SetDefault("defaultuivolume", consts.DefaultUIVolume)
	viper.SetDefault("enablereplaygain", true)
	viper.SetDefault("enablecoveranimation", true)
	viper.SetDefault("enablenowplaying", true)
	// Sharing
	viper.SetDefault("enablesharing", false)
	viper.SetDefault("shareurl", "")
	viper.SetDefault("defaultshareexpiration", 8760*time.Hour)
	viper.SetDefault("defaultdownloadableshare", false)
	// Telemetry, auth and security
	viper.SetDefault("gatrackingid", "")
	viper.SetDefault("enableinsightscollector", true)
	viper.SetDefault("enablelogredacting", true)
	viper.SetDefault("authrequestlimit", 5)
	viper.SetDefault("authwindowlength", 20*time.Second)
	viper.SetDefault("passwordencryptionkey", "")
	viper.SetDefault("extauth.userheader", "Remote-User")
	viper.SetDefault("extauth.trustedsources", "")
	// Prometheus metrics
	viper.SetDefault("prometheus.enabled", false)
	viper.SetDefault("prometheus.metricspath", consts.PrometheusDefaultPath)
	viper.SetDefault("prometheus.password", "")
	// Jukebox mode
	viper.SetDefault("jukebox.enabled", false)
	viper.SetDefault("jukebox.devices", []AudioDeviceDefinition{})
	viper.SetDefault("jukebox.default", "")
	viper.SetDefault("jukebox.adminonly", true)
	// Scanner
	viper.SetDefault("scanner.enabled", true)
	viper.SetDefault("scanner.schedule", "0")
	viper.SetDefault("scanner.extractor", consts.DefaultScannerExtractor)
	viper.SetDefault("scanner.watcherwait", consts.DefaultWatcherWait)
	viper.SetDefault("scanner.scanonstartup", true)
	viper.SetDefault("scanner.artistjoiner", consts.ArtistJoiner)
	viper.SetDefault("scanner.genreseparators", "")
	viper.SetDefault("scanner.groupalbumreleases", false)
	viper.SetDefault("scanner.followsymlinks", true)
	viper.SetDefault("scanner.purgemissing", consts.PurgeMissingNever)
	// Subsonic API
	viper.SetDefault("subsonic.appendsubtitle", true)
	viper.SetDefault("subsonic.artistparticipations", false)
	viper.SetDefault("subsonic.defaultreportrealpath", false)
	viper.SetDefault("subsonic.legacyclients", "DSub")
	// Metadata agents
	viper.SetDefault("agents", "lastfm,spotify,deezer")
	viper.SetDefault("lastfm.enabled", true)
	viper.SetDefault("lastfm.language", "en")
	viper.SetDefault("lastfm.apikey", "")
	viper.SetDefault("lastfm.secret", "")
	viper.SetDefault("lastfm.scrobblefirstartistonly", false)
	viper.SetDefault("spotify.id", "")
	viper.SetDefault("spotify.secret", "")
	viper.SetDefault("deezer.enabled", true)
	viper.SetDefault("deezer.language", "en")
	viper.SetDefault("listenbrainz.enabled", true)
	viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
	viper.SetDefault("enablescrobblehistory", true)
	viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
	// Backups (disabled by default — see validateBackupSchedule)
	viper.SetDefault("backup.path", "")
	viper.SetDefault("backup.schedule", "")
	viper.SetDefault("backup.count", 0)
	// Persistent IDs
	viper.SetDefault("pid.track", consts.DefaultTrackPID)
	viper.SetDefault("pid.album", consts.DefaultAlbumPID)
	// Inspect endpoint throttling
	viper.SetDefault("inspect.enabled", true)
	viper.SetDefault("inspect.maxrequests", 1)
	viper.SetDefault("inspect.backloglimit", consts.RequestThrottleBacklogLimit)
	viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)
	// Plugins
	viper.SetDefault("plugins.folder", "")
	viper.SetDefault("plugins.enabled", false)
	viper.SetDefault("plugins.cachesize", "100MB")

	viper.SetDefault("meilisearch.enabled", false)
	viper.SetDefault("meilisearch.host", "http://localhost:7700")
	viper.SetDefault("meilisearch.apikey", "")

	// DevFlags. These are used to enable/disable debugging and incomplete features
	viper.SetDefault("devlogsourceline", false)
	viper.SetDefault("devenableprofiler", false)
	viper.SetDefault("devautocreateadminpassword", "")
	viper.SetDefault("devautologinusername", "")
	viper.SetDefault("devactivitypanel", true)
	viper.SetDefault("devactivitypanelupdaterate", 300*time.Millisecond)
	viper.SetDefault("devsidebarplaylists", true)
	viper.SetDefault("devshowartistpage", true)
	viper.SetDefault("devuishowconfig", true)
	viper.SetDefault("devneweventstream", true)
	viper.SetDefault("devoffsetoptimize", 50000)
	viper.SetDefault("devartworkmaxrequests", max(2, runtime.NumCPU()/3))
	viper.SetDefault("devartworkthrottlebackloglimit", consts.RequestThrottleBacklogLimit)
	viper.SetDefault("devartworkthrottlebacklogtimeout", consts.RequestThrottleBacklogTimeout)
	viper.SetDefault("devartistinfotimetolive", consts.ArtistInfoTimeToLive)
	viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
	viper.SetDefault("devexternalscanner", true)
	viper.SetDefault("devscannerthreads", 5)
	viper.SetDefault("devselectivewatcher", true)
	viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
	viper.SetDefault("devenableplayerinsights", true)
	viper.SetDefault("devenablepluginsinsights", true)
	viper.SetDefault("devplugincompilationtimeout", time.Minute)
	viper.SetDefault("devexternalartistfetchmultiplier", 1.5)
	viper.SetDefault("devoptimizedb", true)
	viper.SetDefault("devpreserveunicodeinexternalcalls", false)
}
|
||||||
|
|
||||||
|
// init registers all viper defaults before any config source is read.
func init() {
	setViperDefaults()
}
|
||||||
|
|
||||||
|
// InitConfig wires up viper: it registers an INI codec, selects the config
// file (explicit flag value, ND_CONFIGFILE, or a "navidrome.*" file in the
// current directory), optionally binds ND_*-prefixed environment variables,
// and performs the initial config read. It terminates the process if a config
// file was selected but cannot be opened.
func InitConfig(cfgFile string, loadEnvVars bool) {
	// INI support needs a custom codec; quote unescaping and inline-comment
	// handling are enabled so values like 'Welcome ini' ; comment parse cleanly.
	codecRegistry := viper.NewCodecRegistry()
	_ = codecRegistry.RegisterCodec("ini", ini.Codec{
		LoadOptions: ini.LoadOptions{
			UnescapeValueDoubleQuotes:   true,
			UnescapeValueCommentSymbols: true,
		},
	})
	viper.SetOptions(viper.WithCodecRegistry(codecRegistry))

	cfgFile = getConfigFile(cfgFile)
	if cfgFile != "" {
		// Use config file from the flag.
		viper.SetConfigFile(cfgFile)
	} else {
		// Search config in local directory with name "navidrome" (without extension).
		viper.AddConfigPath(".")
		viper.SetConfigName("navidrome")
	}

	// "port" is bound even when env loading is disabled — NOTE(review):
	// presumably so ND_PORT always works; confirm intent.
	_ = viper.BindEnv("port")
	if loadEnvVars {
		viper.SetEnvPrefix("ND")
		replacer := strings.NewReplacer(".", "_")
		viper.SetEnvKeyReplacer(replacer)
		viper.AutomaticEnv()
	}

	// A read error is only fatal when a config file was actually selected;
	// running without any config file is supported.
	err := viper.ReadInConfig()
	if viper.ConfigFileUsed() != "" && err != nil {
		_, _ = fmt.Fprintln(os.Stderr, "FATAL: Navidrome could not open config file: ", err)
		os.Exit(1)
	}
}
|
||||||
|
|
||||||
|
// getConfigFile returns the path to the config file, either from the flag or from the environment variable.
|
||||||
|
// If it is defined in the environment variable, it will check if the file exists.
|
||||||
|
func getConfigFile(cfgFile string) string {
|
||||||
|
if cfgFile != "" {
|
||||||
|
return cfgFile
|
||||||
|
}
|
||||||
|
cfgFile = os.Getenv("ND_CONFIGFILE")
|
||||||
|
if cfgFile != "" {
|
||||||
|
if _, err := os.Stat(cfgFile); err == nil {
|
||||||
|
return cfgFile
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
55
conf/configuration_test.go
Normal file
55
conf/configuration_test.go
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
package conf_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/spf13/viper"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestConfiguration bootstraps Ginkgo and runs the "Configuration Suite" specs.
func TestConfiguration(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Configuration Suite")
}
|
||||||
|
|
||||||
|
// Spec: loads each supported config file format (TOML/YAML/INI/JSON) from
// testdata and verifies the same set of options ends up in conf.Server,
// including nested Tags entries and the deprecated option mapping.
var _ = Describe("Configuration", func() {
	BeforeEach(func() {
		// Reset viper configuration
		viper.Reset()
		conf.SetViperDefaults()
		// Point the data folder at a temp dir so Load's MkdirAll calls don't
		// touch the working directory, and keep log output quiet.
		viper.SetDefault("datafolder", GinkgoT().TempDir())
		viper.SetDefault("loglevel", "error")
		conf.ResetConf()
	})

	DescribeTable("should load configuration from",
		func(format string) {
			filename := filepath.Join("testdata", "cfg."+format)

			// Initialize config with the test file
			conf.InitConfig(filename, false)
			// Load the configuration (with noConfigDump=true)
			conf.Load(true)

			// Execute the format-specific assertions
			Expect(conf.Server.MusicFolder).To(Equal(fmt.Sprintf("/%s/music", format)))
			Expect(conf.Server.UIWelcomeMessage).To(Equal("Welcome " + format))
			Expect(conf.Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))
			Expect(conf.Server.Tags["artist"].Split).To(Equal([]string{";"}))

			// Check deprecated option mapping
			Expect(conf.Server.ExtAuth.UserHeader).To(Equal("X-Auth-User"))

			// The config file used should be the one we created
			Expect(conf.Server.ConfigFile).To(Equal(filename))
		},
		Entry("TOML format", "toml"),
		Entry("YAML format", "yaml"),
		Entry("INI format", "ini"),
		Entry("JSON format", "json"),
	)
})
|
||||||
7
conf/export_test.go
Normal file
7
conf/export_test.go
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
package conf
|
||||||
|
|
||||||
|
// ResetConf replaces the package-level Server config with a fresh zero-value
// instance. Test-only helper, exported through this export_test file.
func ResetConf() {
	Server = &configOptions{}
}
|
||||||
|
|
||||||
|
// SetViperDefaults exposes the unexported setViperDefaults to tests.
var SetViperDefaults = setViperDefaults
|
||||||
48
conf/mime/mime_types.go
Normal file
48
conf/mime/mime_types.go
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
package mime
|
||||||
|
|
||||||
|
import (
|
||||||
|
"mime"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/resources"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
// mimeConf mirrors the structure of the embedded mime_types.yaml resource.
type mimeConf struct {
	Types    map[string]string `yaml:"types"`    // file extension -> mime type, registered via mime.AddExtensionType
	Lossless []string          `yaml:"lossless"` // extensions of lossless formats (leading dot stripped on load)
}
|
||||||
|
|
||||||
|
// LosslessFormats lists the lossless audio format extensions (without the
// leading dot), populated from mime_types.yaml by initMimeTypes.
var LosslessFormats []string
|
||||||
|
|
||||||
|
func initMimeTypes() {
|
||||||
|
// In some circumstances, Windows sets JS mime-type to `text/plain`!
|
||||||
|
_ = mime.AddExtensionType(".js", "text/javascript")
|
||||||
|
_ = mime.AddExtensionType(".css", "text/css")
|
||||||
|
_ = mime.AddExtensionType(".webmanifest", "application/manifest+json")
|
||||||
|
|
||||||
|
f, err := resources.FS().Open("mime_types.yaml")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Fatal error opening mime_types.yaml", err)
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
var mimeConf mimeConf
|
||||||
|
err = yaml.NewDecoder(f).Decode(&mimeConf)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Fatal error parsing mime_types.yaml", err)
|
||||||
|
}
|
||||||
|
for ext, typ := range mimeConf.Types {
|
||||||
|
_ = mime.AddExtensionType(ext, typ)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, ext := range mimeConf.Lossless {
|
||||||
|
LosslessFormats = append(LosslessFormats, strings.TrimPrefix(ext, "."))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// init defers mime-type registration until the configuration has been loaded,
// by registering initMimeTypes as a conf hook.
func init() {
	conf.AddHook(initMimeTypes)
}
|
||||||
8
conf/testdata/cfg.ini
vendored
Normal file
8
conf/testdata/cfg.ini
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
[default]
|
||||||
|
MusicFolder = /ini/music
|
||||||
|
UIWelcomeMessage = 'Welcome ini' ; Just a comment to test the LoadOptions
|
||||||
|
ReverseProxyUserHeader = 'X-Auth-User'
|
||||||
|
|
||||||
|
[Tags]
|
||||||
|
Custom.Aliases = ini,test
|
||||||
|
artist.Split = ";" # Should be able to read ; as a separator
|
||||||
16
conf/testdata/cfg.json
vendored
Normal file
16
conf/testdata/cfg.json
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"musicFolder": "/json/music",
|
||||||
|
"uiWelcomeMessage": "Welcome json",
|
||||||
|
"reverseProxyUserHeader": "X-Auth-User",
|
||||||
|
"Tags": {
|
||||||
|
"artist": {
|
||||||
|
"split": ";"
|
||||||
|
},
|
||||||
|
"custom": {
|
||||||
|
"aliases": [
|
||||||
|
"json",
|
||||||
|
"test"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
8
conf/testdata/cfg.toml
vendored
Normal file
8
conf/testdata/cfg.toml
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
musicFolder = "/toml/music"
|
||||||
|
uiWelcomeMessage = "Welcome toml"
|
||||||
|
ReverseProxyUserHeader = "X-Auth-User"
|
||||||
|
|
||||||
|
Tags.artist.Split = ';'
|
||||||
|
|
||||||
|
[Tags.custom]
|
||||||
|
aliases = ["toml", "test"]
|
||||||
10
conf/testdata/cfg.yaml
vendored
Normal file
10
conf/testdata/cfg.yaml
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
musicFolder: "/yaml/music"
|
||||||
|
uiWelcomeMessage: "Welcome yaml"
|
||||||
|
reverseProxyUserHeader: "X-Auth-User"
|
||||||
|
Tags:
|
||||||
|
artist:
|
||||||
|
split: [";"]
|
||||||
|
custom:
|
||||||
|
aliases:
|
||||||
|
- yaml
|
||||||
|
- test
|
||||||
176
consts/consts.go
Normal file
176
consts/consts.go
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
package consts
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/model/id"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	AppName = "navidrome"

	// SQLite DSN suffix and DB property keys (values stored in the DB's property table — confirm).
	DefaultDbPath                 = "navidrome.db?cache=shared&_busy_timeout=15000&_journal_mode=WAL&_foreign_keys=on&synchronous=normal"
	InitialSetupFlagKey           = "InitialSetup"
	FullScanAfterMigrationFlagKey = "FullScanAfterMigration"
	LastScanErrorKey              = "LastScanError"
	LastScanTypeKey               = "LastScanType"
	LastScanStartTimeKey          = "LastScanStartTime"

	// UI/auth headers and session settings
	UIAuthorizationHeader  = "X-ND-Authorization"
	UIClientUniqueIDHeader = "X-ND-Client-Unique-Id"
	JWTSecretKey           = "JWTSecret"
	JWTIssuer              = "ND"
	DefaultSessionTimeout  = 48 * time.Hour
	CookieExpiry           = 365 * 24 * 3600 // One year

	OptimizeDBSchedule = "@every 24h"

	// DefaultEncryptionKey This is the encryption key used if none is specified in the `PasswordEncryptionKey` option
	// Never ever change this! Or it will break all Navidrome installations that don't set the config option
	DefaultEncryptionKey  = "just for obfuscation"
	PasswordsEncryptedKey = "PasswordsEncryptedKey"
	PasswordAutogenPrefix = "__NAVIDROME_AUTOGEN__" //nolint:gosec

	DevInitialUserName = "admin"
	DevInitialName     = "Dev Admin"

	// URL path prefixes for the different server surfaces
	URLPathUI           = "/app"
	URLPathNativeAPI    = "/api"
	URLPathSubsonicAPI  = "/rest"
	URLPathPublic       = "/share"
	URLPathPublicImages = URLPathPublic + "/img"

	// DefaultUILoginBackgroundURL uses Navidrome curated background images collection,
	// available at https://unsplash.com/collections/20072696/navidrome
	DefaultUILoginBackgroundURL = "/backgrounds"

	// DefaultUILoginBackgroundOffline Background image used in case external integrations are disabled
	DefaultUILoginBackgroundOffline    = "iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAIAAAAiOjnJAAAABGdBTUEAALGPC/xhBQAAAiJJREFUeF7t0IEAAAAAw6D5Ux/khVBhwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDBgwIABAwYMGDDwMDDVlwABBWcSrQAAAABJRU5ErkJggg=="
	DefaultUILoginBackgroundURLOffline = "data:image/png;base64," + DefaultUILoginBackgroundOffline
	DefaultMaxSidebarPlaylists         = 100

	// Request throttling defaults (shared by several throttled endpoints)
	RequestThrottleBacklogLimit   = 100
	RequestThrottleBacklogTimeout = time.Minute

	ServerReadHeaderTimeout = 3 * time.Second

	// External metadata cache TTLs and periodic update frequencies
	ArtistInfoTimeToLive      = 24 * time.Hour
	AlbumInfoTimeToLive       = 7 * 24 * time.Hour
	UpdateLastAccessFrequency = time.Minute
	UpdatePlayerFrequency     = time.Minute

	I18nFolder     = "i18n"
	ScanIgnoreFile = ".ndignore"

	// Embedded placeholder assets and UI defaults
	PlaceholderArtistArt = "artist-placeholder.webp"
	PlaceholderAlbumArt  = "album-placeholder.webp"
	PlaceholderAvatar    = "logo-192x192.png"
	UICoverArtSize       = 300
	DefaultUIVolume      = 100

	DefaultHttpClientTimeOut = 10 * time.Second

	DefaultScannerExtractor = "taglib"
	DefaultWatcherWait      = 5 * time.Second
	Zwsp                    = string('\u200b') // Unicode zero-width space
)
|
||||||
|
|
||||||
|
// Prometheus options
|
||||||
|
const (
|
||||||
|
PrometheusDefaultPath = "/metrics"
|
||||||
|
PrometheusAuthUser = "navidrome"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Cache options
|
||||||
|
const (
|
||||||
|
TranscodingCacheDir = "transcoding"
|
||||||
|
DefaultTranscodingCacheMaxItems = 0 // Unlimited
|
||||||
|
|
||||||
|
ImageCacheDir = "images"
|
||||||
|
DefaultImageCacheMaxItems = 0 // Unlimited
|
||||||
|
|
||||||
|
DefaultCacheSize = 100 * 1024 * 1024 // 100MB
|
||||||
|
DefaultCacheCleanUpInterval = 10 * time.Minute
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
AlbumPlayCountModeAbsolute = "absolute"
|
||||||
|
AlbumPlayCountModeNormalized = "normalized"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
//DefaultAlbumPID = "album_legacy"
|
||||||
|
DefaultAlbumPID = "musicbrainz_albumid|albumartistid,album,albumversion,releasedate"
|
||||||
|
DefaultTrackPID = "musicbrainz_trackid|albumid,discnumber,tracknumber,title"
|
||||||
|
PIDAlbumKey = "PIDAlbum"
|
||||||
|
PIDTrackKey = "PIDTrack"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
InsightsIDKey = "InsightsID"
|
||||||
|
InsightsEndpoint = "https://insights.navidrome.org/collect"
|
||||||
|
InsightsUpdateInterval = 24 * time.Hour
|
||||||
|
InsightsInitialDelay = 30 * time.Minute
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
PurgeMissingNever = "never"
|
||||||
|
PurgeMissingAlways = "always"
|
||||||
|
PurgeMissingFull = "full"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
DefaultDownsamplingFormat = "opus"
|
||||||
|
DefaultTranscodings = []struct {
|
||||||
|
Name string
|
||||||
|
TargetFormat string
|
||||||
|
DefaultBitRate int
|
||||||
|
Command string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
Name: "mp3 audio",
|
||||||
|
TargetFormat: "mp3",
|
||||||
|
DefaultBitRate: 192,
|
||||||
|
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -f mp3 -",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "opus audio",
|
||||||
|
TargetFormat: "opus",
|
||||||
|
DefaultBitRate: 128,
|
||||||
|
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a libopus -f opus -",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "aac audio",
|
||||||
|
TargetFormat: "aac",
|
||||||
|
DefaultBitRate: 256,
|
||||||
|
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f adts -",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
VariousArtists = "Various Artists"
|
||||||
|
// TODO This will be dynamic when using disambiguation
|
||||||
|
VariousArtistsID = "63sqASlAfjbGMuLP4JhnZU"
|
||||||
|
UnknownAlbum = "[Unknown Album]"
|
||||||
|
UnknownArtist = "[Unknown Artist]"
|
||||||
|
// TODO This will be dynamic when using disambiguation
|
||||||
|
UnknownArtistID = id.NewHash(strings.ToLower(UnknownArtist))
|
||||||
|
VariousArtistsMbzId = "89ad4ac3-39f7-470e-963a-56509c546377"
|
||||||
|
|
||||||
|
ArtistJoiner = " • "
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
ServerStart = time.Now()
|
||||||
|
|
||||||
|
InContainer = func() bool {
|
||||||
|
// Check if the /.nddockerenv file exists
|
||||||
|
if _, err := os.Stat("/.nddockerenv"); err == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}()
|
||||||
|
)
|
||||||
26
consts/version.go
Normal file
26
consts/version.go
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
package consts
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
// This will be set in build time. If not, version will be set to "dev"
|
||||||
|
gitTag string
|
||||||
|
gitSha string
|
||||||
|
)
|
||||||
|
|
||||||
|
// Version holds the version string, with tag and git sha info.
|
||||||
|
// Examples:
|
||||||
|
// dev
|
||||||
|
// v0.2.0 (5b84188)
|
||||||
|
// v0.3.2-SNAPSHOT (715f552)
|
||||||
|
// master (9ed35cb)
|
||||||
|
var Version = func() string {
|
||||||
|
if gitSha == "" {
|
||||||
|
return "dev"
|
||||||
|
}
|
||||||
|
gitTag = strings.TrimPrefix(gitTag, "v")
|
||||||
|
return fmt.Sprintf("%s (%s)", gitTag, gitSha)
|
||||||
|
}()
|
||||||
7
contrib/docker-compose/Caddyfile
Normal file
7
contrib/docker-compose/Caddyfile
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
https://your.website {
|
||||||
|
reverse_proxy * navidrome:4533 {
|
||||||
|
header_up Host {http.reverse_proxy.upstream.hostport}
|
||||||
|
header_up X-Forwarded-For {http.request.remote}
|
||||||
|
header_up X-Real-IP {http.reverse_proxy.upstream.port}
|
||||||
|
}
|
||||||
|
}
|
||||||
31
contrib/docker-compose/docker-compose-caddy.yml
Normal file
31
contrib/docker-compose/docker-compose-caddy.yml
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
version: '3.6'
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
caddy_data:
|
||||||
|
navidrome_data:
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
caddy:
|
||||||
|
container_name: "caddy"
|
||||||
|
image: caddy:2.6-alpine
|
||||||
|
restart: unless-stopped
|
||||||
|
read_only: true
|
||||||
|
volumes:
|
||||||
|
- "caddy_data:/data:rw"
|
||||||
|
- "./Caddyfile:/etc/caddy/Caddyfile:ro"
|
||||||
|
ports:
|
||||||
|
- "80:80"
|
||||||
|
- "443:443"
|
||||||
|
|
||||||
|
navidrome:
|
||||||
|
container_name: "navidrome"
|
||||||
|
image: deluan/navidrome:latest
|
||||||
|
restart: unless-stopped
|
||||||
|
read_only: true
|
||||||
|
# user: 1000:1000
|
||||||
|
ports:
|
||||||
|
- "4533:4533"
|
||||||
|
volumes:
|
||||||
|
- "navidrome_data:/data"
|
||||||
|
#- "/mnt/music:/music:ro"
|
||||||
51
contrib/docker-compose/docker-compose-traefik.yml
Normal file
51
contrib/docker-compose/docker-compose-traefik.yml
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
version: "3.6"
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
traefik_data:
|
||||||
|
navidrome_data:
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
traefik:
|
||||||
|
container_name: "traefik"
|
||||||
|
image: traefik:2.9
|
||||||
|
restart: unless-stopped
|
||||||
|
read_only: true
|
||||||
|
command:
|
||||||
|
- "--log.level=ERROR"
|
||||||
|
- "--providers.docker=true"
|
||||||
|
- "--providers.docker.exposedbydefault=false"
|
||||||
|
- "--entrypoints.websecure.address=:443"
|
||||||
|
- "--certificatesresolvers.tc.acme.tlschallenge=true"
|
||||||
|
#- "--certificatesresolvers.tc.acme.caserver=https://acme-staging-v02.api.letsencrypt.org/directory"
|
||||||
|
- "--certificatesresolvers.tc.acme.email=foo@foo.com"
|
||||||
|
- "--certificatesresolvers.tc.acme.storage=/letsencrypt/acme.json"
|
||||||
|
ports:
|
||||||
|
- "443:443"
|
||||||
|
volumes:
|
||||||
|
- "traefik_data:/letsencrypt"
|
||||||
|
#- "/var/run/docker.sock:/var/run/docker.sock:ro"
|
||||||
|
|
||||||
|
navidrome:
|
||||||
|
container_name: "navidrome"
|
||||||
|
image: deluan/navidrome:latest
|
||||||
|
restart: unless-stopped
|
||||||
|
read_only: true
|
||||||
|
# user: 1000:1000
|
||||||
|
ports:
|
||||||
|
- "4533:4533"
|
||||||
|
environment:
|
||||||
|
ND_SCANINTERVAL: 6h
|
||||||
|
ND_LOGLEVEL: info
|
||||||
|
ND_SESSIONTIMEOUT: 168h
|
||||||
|
ND_BASEURL: ""
|
||||||
|
volumes:
|
||||||
|
- "navidrome_data:/data"
|
||||||
|
#- "/mnt/music:/music:ro"
|
||||||
|
labels:
|
||||||
|
- "traefik.enable=true"
|
||||||
|
- "traefik.http.routers.navidrome.rule=Host(`foo.com`)"
|
||||||
|
- "traefik.http.routers.navidrome.entrypoints=websecure"
|
||||||
|
- "traefik.http.routers.navidrome.tls=true"
|
||||||
|
- "traefik.http.routers.navidrome.tls.certresolver=tc"
|
||||||
|
- "traefik.http.services.navidrome.loadbalancer.server.port=4533"
|
||||||
52
contrib/freebsd_rc
Normal file
52
contrib/freebsd_rc
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
#
|
||||||
|
# $FreeBSD: $
|
||||||
|
#
|
||||||
|
# PROVIDE: navidrome
|
||||||
|
# REQUIRE: NETWORKING
|
||||||
|
# KEYWORD:
|
||||||
|
#
|
||||||
|
# Add the following lines to /etc/rc.conf to enable navidrome:
|
||||||
|
# navidrome_enable="YES"
|
||||||
|
#
|
||||||
|
# navidrome_enable (bool): Set to YES to enable navidrome
|
||||||
|
# Default: NO
|
||||||
|
# navidrome_config (str): navidrome configuration file
|
||||||
|
# Default: /usr/local/etc/navidrome/config.toml
|
||||||
|
# navidrome_datafolder (str): navidrome Folder to store application data
|
||||||
|
# Default: www
|
||||||
|
# navidrome_user (str): navidrome daemon user
|
||||||
|
# Default: www
|
||||||
|
# navidrome_group (str): navidrome daemon group
|
||||||
|
# Default: www
|
||||||
|
|
||||||
|
. /etc/rc.subr
|
||||||
|
|
||||||
|
name="navidrome"
|
||||||
|
rcvar="navidrome_enable"
|
||||||
|
load_rc_config $name
|
||||||
|
|
||||||
|
: ${navidrome_user:="www"}
|
||||||
|
: ${navidrome_group:="www"}
|
||||||
|
: ${navidrome_enable:="NO"}
|
||||||
|
: ${navidrome_config:="/usr/local/etc/navidrome/config.toml"}
|
||||||
|
: ${navidrome_flags=""}
|
||||||
|
: ${navidrome_facility:="daemon"}
|
||||||
|
: ${navidrome_priority:="debug"}
|
||||||
|
: ${navidrome_datafolder:="/var/db/${name}"}
|
||||||
|
|
||||||
|
required_dirs=${navidrome_datafolder}
|
||||||
|
required_files=${navidrome_config}
|
||||||
|
procname="/usr/local/bin/${name}"
|
||||||
|
pidfile="/var/run/${name}.pid"
|
||||||
|
start_precmd="${name}_precmd"
|
||||||
|
command=/usr/sbin/daemon
|
||||||
|
command_args="-S -l ${navidrome_facility} -s ${navidrome_priority} -T ${name} -t ${name} -p ${pidfile} \
|
||||||
|
${procname} --configfile ${navidrome_config} --datafolder ${navidrome_datafolder} ${navidrome_flags}"
|
||||||
|
|
||||||
|
navidrome_precmd()
|
||||||
|
{
|
||||||
|
install -o ${navidrome_user} /dev/null ${pidfile}
|
||||||
|
}
|
||||||
|
|
||||||
|
run_rc_command "$1"
|
||||||
11
contrib/k8s/README.md
Normal file
11
contrib/k8s/README.md
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# Kubernetes
|
||||||
|
|
||||||
|
A couple things to keep in mind with this manifest:
|
||||||
|
|
||||||
|
1. This creates a namespace called `navidrome`. Adjust this as needed.
|
||||||
|
1. This manifest was created on [K3s](https://github.com/k3s-io/k3s), which uses its own storage provisioner called [local-path-provisioner](https://github.com/rancher/local-path-provisioner). Be sure to change the `storageClassName` of the `PersistentVolumeClaim` as needed.
|
||||||
|
1. The `PersistentVolumeClaim` sets up a 2Gi volume for Navidrome's database. Adjust this as needed.
|
||||||
|
1. Be sure to change the `image` tag from `ghcr.io/navidrome/navidrome:0.49.3` to whatever the newest version is.
|
||||||
|
1. This assumes your music is mounted on the host using `hostPath` at `/path/to/your/music/on/the/host`. Adjust this as needed.
|
||||||
|
1. The `Ingress` is already configured for `cert-manager` to obtain a Let's Encrypt TLS certificate and uses Traefik for routing. Adjust this as needed.
|
||||||
|
1. The `Ingress` presents the service at `navidrome.${SECRET_INTERNAL_DOMAIN_NAME}`, which needs to already be setup in DNS.
|
||||||
111
contrib/k8s/manifest.yml
Normal file
111
contrib/k8s/manifest.yml
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Namespace
|
||||||
|
metadata:
|
||||||
|
name: navidrome
|
||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: PersistentVolumeClaim
|
||||||
|
metadata:
|
||||||
|
name: navidrome-data-pvc
|
||||||
|
namespace: navidrome
|
||||||
|
annotations:
|
||||||
|
volumeType: local
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
- ReadWriteOnce
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: 2Gi
|
||||||
|
storageClassName: local-path
|
||||||
|
---
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: navidrome-deployment
|
||||||
|
namespace: navidrome
|
||||||
|
spec:
|
||||||
|
replicas: 1
|
||||||
|
revisionHistoryLimit: 2
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: navidrome
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
app: navidrome
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: navidrome
|
||||||
|
image: ghcr.io/navidrome/navidrome:0.49.3
|
||||||
|
ports:
|
||||||
|
- containerPort: 4533
|
||||||
|
env:
|
||||||
|
- name: ND_SCANSCHEDULE
|
||||||
|
value: "12h"
|
||||||
|
- name: ND_SESSIONTIMEOUT
|
||||||
|
value: "24h"
|
||||||
|
- name: ND_LOGLEVEL
|
||||||
|
value: "info"
|
||||||
|
- name: ND_ENABLETRANSCODINGCONFIG
|
||||||
|
value: "false"
|
||||||
|
- name: ND_TRANSCODINGCACHESIZE
|
||||||
|
value: "512MB"
|
||||||
|
- name: ND_ENABLESTARRATING
|
||||||
|
value: "false"
|
||||||
|
- name: ND_ENABLEFAVOURITES
|
||||||
|
value: "false"
|
||||||
|
volumeMounts:
|
||||||
|
- name: data
|
||||||
|
mountPath: /data
|
||||||
|
- name: music
|
||||||
|
mountPath: /music
|
||||||
|
readOnly: true
|
||||||
|
volumes:
|
||||||
|
- name: data
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: navidrome-data-pvc
|
||||||
|
- name: music
|
||||||
|
hostPath:
|
||||||
|
path: /path/to/your/music/on/the/host
|
||||||
|
type: Directory
|
||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: navidrome-service
|
||||||
|
namespace: navidrome
|
||||||
|
spec:
|
||||||
|
type: ClusterIP
|
||||||
|
ports:
|
||||||
|
- name: http
|
||||||
|
targetPort: 4533
|
||||||
|
port: 4533
|
||||||
|
protocol: TCP
|
||||||
|
selector:
|
||||||
|
app: navidrome
|
||||||
|
---
|
||||||
|
apiVersion: networking.k8s.io/v1
|
||||||
|
kind: Ingress
|
||||||
|
metadata:
|
||||||
|
name: navidrome-ingress
|
||||||
|
namespace: navidrome
|
||||||
|
annotations:
|
||||||
|
cert-manager.io/cluster-issuer: letsencrypt-production
|
||||||
|
traefik.ingress.kubernetes.io/router.tls: "true"
|
||||||
|
spec:
|
||||||
|
rules:
|
||||||
|
- host: navidrome.${SECRET_INTERNAL_DOMAIN_NAME}
|
||||||
|
http:
|
||||||
|
paths:
|
||||||
|
- path: /
|
||||||
|
pathType: Prefix
|
||||||
|
backend:
|
||||||
|
service:
|
||||||
|
name: navidrome-service
|
||||||
|
port:
|
||||||
|
number: 4533
|
||||||
|
tls:
|
||||||
|
- hosts:
|
||||||
|
- navidrome.${SECRET_INTERNAL_DOMAIN_NAME}
|
||||||
|
secretName: navidrome-tls
|
||||||
15
contrib/navidrome
Normal file
15
contrib/navidrome
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/sbin/openrc-run
|
||||||
|
|
||||||
|
name=$RC_SVCNAME
|
||||||
|
command="/opt/navidrome/${RC_SVCNAME}"
|
||||||
|
command_args="-datafolder /opt/navidrome"
|
||||||
|
command_user="${RC_SVCNAME}"
|
||||||
|
pidfile="/var/run/${RC_SVCNAME}.pid"
|
||||||
|
output_log="/opt/navidrome/${RC_SVCNAME}.log"
|
||||||
|
error_log="/opt/navidrome/${RC_SVCNAME}.err"
|
||||||
|
command_background="yes"
|
||||||
|
|
||||||
|
depend() {
|
||||||
|
need net
|
||||||
|
}
|
||||||
|
|
||||||
54
contrib/navidrome.service
Normal file
54
contrib/navidrome.service
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# This file ususaly goes in /etc/systemd/system
|
||||||
|
|
||||||
|
[Unit]
|
||||||
|
Description=Navidrome Music Server and Streamer compatible with Subsonic/Airsonic
|
||||||
|
After=remote-fs.target network.target
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
User=navidrome
|
||||||
|
Group=navidrome
|
||||||
|
Type=simple
|
||||||
|
ExecStart=/usr/bin/navidrome --configfile "/etc/navidrome/navidrome.toml"
|
||||||
|
StateDirectory=navidrome
|
||||||
|
WorkingDirectory=/var/lib/navidrome
|
||||||
|
TimeoutStopSec=20
|
||||||
|
KillMode=process
|
||||||
|
Restart=on-failure
|
||||||
|
|
||||||
|
# See https://www.freedesktop.org/software/systemd/man/systemd.exec.html
|
||||||
|
CapabilityBoundingSet=
|
||||||
|
DevicePolicy=closed
|
||||||
|
NoNewPrivileges=yes
|
||||||
|
LockPersonality=yes
|
||||||
|
PrivateTmp=yes
|
||||||
|
PrivateUsers=yes
|
||||||
|
ProtectControlGroups=yes
|
||||||
|
ProtectKernelModules=yes
|
||||||
|
ProtectKernelTunables=yes
|
||||||
|
ProtectClock=yes
|
||||||
|
ProtectHostname=yes
|
||||||
|
ProtectKernelLogs=yes
|
||||||
|
RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6
|
||||||
|
RestrictNamespaces=yes
|
||||||
|
RestrictRealtime=yes
|
||||||
|
SystemCallFilter=@system-service
|
||||||
|
SystemCallFilter=~@privileged @resources
|
||||||
|
SystemCallFilter=setrlimit
|
||||||
|
SystemCallArchitectures=native
|
||||||
|
UMask=0066
|
||||||
|
|
||||||
|
# You can uncomment the following line if you're not using the jukebox This
|
||||||
|
# will prevent navidrome from accessing any real (physical) devices
|
||||||
|
#PrivateDevices=yes
|
||||||
|
|
||||||
|
# You can change the following line to `strict` instead of `full` if you don't
|
||||||
|
# want navidrome to be able to write anything on your filesystem outside of
|
||||||
|
# /var/lib/navidrome.
|
||||||
|
ProtectSystem=full
|
||||||
|
|
||||||
|
# You can comment the following line if you don't have any media in /home/*.
|
||||||
|
# This will prevent navidrome from ever reading/writing anything there.
|
||||||
|
ProtectHome=true
|
||||||
12
core/agents/README.md
Normal file
12
core/agents/README.md
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
This folder abstracts metadata lookup into "agents". Each agent can be implemented to get as
|
||||||
|
much info as the external source provides, by using a granular set of interfaces
|
||||||
|
(see [interfaces](interfaces.go)).
|
||||||
|
|
||||||
|
A new agent must comply with these simple implementation rules:
|
||||||
|
1) Implement the `AgentName()` method. It just returns the name of the agent for logging purposes.
|
||||||
|
2) Implement one or more of the `*Retriever()` interfaces. That's where the agent's logic resides.
|
||||||
|
3) Register itself (in its `init()` function).
|
||||||
|
|
||||||
|
For an agent to be used it needs to be listed in the `Agents` config option (default is `"lastfm,spotify"`). The order dictates the priority of the agents
|
||||||
|
|
||||||
|
For a simple Agent example, look at the [local_agent](local_agent.go) agent source code.
|
||||||
374
core/agents/agents.go
Normal file
374
core/agents/agents.go
Normal file
@@ -0,0 +1,374 @@
|
|||||||
|
package agents
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"slices"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/consts"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/utils"
|
||||||
|
"github.com/navidrome/navidrome/utils/singleton"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PluginLoader defines an interface for loading plugins
|
||||||
|
type PluginLoader interface {
|
||||||
|
// PluginNames returns the names of all plugins that implement a particular service
|
||||||
|
PluginNames(capability string) []string
|
||||||
|
// LoadMediaAgent loads and returns a media agent plugin
|
||||||
|
LoadMediaAgent(name string) (Interface, bool)
|
||||||
|
}
|
||||||
|
|
||||||
|
type Agents struct {
|
||||||
|
ds model.DataStore
|
||||||
|
pluginLoader PluginLoader
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAgents returns the singleton instance of Agents
|
||||||
|
func GetAgents(ds model.DataStore, pluginLoader PluginLoader) *Agents {
|
||||||
|
return singleton.GetInstance(func() *Agents {
|
||||||
|
return createAgents(ds, pluginLoader)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// createAgents creates a new Agents instance. Used in tests
|
||||||
|
func createAgents(ds model.DataStore, pluginLoader PluginLoader) *Agents {
|
||||||
|
return &Agents{
|
||||||
|
ds: ds,
|
||||||
|
pluginLoader: pluginLoader,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// enabledAgent represents an enabled agent with its type information
|
||||||
|
type enabledAgent struct {
|
||||||
|
name string
|
||||||
|
isPlugin bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// getEnabledAgentNames returns the current list of enabled agents, including:
|
||||||
|
// 1. Built-in agents and plugins from config (in the specified order)
|
||||||
|
// 2. Always include LocalAgentName
|
||||||
|
// 3. If config is empty, include ONLY LocalAgentName
|
||||||
|
// Each enabledAgent contains the name and whether it's a plugin (true) or built-in (false)
|
||||||
|
func (a *Agents) getEnabledAgentNames() []enabledAgent {
|
||||||
|
// If no agents configured, ONLY use the local agent
|
||||||
|
if conf.Server.Agents == "" {
|
||||||
|
return []enabledAgent{{name: LocalAgentName, isPlugin: false}}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get all available plugin names
|
||||||
|
var availablePlugins []string
|
||||||
|
if a.pluginLoader != nil {
|
||||||
|
availablePlugins = a.pluginLoader.PluginNames("MetadataAgent")
|
||||||
|
}
|
||||||
|
|
||||||
|
configuredAgents := strings.Split(conf.Server.Agents, ",")
|
||||||
|
|
||||||
|
// Always add LocalAgentName if not already included
|
||||||
|
hasLocalAgent := slices.Contains(configuredAgents, LocalAgentName)
|
||||||
|
if !hasLocalAgent {
|
||||||
|
configuredAgents = append(configuredAgents, LocalAgentName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter to only include valid agents (built-in or plugins)
|
||||||
|
var validAgents []enabledAgent
|
||||||
|
for _, name := range configuredAgents {
|
||||||
|
// Check if it's a built-in agent
|
||||||
|
isBuiltIn := Map[name] != nil
|
||||||
|
|
||||||
|
// Check if it's a plugin
|
||||||
|
isPlugin := slices.Contains(availablePlugins, name)
|
||||||
|
|
||||||
|
if isBuiltIn {
|
||||||
|
validAgents = append(validAgents, enabledAgent{name: name, isPlugin: false})
|
||||||
|
} else if isPlugin {
|
||||||
|
validAgents = append(validAgents, enabledAgent{name: name, isPlugin: true})
|
||||||
|
} else {
|
||||||
|
log.Debug("Unknown agent ignored", "name", name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return validAgents
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) getAgent(ea enabledAgent) Interface {
|
||||||
|
if ea.isPlugin {
|
||||||
|
// Try to load WASM plugin agent (if plugin loader is available)
|
||||||
|
if a.pluginLoader != nil {
|
||||||
|
agent, ok := a.pluginLoader.LoadMediaAgent(ea.name)
|
||||||
|
if ok && agent != nil {
|
||||||
|
return agent
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Try to get built-in agent
|
||||||
|
constructor, ok := Map[ea.name]
|
||||||
|
if ok {
|
||||||
|
agent := constructor(a.ds)
|
||||||
|
if agent != nil {
|
||||||
|
return agent
|
||||||
|
}
|
||||||
|
log.Debug("Built-in agent not available. Missing configuration?", "name", ea.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) AgentName() string {
|
||||||
|
return "agents"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
|
||||||
|
switch id {
|
||||||
|
case consts.UnknownArtistID:
|
||||||
|
return "", ErrNotFound
|
||||||
|
case consts.VariousArtistsID:
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(ArtistMBIDRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
mbid, err := retriever.GetArtistMBID(ctx, id, name)
|
||||||
|
if mbid != "" && err == nil {
|
||||||
|
log.Debug(ctx, "Got MBID", "agent", ag.AgentName(), "artist", name, "mbid", mbid, "elapsed", time.Since(start))
|
||||||
|
return mbid, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
|
||||||
|
switch id {
|
||||||
|
case consts.UnknownArtistID:
|
||||||
|
return "", ErrNotFound
|
||||||
|
case consts.VariousArtistsID:
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(ArtistURLRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
url, err := retriever.GetArtistURL(ctx, id, name, mbid)
|
||||||
|
if url != "" && err == nil {
|
||||||
|
log.Debug(ctx, "Got External Url", "agent", ag.AgentName(), "artist", name, "url", url, "elapsed", time.Since(start))
|
||||||
|
return url, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
|
||||||
|
switch id {
|
||||||
|
case consts.UnknownArtistID:
|
||||||
|
return "", ErrNotFound
|
||||||
|
case consts.VariousArtistsID:
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(ArtistBiographyRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
bio, err := retriever.GetArtistBiography(ctx, id, name, mbid)
|
||||||
|
if err == nil {
|
||||||
|
log.Debug(ctx, "Got Biography", "agent", ag.AgentName(), "artist", name, "len", len(bio), "elapsed", time.Since(start))
|
||||||
|
return bio, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetSimilarArtists returns similar artists by id, name, and/or mbid. Because some artists returned from an enabled
|
||||||
|
// agent may not exist in the database, return at most limit * conf.Server.DevExternalArtistFetchMultiplier items.
|
||||||
|
func (a *Agents) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]Artist, error) {
|
||||||
|
switch id {
|
||||||
|
case consts.UnknownArtistID:
|
||||||
|
return nil, ErrNotFound
|
||||||
|
case consts.VariousArtistsID:
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
overLimit := int(float64(limit) * conf.Server.DevExternalArtistFetchMultiplier)
|
||||||
|
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(ArtistSimilarRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
similar, err := retriever.GetSimilarArtists(ctx, id, name, mbid, overLimit)
|
||||||
|
if len(similar) > 0 && err == nil {
|
||||||
|
if log.IsGreaterOrEqualTo(log.LevelTrace) {
|
||||||
|
log.Debug(ctx, "Got Similar Artists", "agent", ag.AgentName(), "artist", name, "similar", similar, "elapsed", time.Since(start))
|
||||||
|
} else {
|
||||||
|
log.Debug(ctx, "Got Similar Artists", "agent", ag.AgentName(), "artist", name, "similarReceived", len(similar), "elapsed", time.Since(start))
|
||||||
|
}
|
||||||
|
return similar, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) GetArtistImages(ctx context.Context, id, name, mbid string) ([]ExternalImage, error) {
|
||||||
|
switch id {
|
||||||
|
case consts.UnknownArtistID:
|
||||||
|
return nil, ErrNotFound
|
||||||
|
case consts.VariousArtistsID:
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(ArtistImageRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
images, err := retriever.GetArtistImages(ctx, id, name, mbid)
|
||||||
|
if len(images) > 0 && err == nil {
|
||||||
|
log.Debug(ctx, "Got Images", "agent", ag.AgentName(), "artist", name, "images", images, "elapsed", time.Since(start))
|
||||||
|
return images, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetArtistTopSongs returns top songs by id, name, and/or mbid. Because some songs returned from an enabled
|
||||||
|
// agent may not exist in the database, return at most limit * conf.Server.DevExternalArtistFetchMultiplier items.
|
||||||
|
func (a *Agents) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]Song, error) {
|
||||||
|
switch id {
|
||||||
|
case consts.UnknownArtistID:
|
||||||
|
return nil, ErrNotFound
|
||||||
|
case consts.VariousArtistsID:
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
overLimit := int(float64(count) * conf.Server.DevExternalArtistFetchMultiplier)
|
||||||
|
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(ArtistTopSongsRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
songs, err := retriever.GetArtistTopSongs(ctx, id, artistName, mbid, overLimit)
|
||||||
|
if len(songs) > 0 && err == nil {
|
||||||
|
log.Debug(ctx, "Got Top Songs", "agent", ag.AgentName(), "artist", artistName, "songs", songs, "elapsed", time.Since(start))
|
||||||
|
return songs, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*AlbumInfo, error) {
|
||||||
|
if name == consts.UnknownAlbum {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(AlbumInfoRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
album, err := retriever.GetAlbumInfo(ctx, name, artist, mbid)
|
||||||
|
if err == nil {
|
||||||
|
log.Debug(ctx, "Got Album Info", "agent", ag.AgentName(), "album", name, "artist", artist,
|
||||||
|
"mbid", mbid, "elapsed", time.Since(start))
|
||||||
|
return album, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agents) GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]ExternalImage, error) {
|
||||||
|
if name == consts.UnknownAlbum {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
start := time.Now()
|
||||||
|
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||||
|
ag := a.getAgent(enabledAgent)
|
||||||
|
if ag == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.IsCtxDone(ctx) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retriever, ok := ag.(AlbumImageRetriever)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
images, err := retriever.GetAlbumImages(ctx, name, artist, mbid)
|
||||||
|
if len(images) > 0 && err == nil {
|
||||||
|
log.Debug(ctx, "Got Album Images", "agent", ag.AgentName(), "album", name, "artist", artist,
|
||||||
|
"mbid", mbid, "elapsed", time.Since(start))
|
||||||
|
return images, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ Interface = (*Agents)(nil)
|
||||||
|
var _ ArtistMBIDRetriever = (*Agents)(nil)
|
||||||
|
var _ ArtistURLRetriever = (*Agents)(nil)
|
||||||
|
var _ ArtistBiographyRetriever = (*Agents)(nil)
|
||||||
|
var _ ArtistSimilarRetriever = (*Agents)(nil)
|
||||||
|
var _ ArtistImageRetriever = (*Agents)(nil)
|
||||||
|
var _ ArtistTopSongsRetriever = (*Agents)(nil)
|
||||||
|
var _ AlbumInfoRetriever = (*Agents)(nil)
|
||||||
|
var _ AlbumImageRetriever = (*Agents)(nil)
|
||||||
281
core/agents/agents_plugin_test.go
Normal file
281
core/agents/agents_plugin_test.go
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
package agents
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/utils/slice"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MockPluginLoader implements PluginLoader for testing
type MockPluginLoader struct {
	pluginNames     []string              // plugin names reported by PluginNames
	loadedAgents    map[string]*MockAgent // agents handed out by LoadMediaAgent, keyed by plugin name
	pluginCallCount map[string]int        // how many times LoadMediaAgent was invoked per name
}
|
||||||
|
|
||||||
|
func NewMockPluginLoader() *MockPluginLoader {
|
||||||
|
return &MockPluginLoader{
|
||||||
|
pluginNames: []string{},
|
||||||
|
loadedAgents: make(map[string]*MockAgent),
|
||||||
|
pluginCallCount: make(map[string]int),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// PluginNames returns the configured plugin names.
// NOTE: serviceName is ignored by this mock; every service sees the same list.
func (m *MockPluginLoader) PluginNames(serviceName string) []string {
	return m.pluginNames
}
|
||||||
|
|
||||||
|
func (m *MockPluginLoader) LoadMediaAgent(name string) (Interface, bool) {
|
||||||
|
m.pluginCallCount[name]++
|
||||||
|
agent, exists := m.loadedAgents[name]
|
||||||
|
return agent, exists
|
||||||
|
}
|
||||||
|
|
||||||
|
// MockAgent is a minimal agent test double: it reports a fixed name and
// answers every MBID lookup with a fixed value.
type MockAgent struct {
	name string // returned by AgentName
	mbid string // returned by GetArtistMBID
}

// AgentName returns the configured agent name.
func (m *MockAgent) AgentName() string { return m.name }

// GetArtistMBID always succeeds, returning the configured MBID regardless of
// the requested artist.
func (m *MockAgent) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
	return m.mbid, nil
}
|
||||||
|
|
||||||
|
var _ Interface = (*MockAgent)(nil)
|
||||||
|
var _ ArtistMBIDRetriever = (*MockAgent)(nil)
|
||||||
|
|
||||||
|
var _ PluginLoader = (*MockPluginLoader)(nil)
|
||||||
|
|
||||||
|
// Specs covering how Agents discovers and loads plugin-provided agents:
// plugins are only enabled when explicitly listed in conf.Server.Agents,
// loading is lazy (on first use), and built-in agents never touch the loader.
// These specs mutate the global conf.Server.Agents; the suite's config
// cleanup is assumed to restore it between specs.
var _ = Describe("Agents with Plugin Loading", func() {
	var mockLoader *MockPluginLoader
	var agents *Agents

	BeforeEach(func() {
		mockLoader = NewMockPluginLoader()

		// Create the agents instance with our mock loader
		agents = createAgents(nil, mockLoader)
	})

	Context("Dynamic agent discovery", func() {
		It("should include ONLY local agent when no config is specified", func() {
			// Ensure no specific agents are configured
			conf.Server.Agents = ""

			// Add some plugin agents that should be ignored
			mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent", "another_plugin")

			// Should only include the local agent
			enabledAgents := agents.getEnabledAgentNames()
			Expect(enabledAgents).To(HaveLen(1))
			Expect(enabledAgents[0].name).To(Equal(LocalAgentName))
			Expect(enabledAgents[0].isPlugin).To(BeFalse()) // LocalAgent is built-in, not plugin
		})

		It("should NOT include plugin agents when no config is specified", func() {
			// Ensure no specific agents are configured
			conf.Server.Agents = ""

			// Add a plugin agent
			mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")

			// Should only include the local agent
			enabledAgents := agents.getEnabledAgentNames()
			Expect(enabledAgents).To(HaveLen(1))
			Expect(enabledAgents[0].name).To(Equal(LocalAgentName))
			Expect(enabledAgents[0].isPlugin).To(BeFalse()) // LocalAgent is built-in, not plugin
		})

		It("should include plugin agents in the enabled agents list ONLY when explicitly configured", func() {
			// Add a plugin agent
			mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")

			// With no config, should not include plugin
			conf.Server.Agents = ""
			enabledAgents := agents.getEnabledAgentNames()
			Expect(enabledAgents).To(HaveLen(1))
			Expect(enabledAgents[0].name).To(Equal(LocalAgentName))

			// When explicitly configured, should include plugin
			conf.Server.Agents = "plugin_agent"
			enabledAgents = agents.getEnabledAgentNames()
			var agentNames []string
			var pluginAgentFound bool
			for _, agent := range enabledAgents {
				agentNames = append(agentNames, agent.name)
				if agent.name == "plugin_agent" {
					pluginAgentFound = true
					Expect(agent.isPlugin).To(BeTrue()) // plugin_agent is a plugin
				}
			}
			Expect(agentNames).To(ContainElements(LocalAgentName, "plugin_agent"))
			Expect(pluginAgentFound).To(BeTrue())
		})

		It("should only include configured plugin agents when config is specified", func() {
			// Add two plugin agents
			mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_one", "plugin_two")

			// Configure only one of them
			conf.Server.Agents = "plugin_one"

			// Verify only the configured one is included
			enabledAgents := agents.getEnabledAgentNames()
			var agentNames []string
			var pluginOneFound bool
			for _, agent := range enabledAgents {
				agentNames = append(agentNames, agent.name)
				if agent.name == "plugin_one" {
					pluginOneFound = true
					Expect(agent.isPlugin).To(BeTrue()) // plugin_one is a plugin
				}
			}
			Expect(agentNames).To(ContainElements(LocalAgentName, "plugin_one"))
			Expect(agentNames).NotTo(ContainElement("plugin_two"))
			Expect(pluginOneFound).To(BeTrue())
		})

		It("should load plugin agents on demand", func() {
			ctx := context.Background()

			// Configure to use our plugin
			conf.Server.Agents = "plugin_agent"

			// Add a plugin agent
			mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")
			mockLoader.loadedAgents["plugin_agent"] = &MockAgent{
				name: "plugin_agent",
				mbid: "plugin-mbid",
			}

			// Try to get data from it
			mbid, err := agents.GetArtistMBID(ctx, "123", "Artist")

			Expect(err).ToNot(HaveOccurred())
			Expect(mbid).To(Equal("plugin-mbid"))
			// Exactly one load: the agent was fetched lazily, once.
			Expect(mockLoader.pluginCallCount["plugin_agent"]).To(Equal(1))
		})

		It("should try both built-in and plugin agents", func() {
			// Create a mock built-in agent
			Register("built_in", func(ds model.DataStore) Interface {
				return &MockAgent{
					name: "built_in",
					mbid: "built-in-mbid",
				}
			})
			// Keep the global registry clean for other specs.
			defer func() {
				delete(Map, "built_in")
			}()

			// Configure to use both built-in and plugin
			conf.Server.Agents = "built_in,plugin_agent"

			// Add a plugin agent
			mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")
			mockLoader.loadedAgents["plugin_agent"] = &MockAgent{
				name: "plugin_agent",
				mbid: "plugin-mbid",
			}

			// Verify that both are in the enabled list
			enabledAgents := agents.getEnabledAgentNames()
			var agentNames []string
			var builtInFound, pluginFound bool
			for _, agent := range enabledAgents {
				agentNames = append(agentNames, agent.name)
				if agent.name == "built_in" {
					builtInFound = true
					Expect(agent.isPlugin).To(BeFalse()) // built-in agent
				}
				if agent.name == "plugin_agent" {
					pluginFound = true
					Expect(agent.isPlugin).To(BeTrue()) // plugin agent
				}
			}
			Expect(agentNames).To(ContainElements("built_in", "plugin_agent", LocalAgentName))
			Expect(builtInFound).To(BeTrue())
			Expect(pluginFound).To(BeTrue())
		})

		It("should respect the order specified in configuration", func() {
			// Create mock built-in agents
			Register("agent_a", func(ds model.DataStore) Interface {
				return &MockAgent{name: "agent_a"}
			})
			Register("agent_b", func(ds model.DataStore) Interface {
				return &MockAgent{name: "agent_b"}
			})
			defer func() {
				delete(Map, "agent_a")
				delete(Map, "agent_b")
			}()

			// Add plugin agents
			mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_x", "plugin_y")

			// Configure specific order - plugin first, then built-ins
			conf.Server.Agents = "plugin_y,agent_b,plugin_x,agent_a"

			// Get the agent names
			enabledAgents := agents.getEnabledAgentNames()

			// Extract just the names to verify the order
			agentNames := slice.Map(enabledAgents, func(a enabledAgent) string { return a.name })

			// Verify the order matches configuration, with LocalAgentName at the end
			Expect(agentNames).To(HaveExactElements("plugin_y", "agent_b", "plugin_x", "agent_a", LocalAgentName))
		})

		It("should NOT call LoadMediaAgent for built-in agents", func() {
			ctx := context.Background()

			// Create a mock built-in agent
			Register("builtin_agent", func(ds model.DataStore) Interface {
				return &MockAgent{
					name: "builtin_agent",
					mbid: "builtin-mbid",
				}
			})
			defer func() {
				delete(Map, "builtin_agent")
			}()

			// Configure to use only built-in agents
			conf.Server.Agents = "builtin_agent"

			// Call GetArtistMBID which should only use the built-in agent
			mbid, err := agents.GetArtistMBID(ctx, "123", "Artist")

			Expect(err).ToNot(HaveOccurred())
			Expect(mbid).To(Equal("builtin-mbid"))

			// Verify LoadMediaAgent was NEVER called (no plugin loading for built-in agents)
			Expect(mockLoader.pluginCallCount).To(BeEmpty())
		})

		It("should NOT call LoadMediaAgent for invalid agent names", func() {
			ctx := context.Background()

			// Configure with an invalid agent name (not built-in, not a plugin)
			conf.Server.Agents = "invalid_agent"

			// This should only result in using the local agent (as the invalid one is ignored)
			_, err := agents.GetArtistMBID(ctx, "123", "Artist")

			// Should get ErrNotFound since only local agent is available and it returns not found for this operation
			Expect(err).To(MatchError(ErrNotFound))

			// Verify LoadMediaAgent was NEVER called for the invalid agent
			Expect(mockLoader.pluginCallCount).To(BeEmpty())
		})
	})
})
|
||||||
17
core/agents/agents_suite_test.go
Normal file
17
core/agents/agents_suite_test.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package agents
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestAgents wires the Ginkgo specs in this package into the standard
// `go test` runner.
func TestAgents(t *testing.T) {
	tests.Init(t, false)
	// Keep spec output quiet: only fatal log entries are emitted.
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Agents Test Suite")
}
|
||||||
400
core/agents/agents_test.go
Normal file
400
core/agents/agents_test.go
Normal file
@@ -0,0 +1,400 @@
|
|||||||
|
package agents
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf/configtest"
|
||||||
|
"github.com/navidrome/navidrome/consts"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Specs for the Agents aggregator: fall-through across agents on error or
// empty result, special-casing of Various Artists / Unknown Artist, and
// cancellation via the request context.
var _ = Describe("Agents", func() {
	var ctx context.Context
	var cancel context.CancelFunc
	var ds model.DataStore
	var mfRepo *tests.MockMediaFileRepo
	BeforeEach(func() {
		DeferCleanup(configtest.SetupConfig())
		ctx, cancel = context.WithCancel(context.Background())
		mfRepo = tests.CreateMockMediaFileRepo()
		ds = &tests.MockDataStore{MockedMediaFile: mfRepo}
	})

	Describe("Local", func() {
		var ag *Agents
		BeforeEach(func() {
			conf.Server.Agents = ""
			ag = createAgents(ds, nil)
		})

		// NOTE(review): the spec title mentions GetArtistImages but the body
		// exercises GetArtistTopSongs — likely a stale title; confirm intent.
		It("calls the placeholder GetArtistImages", func() {
			mfRepo.SetData(model.MediaFiles{{ID: "1", Title: "One", MbzReleaseTrackID: "111"}, {ID: "2", Title: "Two", MbzReleaseTrackID: "222"}})
			songs, err := ag.GetArtistTopSongs(ctx, "123", "John Doe", "mb123", 2)
			Expect(err).ToNot(HaveOccurred())
			Expect(songs).To(ConsistOf([]Song{{Name: "One", MBID: "111"}, {Name: "Two", MBID: "222"}}))
		})
	})

	Describe("Agents", func() {
		var ag *Agents
		var mock *mockAgent
		BeforeEach(func() {
			mock = &mockAgent{}
			// "fake" answers everything, "disabled" registers as nil, and
			// "empty" implements no retriever interfaces.
			Register("fake", func(model.DataStore) Interface { return mock })
			Register("disabled", func(model.DataStore) Interface { return nil })
			Register("empty", func(model.DataStore) Interface { return &emptyAgent{} })
			conf.Server.Agents = "empty,fake,disabled"
			ag = createAgents(ds, nil)
			Expect(ag.AgentName()).To(Equal("agents"))
		})

		It("does not register disabled agents", func() {
			var ags []string
			for _, enabledAgent := range ag.getEnabledAgentNames() {
				agent := ag.getAgent(enabledAgent)
				if agent != nil {
					ags = append(ags, agent.AgentName())
				}
			}
			// local agent is always appended to the end of the agents list
			Expect(ags).To(HaveExactElements("empty", "fake", "local"))
			Expect(ags).ToNot(ContainElement("disabled"))
		})

		Describe("GetArtistMBID", func() {
			It("returns on first match", func() {
				Expect(ag.GetArtistMBID(ctx, "123", "test")).To(Equal("mbid"))
				Expect(mock.Args).To(HaveExactElements("123", "test"))
			})
			It("returns empty if artist is Various Artists", func() {
				mbid, err := ag.GetArtistMBID(ctx, consts.VariousArtistsID, consts.VariousArtists)
				Expect(err).ToNot(HaveOccurred())
				Expect(mbid).To(BeEmpty())
				Expect(mock.Args).To(BeEmpty())
			})
			// NOTE(review): this spec reuses the Various Artists constants, so
			// it duplicates the previous one instead of exercising Unknown
			// Artist — likely a copy-paste slip; should use consts.UnknownArtist.
			It("returns not found if artist is Unknown Artist", func() {
				mbid, err := ag.GetArtistMBID(ctx, consts.VariousArtistsID, consts.VariousArtists)
				Expect(err).ToNot(HaveOccurred())
				Expect(mbid).To(BeEmpty())
				Expect(mock.Args).To(BeEmpty())
			})
			It("skips the agent if it returns an error", func() {
				mock.Err = errors.New("error")
				_, err := ag.GetArtistMBID(ctx, "123", "test")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(HaveExactElements("123", "test"))
			})
			It("interrupts if the context is canceled", func() {
				cancel()
				_, err := ag.GetArtistMBID(ctx, "123", "test")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(BeEmpty())
			})
		})

		Describe("GetArtistURL", func() {
			It("returns on first match", func() {
				Expect(ag.GetArtistURL(ctx, "123", "test", "mb123")).To(Equal("url"))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123"))
			})
			It("returns empty if artist is Various Artists", func() {
				url, err := ag.GetArtistURL(ctx, consts.VariousArtistsID, consts.VariousArtists, "")
				Expect(err).ToNot(HaveOccurred())
				Expect(url).To(BeEmpty())
				Expect(mock.Args).To(BeEmpty())
			})
			// NOTE(review): same copy-paste issue as GetArtistMBID — Various
			// Artists constants used where Unknown Artist was intended.
			It("returns not found if artist is Unknown Artist", func() {
				url, err := ag.GetArtistURL(ctx, consts.VariousArtistsID, consts.VariousArtists, "")
				Expect(err).ToNot(HaveOccurred())
				Expect(url).To(BeEmpty())
				Expect(mock.Args).To(BeEmpty())
			})
			It("skips the agent if it returns an error", func() {
				mock.Err = errors.New("error")
				_, err := ag.GetArtistURL(ctx, "123", "test", "mb123")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123"))
			})
			It("interrupts if the context is canceled", func() {
				cancel()
				_, err := ag.GetArtistURL(ctx, "123", "test", "mb123")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(BeEmpty())
			})
		})

		Describe("GetArtistBiography", func() {
			It("returns on first match", func() {
				Expect(ag.GetArtistBiography(ctx, "123", "test", "mb123")).To(Equal("bio"))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123"))
			})
			It("returns empty if artist is Various Artists", func() {
				bio, err := ag.GetArtistBiography(ctx, consts.VariousArtistsID, consts.VariousArtists, "")
				Expect(err).ToNot(HaveOccurred())
				Expect(bio).To(BeEmpty())
				Expect(mock.Args).To(BeEmpty())
			})
			// NOTE(review): same copy-paste issue — Various Artists constants
			// used where Unknown Artist was intended.
			It("returns not found if artist is Unknown Artist", func() {
				bio, err := ag.GetArtistBiography(ctx, consts.VariousArtistsID, consts.VariousArtists, "")
				Expect(err).ToNot(HaveOccurred())
				Expect(bio).To(BeEmpty())
				Expect(mock.Args).To(BeEmpty())
			})
			It("skips the agent if it returns an error", func() {
				mock.Err = errors.New("error")
				_, err := ag.GetArtistBiography(ctx, "123", "test", "mb123")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123"))
			})
			It("interrupts if the context is canceled", func() {
				cancel()
				_, err := ag.GetArtistBiography(ctx, "123", "test", "mb123")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(BeEmpty())
			})
		})

		Describe("GetArtistImages", func() {
			It("returns on first match", func() {
				Expect(ag.GetArtistImages(ctx, "123", "test", "mb123")).To(Equal([]ExternalImage{{
					URL:  "imageUrl",
					Size: 100,
				}}))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123"))
			})
			It("skips the agent if it returns an error", func() {
				mock.Err = errors.New("error")
				_, err := ag.GetArtistImages(ctx, "123", "test", "mb123")
				// NOTE(review): matches the error string instead of the
				// ErrNotFound sentinel used by sibling specs — consider
				// MatchError(ErrNotFound) for consistency.
				Expect(err).To(MatchError("not found"))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123"))
			})
			It("interrupts if the context is canceled", func() {
				cancel()
				_, err := ag.GetArtistImages(ctx, "123", "test", "mb123")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(BeEmpty())
			})

			Context("with multiple image agents", func() {
				var first *testImageAgent
				var second *testImageAgent

				BeforeEach(func() {
					first = &testImageAgent{Name: "imgFail", Err: errors.New("fail")}
					second = &testImageAgent{Name: "imgOk", Images: []ExternalImage{{URL: "ok", Size: 1}}}
					Register("imgFail", func(model.DataStore) Interface { return first })
					Register("imgOk", func(model.DataStore) Interface { return second })
				})

				It("falls back to the next agent on error", func() {
					conf.Server.Agents = "imgFail,imgOk"
					ag = createAgents(ds, nil)

					images, err := ag.GetArtistImages(ctx, "id", "artist", "mbid")
					Expect(err).ToNot(HaveOccurred())
					Expect(images).To(Equal([]ExternalImage{{URL: "ok", Size: 1}}))
					Expect(first.Args).To(HaveExactElements("id", "artist", "mbid"))
					Expect(second.Args).To(HaveExactElements("id", "artist", "mbid"))
				})

				It("falls back if the first agent returns no images", func() {
					first.Err = nil
					first.Images = []ExternalImage{}
					conf.Server.Agents = "imgFail,imgOk"
					ag = createAgents(ds, nil)

					images, err := ag.GetArtistImages(ctx, "id", "artist", "mbid")
					Expect(err).ToNot(HaveOccurred())
					Expect(images).To(Equal([]ExternalImage{{URL: "ok", Size: 1}}))
					Expect(first.Args).To(HaveExactElements("id", "artist", "mbid"))
					Expect(second.Args).To(HaveExactElements("id", "artist", "mbid"))
				})
			})
		})

		Describe("GetSimilarArtists", func() {
			It("returns on first match", func() {
				Expect(ag.GetSimilarArtists(ctx, "123", "test", "mb123", 1)).To(Equal([]Artist{{
					Name: "Joe Dohn",
					MBID: "mbid321",
				}}))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123", 1))
			})
			It("skips the agent if it returns an error", func() {
				mock.Err = errors.New("error")
				_, err := ag.GetSimilarArtists(ctx, "123", "test", "mb123", 1)
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123", 1))
			})
			It("interrupts if the context is canceled", func() {
				cancel()
				_, err := ag.GetSimilarArtists(ctx, "123", "test", "mb123", 1)
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(BeEmpty())
			})
		})

		Describe("GetArtistTopSongs", func() {
			It("returns on first match", func() {
				conf.Server.DevExternalArtistFetchMultiplier = 1
				Expect(ag.GetArtistTopSongs(ctx, "123", "test", "mb123", 2)).To(Equal([]Song{{
					Name: "A Song",
					MBID: "mbid444",
				}}))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123", 2))
			})
			It("skips the agent if it returns an error", func() {
				conf.Server.DevExternalArtistFetchMultiplier = 1
				mock.Err = errors.New("error")
				_, err := ag.GetArtistTopSongs(ctx, "123", "test", "mb123", 2)
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123", 2))
			})
			It("interrupts if the context is canceled", func() {
				cancel()
				_, err := ag.GetArtistTopSongs(ctx, "123", "test", "mb123", 2)
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(BeEmpty())
			})
			// The multiplier scales the requested count before it reaches the
			// agent (2 * 2 = 4 below).
			It("fetches with multiplier", func() {
				conf.Server.DevExternalArtistFetchMultiplier = 2
				Expect(ag.GetArtistTopSongs(ctx, "123", "test", "mb123", 2)).To(Equal([]Song{{
					Name: "A Song",
					MBID: "mbid444",
				}}))
				Expect(mock.Args).To(HaveExactElements("123", "test", "mb123", 4))
			})
		})

		Describe("GetAlbumInfo", func() {
			It("returns meaningful data", func() {
				Expect(ag.GetAlbumInfo(ctx, "album", "artist", "mbid")).To(Equal(&AlbumInfo{
					Name:        "A Song",
					MBID:        "mbid444",
					Description: "A Description",
					URL:         "External URL",
				}))
				Expect(mock.Args).To(HaveExactElements("album", "artist", "mbid"))
			})
			It("skips the agent if it returns an error", func() {
				mock.Err = errors.New("error")
				_, err := ag.GetAlbumInfo(ctx, "album", "artist", "mbid")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(HaveExactElements("album", "artist", "mbid"))
			})
			It("interrupts if the context is canceled", func() {
				cancel()
				_, err := ag.GetAlbumInfo(ctx, "album", "artist", "mbid")
				Expect(err).To(MatchError(ErrNotFound))
				Expect(mock.Args).To(BeEmpty())
			})
		})
	})
})
|
||||||
|
|
||||||
|
// mockAgent is a configurable test double implementing every retriever
// interface. Each call records its arguments in Args; setting Err makes every
// method fail with that error.
type mockAgent struct {
	Args []interface{} // arguments of the most recent call
	Err  error         // when non-nil, returned by every method
}
|
||||||
|
|
||||||
|
// AgentName identifies this double as the "fake" agent.
func (a *mockAgent) AgentName() string {
	return "fake"
}

// GetArtistMBID records its arguments and returns a canned MBID, or Err.
func (a *mockAgent) GetArtistMBID(_ context.Context, id string, name string) (string, error) {
	a.Args = []interface{}{id, name}
	if a.Err != nil {
		return "", a.Err
	}
	return "mbid", nil
}

// GetArtistURL records its arguments and returns a canned URL, or Err.
func (a *mockAgent) GetArtistURL(_ context.Context, id, name, mbid string) (string, error) {
	a.Args = []interface{}{id, name, mbid}
	if a.Err != nil {
		return "", a.Err
	}
	return "url", nil
}

// GetArtistBiography records its arguments and returns a canned bio, or Err.
func (a *mockAgent) GetArtistBiography(_ context.Context, id, name, mbid string) (string, error) {
	a.Args = []interface{}{id, name, mbid}
	if a.Err != nil {
		return "", a.Err
	}
	return "bio", nil
}

// GetArtistImages records its arguments and returns one canned image, or Err.
func (a *mockAgent) GetArtistImages(_ context.Context, id, name, mbid string) ([]ExternalImage, error) {
	a.Args = []interface{}{id, name, mbid}
	if a.Err != nil {
		return nil, a.Err
	}
	return []ExternalImage{{
		URL:  "imageUrl",
		Size: 100,
	}}, nil
}

// GetSimilarArtists records its arguments and returns one canned artist, or Err.
func (a *mockAgent) GetSimilarArtists(_ context.Context, id, name, mbid string, limit int) ([]Artist, error) {
	a.Args = []interface{}{id, name, mbid, limit}
	if a.Err != nil {
		return nil, a.Err
	}
	return []Artist{{
		Name: "Joe Dohn",
		MBID: "mbid321",
	}}, nil
}

// GetArtistTopSongs records its arguments (including the possibly-multiplied
// count) and returns one canned song, or Err.
func (a *mockAgent) GetArtistTopSongs(_ context.Context, id, artistName, mbid string, count int) ([]Song, error) {
	a.Args = []interface{}{id, artistName, mbid, count}
	if a.Err != nil {
		return nil, a.Err
	}
	return []Song{{
		Name: "A Song",
		MBID: "mbid444",
	}}, nil
}

// GetAlbumInfo records its arguments and returns a canned AlbumInfo, or Err.
func (a *mockAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*AlbumInfo, error) {
	a.Args = []interface{}{name, artist, mbid}
	if a.Err != nil {
		return nil, a.Err
	}
	return &AlbumInfo{
		Name:        "A Song",
		MBID:        "mbid444",
		Description: "A Description",
		URL:         "External URL",
	}, nil
}
|
||||||
|
|
||||||
|
// emptyAgent is an agent with a name but no retriever capabilities: it embeds
// Interface without implementing any optional retriever interface, so every
// type assertion against it fails and the aggregator skips it.
type emptyAgent struct {
	Interface
}

// AgentName identifies this double as the "empty" agent.
func (e *emptyAgent) AgentName() string {
	return "empty"
}
|
||||||
|
|
||||||
|
// testImageAgent is a configurable image-only agent double used to test
// fall-through between multiple image agents.
type testImageAgent struct {
	Name   string          // value returned by AgentName
	Images []ExternalImage // result returned by GetArtistImages
	Err    error           // error returned by GetArtistImages
	Args   []interface{}   // arguments of the most recent GetArtistImages call
}

// AgentName returns the configured agent name.
func (t *testImageAgent) AgentName() string { return t.Name }

// GetArtistImages records its arguments and returns the configured
// images/error pair verbatim.
func (t *testImageAgent) GetArtistImages(_ context.Context, id, name, mbid string) ([]ExternalImage, error) {
	t.Args = []interface{}{id, name, mbid}
	return t.Images, t.Err
}
|
||||||
218
core/agents/deezer/client.go
Normal file
218
core/agents/deezer/client.go
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
import (
|
||||||
|
bytes "bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/microcosm-cc/bluemonday"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Base endpoints for Deezer's public REST API and its auth service.
const apiBaseURL = "https://api.deezer.com"
const authBaseURL = "https://auth.deezer.com"

var (
	// ErrNotFound is returned when a Deezer lookup yields no results.
	ErrNotFound = errors.New("deezer: not found")
)
|
||||||
|
|
||||||
|
// httpDoer abstracts *http.Client's Do method so tests can inject a fake
// transport.
type httpDoer interface {
	Do(req *http.Request) (*http.Response, error)
}
|
||||||
|
|
||||||
|
// client is a thin wrapper around Deezer's REST and GraphQL ("pipe") APIs.
type client struct {
	httpDoer httpDoer // transport used for all requests
	language string   // preferred language, presumably for localized content — confirm at call sites
	jwt      jwtToken // token state for the pipe API (type declared elsewhere)
}
|
||||||
|
|
||||||
|
func newClient(hc httpDoer, language string) *client {
|
||||||
|
return &client{
|
||||||
|
httpDoer: hc,
|
||||||
|
language: language,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]Artist, error) {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("q", name)
|
||||||
|
params.Add("limit", strconv.Itoa(limit))
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "GET", apiBaseURL+"/search/artist", nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.URL.RawQuery = params.Encode()
|
||||||
|
|
||||||
|
var results SearchArtistResults
|
||||||
|
err = c.makeRequest(req, &results)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(results.Data) == 0 {
|
||||||
|
return nil, ErrNotFound
|
||||||
|
}
|
||||||
|
return results.Data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) makeRequest(req *http.Request, response any) error {
|
||||||
|
log.Trace(req.Context(), fmt.Sprintf("Sending Deezer %s request", req.Method), "url", req.URL)
|
||||||
|
resp, err := c.httpDoer.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer resp.Body.Close()
|
||||||
|
data, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if resp.StatusCode != 200 {
|
||||||
|
return c.parseError(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
return json.Unmarshal(data, response)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) parseError(data []byte) error {
|
||||||
|
var deezerError Error
|
||||||
|
err := json.Unmarshal(data, &deezerError)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return fmt.Errorf("deezer error(%d): %s", deezerError.Error.Code, deezerError.Error.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) getRelatedArtists(ctx context.Context, artistID int) ([]Artist, error) {
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("%s/artist/%d/related", apiBaseURL, artistID), nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var results RelatedArtists
|
||||||
|
err = c.makeRequest(req, &results)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return results.Data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) getTopTracks(ctx context.Context, artistID int, limit int) ([]Track, error) {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("limit", strconv.Itoa(limit))
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("%s/artist/%d/top", apiBaseURL, artistID), nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.URL.RawQuery = params.Encode()
|
||||||
|
|
||||||
|
var results TopTracks
|
||||||
|
err = c.makeRequest(req, &results)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return results.Data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// pipeAPIURL is the endpoint of Deezer's GraphQL ("pipe") API, used for
// fetching artist biographies.
const pipeAPIURL = "https://pipe.deezer.com/api"

// strictPolicy strips all HTML tags; used by cleanBio to sanitize biography
// text before it is returned to callers.
var strictPolicy = bluemonday.StrictPolicy()
|
||||||
|
|
||||||
|
// getArtistBio fetches the full artist biography from Deezer's GraphQL
// ("pipe") API. It authenticates with an anonymous JWT (cached by getJWT),
// requests the biography in the client's configured language, and returns the
// sanitized plain-text result via cleanBio. It returns an error when the
// request fails, the GraphQL response reports in-band errors, or the
// biography is empty.
func (c *client) getArtistBio(ctx context.Context, artistID int) (string, error) {
	jwt, err := c.getJWT(ctx)
	if err != nil {
		return "", fmt.Errorf("deezer: failed to get JWT: %w", err)
	}

	// GraphQL request asking only for the artist's full biography text.
	query := map[string]any{
		"operationName": "ArtistBio",
		"variables": map[string]any{
			"artistId": strconv.Itoa(artistID),
		},
		"query": `query ArtistBio($artistId: String!) {
  artist(artistId: $artistId) {
    bio {
      full
    }
  }
}`,
	}

	body, err := json.Marshal(query)
	if err != nil {
		return "", err
	}

	req, err := http.NewRequestWithContext(ctx, "POST", pipeAPIURL, bytes.NewReader(body))
	if err != nil {
		return "", err
	}

	req.Header.Set("Content-Type", "application/json")
	// The biography language follows the client's configured language.
	req.Header.Set("Accept-Language", c.language)
	req.Header.Set("Authorization", "Bearer "+jwt)

	log.Trace(ctx, "Fetching Deezer artist biography via GraphQL", "artistId", artistID, "language", c.language)
	resp, err := c.httpDoer.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode != 200 {
		return "", fmt.Errorf("deezer: failed to fetch biography: %s", resp.Status)
	}

	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}

	// Minimal view of the GraphQL response: just the bio text plus any
	// in-band errors.
	type graphQLResponse struct {
		Data struct {
			Artist struct {
				Bio struct {
					Full string `json:"full"`
				} `json:"bio"`
			} `json:"artist"`
		} `json:"data"`
		Errors []struct {
			Message string `json:"message"`
		}
	}

	var result graphQLResponse
	if err := json.Unmarshal(data, &result); err != nil {
		return "", fmt.Errorf("deezer: failed to parse GraphQL response: %w", err)
	}

	// GraphQL can return HTTP 200 with errors in the payload; surface all of
	// them joined into a single error.
	if len(result.Errors) > 0 {
		var errs []error
		for m := range result.Errors {
			errs = append(errs, errors.New(result.Errors[m].Message))
		}
		err := errors.Join(errs...)
		return "", fmt.Errorf("deezer: GraphQL error: %w", err)
	}

	if result.Data.Artist.Bio.Full == "" {
		return "", errors.New("deezer: biography not found")
	}

	return cleanBio(result.Data.Artist.Bio.Full), nil
}
|
||||||
|
|
||||||
|
func cleanBio(bio string) string {
|
||||||
|
bio = strings.ReplaceAll(bio, "</p>", "\n")
|
||||||
|
return strictPolicy.Sanitize(bio)
|
||||||
|
}
|
||||||
101
core/agents/deezer/client_auth.go
Normal file
101
core/agents/deezer/client_auth.go
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/lestrrat-go/jwx/v2/jwt"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// jwtToken is a thread-safe cache for a single anonymous Deezer JWT and its
// expiration instant.
type jwtToken struct {
	token     string       // cached JWT; empty until first successful fetch
	expiresAt time.Time    // instant after which the cached token is invalid
	mu        sync.RWMutex // guards token and expiresAt
}
|
||||||
|
|
||||||
|
func (j *jwtToken) get() (string, bool) {
|
||||||
|
j.mu.RLock()
|
||||||
|
defer j.mu.RUnlock()
|
||||||
|
if time.Now().Before(j.expiresAt) {
|
||||||
|
return j.token, true
|
||||||
|
}
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j *jwtToken) set(token string, expiresIn time.Duration) {
|
||||||
|
j.mu.Lock()
|
||||||
|
defer j.mu.Unlock()
|
||||||
|
j.token = token
|
||||||
|
j.expiresAt = time.Now().Add(expiresIn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getJWT returns a valid anonymous Deezer JWT, fetching and caching a new one
// when the cached token is missing or about to expire. The cache TTL is the
// token's own "exp" claim minus a one-minute safety buffer, so callers never
// receive a token that is about to lapse mid-request.
func (c *client) getJWT(ctx context.Context) (string, error) {
	// Check if we have a valid cached token
	if token, valid := c.jwt.get(); valid {
		return token, nil
	}

	// Fetch a new anonymous token
	req, err := http.NewRequestWithContext(ctx, "GET", authBaseURL+"/login/anonymous?jo=p&rto=c", nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("Accept", "application/json")

	resp, err := c.httpDoer.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode != 200 {
		return "", fmt.Errorf("deezer: failed to get JWT token: %s", resp.Status)
	}

	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}

	// Only the "jwt" field of the auth payload is needed.
	type authResponse struct {
		JWT string `json:"jwt"`
	}

	var result authResponse
	if err := json.Unmarshal(data, &result); err != nil {
		return "", fmt.Errorf("deezer: failed to parse auth response: %w", err)
	}

	if result.JWT == "" {
		return "", errors.New("deezer: no JWT token in response")
	}

	// Parse JWT to get actual expiration time. Signature verification and
	// claim validation are deliberately skipped: only the "exp" claim is
	// needed to size the cache TTL.
	token, err := jwt.ParseString(result.JWT, jwt.WithVerify(false), jwt.WithValidate(false))
	if err != nil {
		return "", fmt.Errorf("deezer: failed to parse JWT token: %w", err)
	}

	// Calculate TTL with a 1-minute buffer for clock skew and network delays
	expiresAt := token.Expiration()
	if expiresAt.IsZero() {
		return "", errors.New("deezer: JWT token has no expiration time")
	}

	ttl := time.Until(expiresAt) - 1*time.Minute
	if ttl <= 0 {
		return "", errors.New("deezer: JWT token already expired or expires too soon")
	}

	c.jwt.set(result.JWT, ttl)
	log.Trace(ctx, "Fetched new Deezer JWT token", "expiresAt", expiresAt, "ttl", ttl)

	return result.JWT, nil
}
|
||||||
293
core/agents/deezer/client_auth_test.go
Normal file
293
core/agents/deezer/client_auth_test.go
Normal file
@@ -0,0 +1,293 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/lestrrat-go/jwx/v2/jwt"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Specs for getJWT (anonymous token fetching, validation, caching) and for
// the jwtToken cache type, using fakeHttpClient to stub the Deezer auth
// endpoint. Error-message assertions here pin the exact strings produced by
// getJWT, so those strings must not change without updating these specs.
var _ = Describe("JWT Authentication", func() {
	var httpClient *fakeHttpClient
	var client *client
	var ctx context.Context

	BeforeEach(func() {
		httpClient = &fakeHttpClient{}
		client = newClient(httpClient, "en")
		ctx = context.Background()
	})

	Describe("getJWT", func() {
		Context("with a valid JWT response", func() {
			It("successfully fetches and caches a JWT token", func() {
				testJWT := createTestJWT(5 * time.Minute)
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
				})

				token, err := client.getJWT(ctx)
				Expect(err).To(BeNil())
				Expect(token).To(Equal(testJWT))
			})

			It("returns the cached token on subsequent calls", func() {
				testJWT := createTestJWT(5 * time.Minute)
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
				})

				// First call should fetch from API
				token1, err := client.getJWT(ctx)
				Expect(err).To(BeNil())
				Expect(token1).To(Equal(testJWT))
				Expect(httpClient.lastRequest.URL.Path).To(Equal("/login/anonymous"))

				// Second call should return cached token without hitting API
				httpClient.lastRequest = nil // Clear last request to verify no new request is made
				token2, err := client.getJWT(ctx)
				Expect(err).To(BeNil())
				Expect(token2).To(Equal(testJWT))
				Expect(httpClient.lastRequest).To(BeNil()) // No new request made
			})

			It("parses the JWT expiration time correctly", func() {
				expectedExpiration := time.Now().Add(5 * time.Minute)
				testToken, err := jwt.NewBuilder().
					Expiration(expectedExpiration).
					Build()
				Expect(err).To(BeNil())
				testJWT, err := jwt.Sign(testToken, jwt.WithInsecureNoSignature())
				Expect(err).To(BeNil())

				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, string(testJWT)))),
				})

				token, err := client.getJWT(ctx)
				Expect(err).To(BeNil())
				Expect(token).ToNot(BeEmpty())

				// Verify the token is cached until close to expiration
				// The cache should expire 1 minute before the JWT expires
				expectedCacheExpiry := expectedExpiration.Add(-1 * time.Minute)
				Expect(client.jwt.expiresAt).To(BeTemporally("~", expectedCacheExpiry, 2*time.Second))
			})
		})

		Context("with JWT tokens that expire soon", func() {
			It("rejects tokens that expire in less than 1 minute", func() {
				// Create a token that expires in 30 seconds (less than 1-minute buffer)
				testJWT := createTestJWT(30 * time.Second)
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
				})

				_, err := client.getJWT(ctx)
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
			})

			It("rejects already expired tokens", func() {
				// Create a token that expired 1 minute ago
				testJWT := createTestJWT(-1 * time.Minute)
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
				})

				_, err := client.getJWT(ctx)
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
			})

			It("accepts tokens that expire in more than 1 minute", func() {
				// Create a token that expires in 2 minutes (just over the 1-minute buffer)
				testJWT := createTestJWT(2 * time.Minute)
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
				})

				token, err := client.getJWT(ctx)
				Expect(err).To(BeNil())
				Expect(token).ToNot(BeEmpty())
			})
		})

		Context("with invalid responses", func() {
			It("handles HTTP error responses", func() {
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 500,
					Body:       io.NopCloser(bytes.NewBufferString(`{"error":"Internal server error"}`)),
				})

				_, err := client.getJWT(ctx)
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("failed to get JWT token"))
			})

			It("handles malformed JSON responses", func() {
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(`{invalid json}`)),
				})

				_, err := client.getJWT(ctx)
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("failed to parse auth response"))
			})

			It("handles responses with empty JWT field", func() {
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(`{"jwt":""}`)),
				})

				_, err := client.getJWT(ctx)
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(Equal("deezer: no JWT token in response"))
			})

			It("handles invalid JWT tokens", func() {
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(`{"jwt":"not-a-valid-jwt"}`)),
				})

				_, err := client.getJWT(ctx)
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("failed to parse JWT token"))
			})

			It("rejects JWT tokens without expiration", func() {
				// Create a JWT without expiration claim
				testToken, err := jwt.NewBuilder().
					Claim("custom", "value").
					Build()
				Expect(err).To(BeNil())

				// Verify token has no expiration
				Expect(testToken.Expiration().IsZero()).To(BeTrue())

				testJWT, err := jwt.Sign(testToken, jwt.WithInsecureNoSignature())
				Expect(err).To(BeNil())

				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, string(testJWT)))),
				})

				_, err = client.getJWT(ctx)
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(Equal("deezer: JWT token has no expiration time"))
			})
		})

		Context("token caching behavior", func() {
			It("fetches a new token when the cached token expires", func() {
				// First token expires in 5 minutes
				firstJWT := createTestJWT(5 * time.Minute)
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, firstJWT))),
				})

				token1, err := client.getJWT(ctx)
				Expect(err).To(BeNil())
				Expect(token1).To(Equal(firstJWT))

				// Manually expire the cached token
				client.jwt.expiresAt = time.Now().Add(-1 * time.Second)

				// Second token with different expiration (10 minutes)
				secondJWT := createTestJWT(10 * time.Minute)
				httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
					StatusCode: 200,
					Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, secondJWT))),
				})

				token2, err := client.getJWT(ctx)
				Expect(err).To(BeNil())
				Expect(token2).To(Equal(secondJWT))
				Expect(token2).ToNot(Equal(token1))
			})
		})
	})

	Describe("jwtToken cache", func() {
		var cache *jwtToken

		BeforeEach(func() {
			cache = &jwtToken{}
		})

		It("returns false for expired tokens", func() {
			cache.set("test-token", -1*time.Second) // Already expired
			token, valid := cache.get()
			Expect(valid).To(BeFalse())
			Expect(token).To(BeEmpty())
		})

		It("returns true for valid tokens", func() {
			cache.set("test-token", 4*time.Minute)
			token, valid := cache.get()
			Expect(valid).To(BeTrue())
			Expect(token).To(Equal("test-token"))
		})

		It("is thread-safe for concurrent access", func() {
			wg := sync.WaitGroup{}

			// Writer goroutine
			wg.Go(func() {
				for i := 0; i < 100; i++ {
					cache.set(fmt.Sprintf("token-%d", i), 1*time.Hour)
					time.Sleep(1 * time.Millisecond)
				}
			})

			// Reader goroutine
			wg.Go(func() {
				for i := 0; i < 100; i++ {
					cache.get()
					time.Sleep(1 * time.Millisecond)
				}
			})

			// Wait for both goroutines to complete
			wg.Wait()

			// Verify final state is valid
			token, valid := cache.get()
			Expect(valid).To(BeTrue())
			Expect(token).To(HavePrefix("token-"))
		})
	})
})
|
||||||
|
|
||||||
|
// createTestJWT creates a valid JWT token for testing purposes
|
||||||
|
func createTestJWT(expiresIn time.Duration) string {
|
||||||
|
token, err := jwt.NewBuilder().
|
||||||
|
Expiration(time.Now().Add(expiresIn)).
|
||||||
|
Build()
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("failed to create test JWT: %v", err))
|
||||||
|
}
|
||||||
|
signed, err := jwt.Sign(token, jwt.WithInsecureNoSignature())
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("failed to sign test JWT: %v", err))
|
||||||
|
}
|
||||||
|
return string(signed)
|
||||||
|
}
|
||||||
195
core/agents/deezer/client_test.go
Normal file
195
core/agents/deezer/client_test.go
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Specs for the Deezer client's search and biography endpoints, driven by
// fakeHttpClient and JSON fixtures under tests/fixtures. The ArtistBio specs
// additionally stub the anonymous-JWT auth endpoint, since getArtistBio
// authenticates before calling the GraphQL API.
var _ = Describe("client", func() {
	var httpClient *fakeHttpClient
	var client *client

	BeforeEach(func() {
		httpClient = &fakeHttpClient{}
		client = newClient(httpClient, "en")
	})

	Describe("ArtistImages", func() {
		It("returns artist images from a successful request", func() {
			f, err := os.Open("tests/fixtures/deezer.search.artist.json")
			Expect(err).To(BeNil())
			httpClient.mock("https://api.deezer.com/search/artist", http.Response{Body: f, StatusCode: 200})

			artists, err := client.searchArtists(GinkgoT().Context(), "Michael Jackson", 20)
			Expect(err).To(BeNil())
			Expect(artists).To(HaveLen(17))
			Expect(artists[0].Name).To(Equal("Michael Jackson"))
			Expect(artists[0].PictureXl).To(Equal("https://cdn-images.dzcdn.net/images/artist/97fae13b2b30e4aec2e8c9e0c7839d92/1000x1000-000000-80-0-0.jpg"))
		})

		It("fails if artist was not found", func() {
			httpClient.mock("https://api.deezer.com/search/artist", http.Response{
				StatusCode: 200,
				Body:       io.NopCloser(bytes.NewBufferString(`{"data":[],"total":0}`)),
			})

			_, err := client.searchArtists(GinkgoT().Context(), "Michael Jackson", 20)
			Expect(err).To(MatchError(ErrNotFound))
		})
	})

	Describe("ArtistBio", func() {
		BeforeEach(func() {
			// Mock the JWT token endpoint with a valid JWT that expires in 5 minutes
			testJWT := createTestJWT(5 * time.Minute)
			httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
				StatusCode: 200,
				Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, testJWT))),
			})
		})

		It("returns artist bio from a successful request", func() {
			f, err := os.Open("tests/fixtures/deezer.artist.bio.json")
			Expect(err).To(BeNil())
			httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})

			bio, err := client.getArtistBio(GinkgoT().Context(), 27)
			Expect(err).To(BeNil())
			Expect(bio).To(ContainSubstring("Schoolmates Thomas and Guy-Manuel"))
			Expect(bio).ToNot(ContainSubstring("<p>"))
			Expect(bio).ToNot(ContainSubstring("</p>"))
		})

		It("uses the configured language", func() {
			client = newClient(httpClient, "fr")
			// Mock JWT token for the new client instance with a valid JWT
			testJWT := createTestJWT(5 * time.Minute)
			httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
				StatusCode: 200,
				Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, testJWT))),
			})
			f, err := os.Open("tests/fixtures/deezer.artist.bio.json")
			Expect(err).To(BeNil())
			httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})

			_, err = client.getArtistBio(GinkgoT().Context(), 27)
			Expect(err).To(BeNil())
			Expect(httpClient.lastRequest.Header.Get("Accept-Language")).To(Equal("fr"))
		})

		It("includes the JWT token in the request", func() {
			f, err := os.Open("tests/fixtures/deezer.artist.bio.json")
			Expect(err).To(BeNil())
			httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})

			_, err = client.getArtistBio(GinkgoT().Context(), 27)
			Expect(err).To(BeNil())
			// Verify that the Authorization header has the Bearer token format
			authHeader := httpClient.lastRequest.Header.Get("Authorization")
			Expect(authHeader).To(HavePrefix("Bearer "))
			Expect(len(authHeader)).To(BeNumerically(">", 20)) // JWT tokens are longer than 20 chars
		})

		It("handles GraphQL errors", func() {
			errorResponse := `{
				"data": {
					"artist": {
						"bio": {
							"full": ""
						}
					}
				},
				"errors": [
					{
						"message": "Artist not found"
					},
					{
						"message": "Invalid artist ID"
					}
				]
			}`
			httpClient.mock("https://pipe.deezer.com/api", http.Response{
				StatusCode: 200,
				Body:       io.NopCloser(bytes.NewBufferString(errorResponse)),
			})

			_, err := client.getArtistBio(GinkgoT().Context(), 999)
			Expect(err).To(HaveOccurred())
			Expect(err.Error()).To(ContainSubstring("GraphQL error"))
			Expect(err.Error()).To(ContainSubstring("Artist not found"))
			Expect(err.Error()).To(ContainSubstring("Invalid artist ID"))
		})

		It("handles empty biography", func() {
			emptyBioResponse := `{
				"data": {
					"artist": {
						"bio": {
							"full": ""
						}
					}
				}
			}`
			httpClient.mock("https://pipe.deezer.com/api", http.Response{
				StatusCode: 200,
				Body:       io.NopCloser(bytes.NewBufferString(emptyBioResponse)),
			})

			_, err := client.getArtistBio(GinkgoT().Context(), 27)
			Expect(err).To(MatchError("deezer: biography not found"))
		})

		It("handles JWT token fetch failure", func() {
			httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
				StatusCode: 500,
				Body:       io.NopCloser(bytes.NewBufferString(`{"error":"Internal server error"}`)),
			})

			_, err := client.getArtistBio(GinkgoT().Context(), 27)
			Expect(err).To(HaveOccurred())
			Expect(err.Error()).To(ContainSubstring("failed to get JWT"))
		})

		It("handles JWT token that expires too soon", func() {
			// Create a JWT that expires in 30 seconds (less than the 1-minute buffer)
			expiredJWT := createTestJWT(30 * time.Second)
			httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
				StatusCode: 200,
				Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, expiredJWT))),
			})

			_, err := client.getArtistBio(GinkgoT().Context(), 27)
			Expect(err).To(HaveOccurred())
			Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
		})
	})
})
|
||||||
|
|
||||||
|
// fakeHttpClient is a test double for the httpDoer interface. It serves
// canned responses keyed by URL (without query string) and records the most
// recent request so tests can inspect headers and paths.
type fakeHttpClient struct {
	responses   map[string]*http.Response // canned responses, keyed by query-less URL
	lastRequest *http.Request             // most recent request passed to Do
}
|
||||||
|
|
||||||
|
func (c *fakeHttpClient) mock(url string, response http.Response) {
|
||||||
|
if c.responses == nil {
|
||||||
|
c.responses = make(map[string]*http.Response)
|
||||||
|
}
|
||||||
|
c.responses[url] = &response
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *fakeHttpClient) Do(req *http.Request) (*http.Response, error) {
|
||||||
|
c.lastRequest = req
|
||||||
|
u := req.URL
|
||||||
|
u.RawQuery = ""
|
||||||
|
if resp, ok := c.responses[u.String()]; ok {
|
||||||
|
return resp, nil
|
||||||
|
}
|
||||||
|
panic("URL not mocked: " + u.String())
|
||||||
|
}
|
||||||
148
core/agents/deezer/deezer.go
Normal file
148
core/agents/deezer/deezer.go
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/consts"
|
||||||
|
"github.com/navidrome/navidrome/core/agents"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/utils/cache"
|
||||||
|
"github.com/navidrome/navidrome/utils/slice"
|
||||||
|
)
|
||||||
|
|
||||||
|
// deezerAgentName identifies this agent in the agents registry.
const deezerAgentName = "deezer"

// Pixel sizes of the artist pictures exposed by the Deezer API, one per
// picture_* field of the Artist response.
const deezerApiPictureXlSize = 1000
const deezerApiPictureBigSize = 500
const deezerApiPictureMediumSize = 250
const deezerApiPictureSmallSize = 56

// deezerArtistSearchLimit caps how many artists are requested per search.
const deezerArtistSearchLimit = 50
|
||||||
|
|
||||||
|
// deezerAgent implements agents.Interface backed by the public Deezer API.
type deezerAgent struct {
	dataStore model.DataStore // navidrome data store; not referenced by the methods visible here — confirm usage
	client    *client         // HTTP wrapper around the Deezer REST and GraphQL endpoints
}
|
||||||
|
|
||||||
|
func deezerConstructor(dataStore model.DataStore) agents.Interface {
|
||||||
|
agent := &deezerAgent{dataStore: dataStore}
|
||||||
|
httpClient := &http.Client{
|
||||||
|
Timeout: consts.DefaultHttpClientTimeOut,
|
||||||
|
}
|
||||||
|
cachedHttpClient := cache.NewHTTPClient(httpClient, consts.DefaultHttpClientTimeOut)
|
||||||
|
agent.client = newClient(cachedHttpClient, conf.Server.Deezer.Language)
|
||||||
|
return agent
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *deezerAgent) AgentName() string {
|
||||||
|
return deezerAgentName
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *deezerAgent) GetArtistImages(ctx context.Context, _, name, _ string) ([]agents.ExternalImage, error) {
|
||||||
|
artist, err := s.searchArtist(ctx, name)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, agents.ErrNotFound) {
|
||||||
|
log.Warn(ctx, "Artist not found in deezer", "artist", name)
|
||||||
|
} else {
|
||||||
|
log.Error(ctx, "Error calling deezer", "artist", name, err)
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var res []agents.ExternalImage
|
||||||
|
possibleImages := []struct {
|
||||||
|
URL string
|
||||||
|
Size int
|
||||||
|
}{
|
||||||
|
{artist.PictureXl, deezerApiPictureXlSize},
|
||||||
|
{artist.PictureBig, deezerApiPictureBigSize},
|
||||||
|
{artist.PictureMedium, deezerApiPictureMediumSize},
|
||||||
|
{artist.PictureSmall, deezerApiPictureSmallSize},
|
||||||
|
}
|
||||||
|
for _, imgData := range possibleImages {
|
||||||
|
if imgData.URL != "" {
|
||||||
|
res = append(res, agents.ExternalImage{
|
||||||
|
URL: imgData.URL,
|
||||||
|
Size: imgData.Size,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *deezerAgent) searchArtist(ctx context.Context, name string) (*Artist, error) {
|
||||||
|
artists, err := s.client.searchArtists(ctx, name, deezerArtistSearchLimit)
|
||||||
|
if errors.Is(err, ErrNotFound) || len(artists) == 0 {
|
||||||
|
return nil, agents.ErrNotFound
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the first one has the same name, that's the one
|
||||||
|
if !strings.EqualFold(artists[0].Name, name) {
|
||||||
|
return nil, agents.ErrNotFound
|
||||||
|
}
|
||||||
|
return &artists[0], err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *deezerAgent) GetSimilarArtists(ctx context.Context, _, name, _ string, limit int) ([]agents.Artist, error) {
|
||||||
|
artist, err := s.searchArtist(ctx, name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
related, err := s.client.getRelatedArtists(ctx, artist.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
res := slice.Map(related, func(r Artist) agents.Artist {
|
||||||
|
return agents.Artist{
|
||||||
|
Name: r.Name,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
if len(res) > limit {
|
||||||
|
res = res[:limit]
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *deezerAgent) GetArtistTopSongs(ctx context.Context, _, artistName, _ string, count int) ([]agents.Song, error) {
|
||||||
|
artist, err := s.searchArtist(ctx, artistName)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
tracks, err := s.client.getTopTracks(ctx, artist.ID, count)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
res := slice.Map(tracks, func(r Track) agents.Song {
|
||||||
|
return agents.Song{
|
||||||
|
Name: r.Title,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *deezerAgent) GetArtistBiography(ctx context.Context, _, name, _ string) (string, error) {
|
||||||
|
artist, err := s.searchArtist(ctx, name)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return s.client.getArtistBio(ctx, artist.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// init registers the Deezer agent with the agents registry. Registration is
// deferred via conf.AddHook so it runs only after configuration is loaded,
// and only when the agent is enabled in the server configuration.
func init() {
	conf.AddHook(func() {
		if conf.Server.Deezer.Enabled {
			agents.Register(deezerAgentName, deezerConstructor)
		}
	})
}
|
||||||
17
core/agents/deezer/deezer_suite_test.go
Normal file
17
core/agents/deezer/deezer_suite_test.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestDeezer wires the Ginkgo test suite for this package into Go's standard
// testing runner.
func TestDeezer(t *testing.T) {
	tests.Init(t, false)
	// Keep test output quiet: only fatal log entries are emitted.
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Deezer Test Suite")
}
|
||||||
66
core/agents/deezer/responses.go
Normal file
66
core/agents/deezer/responses.go
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
// SearchArtistResults is the paginated envelope of Deezer's artist search
// response.
type SearchArtistResults struct {
	Data  []Artist `json:"data"`
	Total int      `json:"total"`
	Next  string   `json:"next"` // URL of the next result page, if any
}

// Artist mirrors Deezer's artist object, including all picture size variants.
type Artist struct {
	ID            int    `json:"id"`
	Name          string `json:"name"`
	Link          string `json:"link"`
	Picture       string `json:"picture"`
	PictureSmall  string `json:"picture_small"`
	PictureMedium string `json:"picture_medium"`
	PictureBig    string `json:"picture_big"`
	PictureXl     string `json:"picture_xl"`
	NbAlbum       int    `json:"nb_album"`
	NbFan         int    `json:"nb_fan"`
	Radio         bool   `json:"radio"`
	Tracklist     string `json:"tracklist"`
	Type          string `json:"type"`
}

// Error is the envelope Deezer uses for API error responses,
// e.g. {"error":{"type":...,"message":...,"code":...}}.
type Error struct {
	Error struct {
		Type    string `json:"type"`
		Message string `json:"message"`
		Code    int    `json:"code"`
	} `json:"error"`
}

// RelatedArtists is the envelope of Deezer's related-artists response.
type RelatedArtists struct {
	Data  []Artist `json:"data"`
	Total int      `json:"total"`
}

// TopTracks is the paginated envelope of Deezer's top-tracks response.
type TopTracks struct {
	Data  []Track `json:"data"`
	Total int     `json:"total"`
	Next  string  `json:"next"` // URL of the next result page, if any
}

// Track mirrors Deezer's track object, embedding its artist, album, and
// contributing artists.
type Track struct {
	ID           int      `json:"id"`
	Title        string   `json:"title"`
	Link         string   `json:"link"`
	Duration     int      `json:"duration"` // seconds, per Deezer convention — confirm
	Rank         int      `json:"rank"`
	Preview      string   `json:"preview"`
	Artist       Artist   `json:"artist"`
	Album        Album    `json:"album"`
	Contributors []Artist `json:"contributors"`
}

// Album mirrors Deezer's album object, including all cover size variants.
type Album struct {
	ID          int    `json:"id"`
	Title       string `json:"title"`
	Cover       string `json:"cover"`
	CoverSmall  string `json:"cover_small"`
	CoverMedium string `json:"cover_medium"`
	CoverBig    string `json:"cover_big"`
	CoverXl     string `json:"cover_xl"`
	Tracklist   string `json:"tracklist"`
	Type        string `json:"type"`
}
|
||||||
69
core/agents/deezer/responses_test.go
Normal file
69
core/agents/deezer/responses_test.go
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
package deezer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Specs validating that the Deezer response types unmarshal real API payloads
// (recorded fixtures under tests/fixtures) correctly.
var _ = Describe("Responses", func() {
	Describe("Search type=artist", func() {
		It("parses the artist search result correctly ", func() {
			var resp SearchArtistResults
			body, err := os.ReadFile("tests/fixtures/deezer.search.artist.json")
			Expect(err).To(BeNil())
			err = json.Unmarshal(body, &resp)
			Expect(err).To(BeNil())

			Expect(resp.Data).To(HaveLen(17))
			michael := resp.Data[0]
			Expect(michael.Name).To(Equal("Michael Jackson"))
			Expect(michael.PictureXl).To(Equal("https://cdn-images.dzcdn.net/images/artist/97fae13b2b30e4aec2e8c9e0c7839d92/1000x1000-000000-80-0-0.jpg"))
		})
	})

	Describe("Error", func() {
		It("parses the error response correctly", func() {
			var errorResp Error
			// Inline payload matching Deezer's error envelope shape.
			body := []byte(`{"error":{"type":"MissingParameterException","message":"Missing parameters: q","code":501}}`)
			err := json.Unmarshal(body, &errorResp)
			Expect(err).To(BeNil())

			Expect(errorResp.Error.Code).To(Equal(501))
			Expect(errorResp.Error.Message).To(Equal("Missing parameters: q"))
		})
	})

	Describe("Related Artists", func() {
		It("parses the related artists response correctly", func() {
			var resp RelatedArtists
			body, err := os.ReadFile("tests/fixtures/deezer.artist.related.json")
			Expect(err).To(BeNil())
			err = json.Unmarshal(body, &resp)
			Expect(err).To(BeNil())

			Expect(resp.Data).To(HaveLen(20))
			justice := resp.Data[0]
			Expect(justice.Name).To(Equal("Justice"))
			Expect(justice.ID).To(Equal(6404))
		})
	})

	Describe("Top Tracks", func() {
		It("parses the top tracks response correctly", func() {
			var resp TopTracks
			body, err := os.ReadFile("tests/fixtures/deezer.artist.top.json")
			Expect(err).To(BeNil())
			err = json.Unmarshal(body, &resp)
			Expect(err).To(BeNil())

			Expect(resp.Data).To(HaveLen(5))
			track := resp.Data[0]
			Expect(track.Title).To(Equal("Instant Crush (feat. Julian Casablancas)"))
			Expect(track.ID).To(Equal(67238732))
			Expect(track.Album.Title).To(Equal("Random Access Memories"))
		})
	})
})
|
||||||
84
core/agents/interfaces.go
Normal file
84
core/agents/interfaces.go
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
package agents
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Constructor creates a new agent instance bound to the given DataStore.
type Constructor func(ds model.DataStore) Interface

// Interface is the minimal contract every agent must implement. Optional
// capabilities are expressed through the feature-specific *Retriever
// interfaces in this package.
type Interface interface {
	// AgentName returns the unique name under which the agent is registered.
	AgentName() string
}
|
||||||
|
|
||||||
|
// AlbumInfo contains album metadata (no images)
type AlbumInfo struct {
	Name        string
	MBID        string // MusicBrainz release ID, if the provider supplies one
	Description string
	URL         string // link to the album's page on the external service
}

// Artist is provider-agnostic artist metadata returned by agents.
type Artist struct {
	Name string
	MBID string // MusicBrainz artist ID, may be empty
}

// ExternalImage is an image hosted by an external service.
type ExternalImage struct {
	URL  string
	Size int // image size in pixels; 0 when unknown
}

// Song is provider-agnostic song metadata returned by agents.
type Song struct {
	Name string
	MBID string // MusicBrainz recording ID, may be empty
}
|
||||||
|
|
||||||
|
var (
	// ErrNotFound is returned by agents when the external service has no
	// result for the requested entity.
	ErrNotFound = errors.New("not found")
)

// AlbumInfoRetriever provides album info (no images)
type AlbumInfoRetriever interface {
	GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*AlbumInfo, error)
}

// AlbumImageRetriever provides album images
type AlbumImageRetriever interface {
	GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]ExternalImage, error)
}

// ArtistMBIDRetriever resolves an artist's MusicBrainz ID from its name.
type ArtistMBIDRetriever interface {
	GetArtistMBID(ctx context.Context, id string, name string) (string, error)
}

// ArtistURLRetriever resolves an artist's page URL on the external service.
type ArtistURLRetriever interface {
	GetArtistURL(ctx context.Context, id, name, mbid string) (string, error)
}

// ArtistBiographyRetriever fetches a textual biography for an artist.
type ArtistBiographyRetriever interface {
	GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error)
}

// ArtistSimilarRetriever fetches up to limit artists similar to the given one.
type ArtistSimilarRetriever interface {
	GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]Artist, error)
}

// ArtistImageRetriever fetches images of an artist.
type ArtistImageRetriever interface {
	GetArtistImages(ctx context.Context, id, name, mbid string) ([]ExternalImage, error)
}

// ArtistTopSongsRetriever fetches up to count of an artist's most popular songs.
type ArtistTopSongsRetriever interface {
	GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]Song, error)
}
|
||||||
|
|
||||||
|
// Map holds all registered agent constructors, keyed by agent name.
// It stays nil until the first Register call.
var Map map[string]Constructor

// Register adds (or replaces) the constructor for the named agent.
// NOTE(review): not safe for concurrent use — it appears to be called only
// from init()/config hooks during startup; confirm before calling elsewhere.
func Register(name string, init Constructor) {
	if Map == nil {
		Map = make(map[string]Constructor)
	}
	Map[name] = init
}
|
||||||
383
core/agents/lastfm/agent.go
Normal file
383
core/agents/lastfm/agent.go
Normal file
@@ -0,0 +1,383 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/andybalholm/cascadia"
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/consts"
|
||||||
|
"github.com/navidrome/navidrome/core/agents"
|
||||||
|
"github.com/navidrome/navidrome/core/scrobbler"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/utils/cache"
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// lastFMAgentName is the name under which this agent (and scrobbler) is registered.
	lastFMAgentName = "lastfm"
	// sessionKeyProperty is the user-property key storing the Last.fm session key.
	sessionKeyProperty = "LastFMSessionKey"
)

// ignoredBiographies lists prefixes of placeholder biographies that should be
// treated as "no biography available".
var ignoredBiographies = []string{
	// Unknown Artist
	`<a href="https://www.last.fm/music/`,
}

// lastfmAgent implements the agents metadata interfaces and the scrobbler
// interface backed by the Last.fm API.
type lastfmAgent struct {
	ds          model.DataStore
	sessionKeys *agents.SessionKeys // per-user Last.fm session keys
	apiKey      string
	secret      string
	lang        string // preferred language for localized content (e.g. biographies)
	client      *client
	httpClient  httpDoer // caching HTTP client, also used for page scraping
	// getInfoMutex serializes artist.getInfo calls; see callArtistGetInfo.
	getInfoMutex sync.Mutex
}
|
||||||
|
|
||||||
|
func lastFMConstructor(ds model.DataStore) *lastfmAgent {
|
||||||
|
if !conf.Server.LastFM.Enabled || conf.Server.LastFM.ApiKey == "" || conf.Server.LastFM.Secret == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
l := &lastfmAgent{
|
||||||
|
ds: ds,
|
||||||
|
lang: conf.Server.LastFM.Language,
|
||||||
|
apiKey: conf.Server.LastFM.ApiKey,
|
||||||
|
secret: conf.Server.LastFM.Secret,
|
||||||
|
sessionKeys: &agents.SessionKeys{DataStore: ds, KeyName: sessionKeyProperty},
|
||||||
|
}
|
||||||
|
hc := &http.Client{
|
||||||
|
Timeout: consts.DefaultHttpClientTimeOut,
|
||||||
|
}
|
||||||
|
chc := cache.NewHTTPClient(hc, consts.DefaultHttpClientTimeOut)
|
||||||
|
l.httpClient = chc
|
||||||
|
l.client = newClient(l.apiKey, l.secret, l.lang, chc)
|
||||||
|
return l
|
||||||
|
}
|
||||||
|
|
||||||
|
// AgentName returns the unique identifier used to register this agent.
func (l *lastfmAgent) AgentName() string {
	return lastFMAgentName
}
|
||||||
|
|
||||||
|
var imageRegex = regexp.MustCompile(`u\/(\d+)`)
|
||||||
|
|
||||||
|
func (l *lastfmAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
|
||||||
|
a, err := l.callAlbumGetInfo(ctx, name, artist, mbid)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &agents.AlbumInfo{
|
||||||
|
Name: a.Name,
|
||||||
|
MBID: a.MBID,
|
||||||
|
Description: a.Description.Summary,
|
||||||
|
URL: a.URL,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]agents.ExternalImage, error) {
|
||||||
|
a, err := l.callAlbumGetInfo(ctx, name, artist, mbid)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Last.fm can return duplicate sizes.
|
||||||
|
seenSizes := map[int]bool{}
|
||||||
|
images := make([]agents.ExternalImage, 0)
|
||||||
|
|
||||||
|
// This assumes that Last.fm returns images with size small, medium, and large.
|
||||||
|
// This is true as of December 29, 2022
|
||||||
|
for _, img := range a.Image {
|
||||||
|
size := imageRegex.FindStringSubmatch(img.URL)
|
||||||
|
// Last.fm can return images without URL
|
||||||
|
if len(size) == 0 || len(size[0]) < 4 {
|
||||||
|
log.Trace(ctx, "LastFM/albuminfo image URL does not match expected regex or is empty", "url", img.URL, "size", img.Size)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
numericSize, err := strconv.Atoi(size[0][2:])
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "LastFM/albuminfo image URL does not match expected regex", "url", img.URL, "size", img.Size, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if _, exists := seenSizes[numericSize]; !exists {
|
||||||
|
images = append(images, agents.ExternalImage{
|
||||||
|
Size: numericSize,
|
||||||
|
URL: img.URL,
|
||||||
|
})
|
||||||
|
seenSizes[numericSize] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return images, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
|
||||||
|
a, err := l.callArtistGetInfo(ctx, name)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if a.MBID == "" {
|
||||||
|
return "", agents.ErrNotFound
|
||||||
|
}
|
||||||
|
return a.MBID, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
|
||||||
|
a, err := l.callArtistGetInfo(ctx, name)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if a.URL == "" {
|
||||||
|
return "", agents.ErrNotFound
|
||||||
|
}
|
||||||
|
return a.URL, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
|
||||||
|
a, err := l.callArtistGetInfo(ctx, name)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
a.Bio.Summary = strings.TrimSpace(a.Bio.Summary)
|
||||||
|
if a.Bio.Summary == "" {
|
||||||
|
return "", agents.ErrNotFound
|
||||||
|
}
|
||||||
|
for _, ign := range ignoredBiographies {
|
||||||
|
if strings.HasPrefix(a.Bio.Summary, ign) {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return a.Bio.Summary, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
|
||||||
|
resp, err := l.callArtistGetSimilar(ctx, name, limit)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(resp) == 0 {
|
||||||
|
return nil, agents.ErrNotFound
|
||||||
|
}
|
||||||
|
var res []agents.Artist
|
||||||
|
for _, a := range resp {
|
||||||
|
res = append(res, agents.Artist{
|
||||||
|
Name: a.Name,
|
||||||
|
MBID: a.MBID,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
|
||||||
|
resp, err := l.callArtistGetTopTracks(ctx, artistName, count)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(resp) == 0 {
|
||||||
|
return nil, agents.ErrNotFound
|
||||||
|
}
|
||||||
|
var res []agents.Song
|
||||||
|
for _, t := range resp {
|
||||||
|
res = append(res, agents.Song{
|
||||||
|
Name: t.Name,
|
||||||
|
MBID: t.MBID,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
	// artistOpenGraphQuery selects the OpenGraph image tag from an artist page.
	artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
	artistIgnoredImage   = "2a96cbd8b46e442fc41c2b86b821562f" // Last.fm artist placeholder image name
)

// GetArtistImages fetches the artist's Last.fm page and returns the image
// advertised in its OpenGraph (og:image) metadata. It returns an empty (but
// non-nil only when a tag was found) result when there is no usable image,
// and skips Last.fm's known placeholder image.
func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string) ([]agents.ExternalImage, error) {
	log.Debug(ctx, "Getting artist images from Last.fm", "name", name)
	// Resolve the artist first to obtain its page URL.
	a, err := l.callArtistGetInfo(ctx, name)
	if err != nil {
		return nil, fmt.Errorf("get artist info: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, a.URL, nil)
	if err != nil {
		return nil, fmt.Errorf("create artist image request: %w", err)
	}
	resp, err := l.httpClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("get artist url: %w", err)
	}
	defer resp.Body.Close()

	node, err := html.Parse(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("parse html: %w", err)
	}

	var res []agents.ExternalImage
	n := cascadia.Query(node, artistOpenGraphQuery)
	if n == nil {
		// No og:image tag on the page: return an empty result, not an error.
		return res, nil
	}
	// Walk the tag's attributes; if multiple "content" attributes exist, the
	// last one wins. The image size is unknown, so Size is left at zero.
	for _, attr := range n.Attr {
		if attr.Key != "content" {
			continue
		}
		if strings.Contains(attr.Val, artistIgnoredImage) {
			log.Debug(ctx, "Artist image is ignored default image", "name", name, "url", attr.Val)
			return res, nil
		}

		res = []agents.ExternalImage{
			{URL: attr.Val},
		}
	}
	return res, nil
}
|
||||||
|
|
||||||
|
// callAlbumGetInfo wraps the album.getInfo API call. When a lookup that
// included an MBID fails with Last.fm error code 6 ("not found"), it retries
// once by name/artist only, since the MBID may simply be unknown to Last.fm.
func (l *lastfmAgent) callAlbumGetInfo(ctx context.Context, name, artist, mbid string) (*Album, error) {
	a, err := l.client.albumGetInfo(ctx, name, artist, mbid)
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)

	// Retry without the MBID (recursion terminates because mbid is then "").
	if mbid != "" && (isLastFMError && lfErr.Code == 6) {
		log.Debug(ctx, "LastFM/album.getInfo could not find album by mbid, trying again", "album", name, "mbid", mbid)
		return l.callAlbumGetInfo(ctx, name, artist, "")
	}

	if err != nil {
		if isLastFMError && lfErr.Code == 6 {
			// "Not found" is an expected outcome — log quietly.
			log.Debug(ctx, "Album not found", "album", name, "mbid", mbid, err)
		} else {
			log.Error(ctx, "Error calling LastFM/album.getInfo", "album", name, "mbid", mbid, err)
		}
		return nil, err
	}
	return a, nil
}
|
||||||
|
|
||||||
|
// callArtistGetInfo wraps the artist.getInfo API call.
// The mutex serializes concurrent calls — presumably so parallel requests for
// the same artist are answered by the caching HTTP client instead of each
// hitting Last.fm; confirm before removing.
func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string) (*Artist, error) {
	l.getInfoMutex.Lock()
	defer l.getInfoMutex.Unlock()

	a, err := l.client.artistGetInfo(ctx, name)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, err)
		return nil, err
	}
	return a, nil
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, limit int) ([]Artist, error) {
|
||||||
|
s, err := l.client.artistGetSimilar(ctx, name, limit)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return s.Artists, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName string, count int) ([]Track, error) {
|
||||||
|
t, err := l.client.artistGetTopTracks(ctx, artistName, count)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return t.Track, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile, role model.Role, displayName string) string {
|
||||||
|
if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[role]) > 0 {
|
||||||
|
return track.Participants[role][0].Name
|
||||||
|
}
|
||||||
|
return displayName
|
||||||
|
}
|
||||||
|
|
||||||
|
// NowPlaying notifies Last.fm that the user started playing the given track.
// It requires a stored session key for the user and returns ErrNotAuthorized
// otherwise. The position argument is not used by this implementation.
func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
	sk, err := l.sessionKeys.Get(ctx, userId)
	if err != nil || sk == "" {
		return scrobbler.ErrNotAuthorized
	}

	err = l.client.updateNowPlaying(ctx, sk, ScrobbleInfo{
		artist:      l.getArtistForScrobble(track, model.RoleArtist, track.Artist),
		track:       track.Title,
		album:       track.Album,
		trackNumber: track.TrackNumber,
		mbid:        track.MbzRecordingID,
		duration:    int(track.Duration),
		albumArtist: l.getArtistForScrobble(track, model.RoleAlbumArtist, track.AlbumArtist),
	})
	if err != nil {
		log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)
		// Failures here are not retried by the caller.
		return errors.Join(err, scrobbler.ErrUnrecoverable)
	}
	return nil
}
|
||||||
|
|
||||||
|
// Scrobble submits a played track to Last.fm for the given user.
// Tracks of 30 seconds or less are skipped without error. API failures are
// classified for the scrobble queue: transport/non-Last.fm errors and Last.fm
// error codes 11 and 16 (service offline / temporarily unavailable, per the
// Last.fm API docs — confirm) are retryable; everything else is unrecoverable.
func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
	sk, err := l.sessionKeys.Get(ctx, userId)
	if err != nil || sk == "" {
		return errors.Join(err, scrobbler.ErrNotAuthorized)
	}

	if s.Duration <= 30 {
		log.Debug(ctx, "Skipping Last.fm scrobble for short song", "track", s.Title, "duration", s.Duration)
		return nil
	}
	err = l.client.scrobble(ctx, sk, ScrobbleInfo{
		artist:      l.getArtistForScrobble(&s.MediaFile, model.RoleArtist, s.Artist),
		track:       s.Title,
		album:       s.Album,
		trackNumber: s.TrackNumber,
		mbid:        s.MbzRecordingID,
		duration:    int(s.Duration),
		albumArtist: l.getArtistForScrobble(&s.MediaFile, model.RoleAlbumArtist, s.AlbumArtist),
		timestamp:   s.TimeStamp,
	})
	if err == nil {
		return nil
	}
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)
	if !isLastFMError {
		// Transport-level problem: worth retrying later.
		log.Warn(ctx, "Last.fm client.scrobble returned error", "track", s.Title, err)
		return errors.Join(err, scrobbler.ErrRetryLater)
	}
	if lfErr.Code == 11 || lfErr.Code == 16 {
		return errors.Join(err, scrobbler.ErrRetryLater)
	}
	return errors.Join(err, scrobbler.ErrUnrecoverable)
}
|
||||||
|
|
||||||
|
func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||||
|
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||||
|
return err == nil && sk != ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// init registers the Last.fm agent and scrobbler after configuration load.
// lastFMConstructor may return a nil *lastfmAgent (when disabled/unconfigured),
// and both registrations guard against wrapping that typed nil in an interface.
func init() {
	conf.AddHook(func() {
		agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
			// This is a workaround for the fact that a (Interface)(nil) is not the same as a (*lastfmAgent)(nil)
			// See https://go.dev/doc/faq#nil_error
			a := lastFMConstructor(ds)
			if a != nil {
				return a
			}
			return nil
		})
		scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
			// Same as above - this is a workaround for the fact that a (Scrobbler)(nil) is not the same as a (*lastfmAgent)(nil)
			// See https://go.dev/doc/faq#nil_error
			a := lastFMConstructor(ds)
			if a != nil {
				return a
			}
			return nil
		})
	})
}
|
||||||
487
core/agents/lastfm/agent_test.go
Normal file
487
core/agents/lastfm/agent_test.go
Normal file
@@ -0,0 +1,487 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/conf/configtest"
|
||||||
|
"github.com/navidrome/navidrome/core/agents"
|
||||||
|
"github.com/navidrome/navidrome/core/scrobbler"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// Canned Last.fm error payloads used to simulate API failures in tests.
	lastfmError3 = `{"error":3,"message":"Invalid Method - No method with that name in this package","links":[]}`
	lastfmError6 = `{"error":6,"message":"The artist you supplied could not be found","links":[]}`
)
|
||||||
|
|
||||||
|
var _ = Describe("lastfmAgent", func() {
|
||||||
|
var ds model.DataStore
|
||||||
|
var ctx context.Context
|
||||||
|
BeforeEach(func() {
|
||||||
|
ds = &tests.MockDataStore{}
|
||||||
|
ctx = context.Background()
|
||||||
|
DeferCleanup(configtest.SetupConfig())
|
||||||
|
conf.Server.LastFM.Enabled = true
|
||||||
|
conf.Server.LastFM.ApiKey = "123"
|
||||||
|
conf.Server.LastFM.Secret = "secret"
|
||||||
|
})
|
||||||
|
Describe("lastFMConstructor", func() {
|
||||||
|
When("Agent is properly configured", func() {
|
||||||
|
It("uses configured api key and language", func() {
|
||||||
|
conf.Server.LastFM.Language = "pt"
|
||||||
|
agent := lastFMConstructor(ds)
|
||||||
|
Expect(agent.apiKey).To(Equal("123"))
|
||||||
|
Expect(agent.secret).To(Equal("secret"))
|
||||||
|
Expect(agent.lang).To(Equal("pt"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
When("Agent is disabled", func() {
|
||||||
|
It("returns nil", func() {
|
||||||
|
conf.Server.LastFM.Enabled = false
|
||||||
|
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
When("ApiKey is empty", func() {
|
||||||
|
It("returns nil", func() {
|
||||||
|
conf.Server.LastFM.ApiKey = ""
|
||||||
|
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
When("Secret is empty", func() {
|
||||||
|
It("returns nil", func() {
|
||||||
|
conf.Server.LastFM.Secret = ""
|
||||||
|
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("GetArtistBiography", func() {
|
||||||
|
var agent *lastfmAgent
|
||||||
|
var httpClient *tests.FakeHttpClient
|
||||||
|
BeforeEach(func() {
|
||||||
|
httpClient = &tests.FakeHttpClient{}
|
||||||
|
client := newClient("API_KEY", "SECRET", "pt", httpClient)
|
||||||
|
agent = lastFMConstructor(ds)
|
||||||
|
agent.client = client
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns the biography", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
Expect(agent.GetArtistBiography(ctx, "123", "U2", "")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call fails", func() {
|
||||||
|
httpClient.Err = errors.New("error")
|
||||||
|
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call returns an error", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||||
|
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("GetSimilarArtists", func() {
|
||||||
|
var agent *lastfmAgent
|
||||||
|
var httpClient *tests.FakeHttpClient
|
||||||
|
BeforeEach(func() {
|
||||||
|
httpClient = &tests.FakeHttpClient{}
|
||||||
|
client := newClient("API_KEY", "SECRET", "pt", httpClient)
|
||||||
|
agent = lastFMConstructor(ds)
|
||||||
|
agent.client = client
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns similar artists", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "", 2)).To(Equal([]agents.Artist{
|
||||||
|
{Name: "Passengers", MBID: "e110c11f-1c94-4471-a350-c38f46b29389"},
|
||||||
|
{Name: "INXS", MBID: "481bf5f9-2e7c-4c44-b08a-05b32bc7c00d"},
|
||||||
|
}))
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call fails", func() {
|
||||||
|
httpClient.Err = errors.New("error")
|
||||||
|
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call returns an error", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||||
|
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("GetArtistTopSongs", func() {
|
||||||
|
var agent *lastfmAgent
|
||||||
|
var httpClient *tests.FakeHttpClient
|
||||||
|
BeforeEach(func() {
|
||||||
|
httpClient = &tests.FakeHttpClient{}
|
||||||
|
client := newClient("API_KEY", "SECRET", "pt", httpClient)
|
||||||
|
agent = lastFMConstructor(ds)
|
||||||
|
agent.client = client
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns top songs", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)).To(Equal([]agents.Song{
|
||||||
|
{Name: "Beautiful Day", MBID: "f7f264d0-a89b-4682-9cd7-a4e7c37637af"},
|
||||||
|
{Name: "With or Without You", MBID: "6b9a509f-6907-4a6e-9345-2f12da09ba4b"},
|
||||||
|
}))
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call fails", func() {
|
||||||
|
httpClient.Err = errors.New("error")
|
||||||
|
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call returns an error", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||||
|
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Scrobbling", func() {
|
||||||
|
var agent *lastfmAgent
|
||||||
|
var httpClient *tests.FakeHttpClient
|
||||||
|
var track *model.MediaFile
|
||||||
|
BeforeEach(func() {
|
||||||
|
_ = ds.UserProps(ctx).Put("user-1", sessionKeyProperty, "SK-1")
|
||||||
|
httpClient = &tests.FakeHttpClient{}
|
||||||
|
client := newClient("API_KEY", "SECRET", "en", httpClient)
|
||||||
|
agent = lastFMConstructor(ds)
|
||||||
|
agent.client = client
|
||||||
|
track = &model.MediaFile{
|
||||||
|
ID: "123",
|
||||||
|
Title: "Track Title",
|
||||||
|
Album: "Track Album",
|
||||||
|
Artist: "Track Artist",
|
||||||
|
AlbumArtist: "Track AlbumArtist",
|
||||||
|
TrackNumber: 1,
|
||||||
|
Duration: 180,
|
||||||
|
MbzRecordingID: "mbz-123",
|
||||||
|
Participants: map[model.Role]model.ParticipantList{
|
||||||
|
model.RoleArtist: []model.Participant{
|
||||||
|
{Artist: model.Artist{ID: "ar-1", Name: "First Artist"}},
|
||||||
|
{Artist: model.Artist{ID: "ar-2", Name: "Second Artist"}},
|
||||||
|
},
|
||||||
|
model.RoleAlbumArtist: []model.Participant{
|
||||||
|
{Artist: model.Artist{ID: "ar-1", Name: "First Album Artist"}},
|
||||||
|
{Artist: model.Artist{ID: "ar-2", Name: "Second Album Artist"}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("NowPlaying", func() {
|
||||||
|
It("calls Last.fm with correct params", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||||
|
|
||||||
|
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(httpClient.SavedRequest.Method).To(Equal(http.MethodPost))
|
||||||
|
sentParams := httpClient.SavedRequest.URL.Query()
|
||||||
|
Expect(sentParams.Get("method")).To(Equal("track.updateNowPlaying"))
|
||||||
|
Expect(sentParams.Get("sk")).To(Equal("SK-1"))
|
||||||
|
Expect(sentParams.Get("track")).To(Equal(track.Title))
|
||||||
|
Expect(sentParams.Get("album")).To(Equal(track.Album))
|
||||||
|
Expect(sentParams.Get("artist")).To(Equal(track.Artist))
|
||||||
|
Expect(sentParams.Get("albumArtist")).To(Equal(track.AlbumArtist))
|
||||||
|
Expect(sentParams.Get("trackNumber")).To(Equal(strconv.Itoa(track.TrackNumber)))
|
||||||
|
Expect(sentParams.Get("duration")).To(Equal(strconv.FormatFloat(float64(track.Duration), 'G', -1, 32)))
|
||||||
|
Expect(sentParams.Get("mbid")).To(Equal(track.MbzRecordingID))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns ErrNotAuthorized if user is not linked", func() {
|
||||||
|
err := agent.NowPlaying(ctx, "user-2", track, 0)
|
||||||
|
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||||
|
})
|
||||||
|
|
||||||
|
When("ScrobbleFirstArtistOnly is true", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
|
||||||
|
})
|
||||||
|
|
||||||
|
It("uses only the first artist", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||||
|
|
||||||
|
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
sentParams := httpClient.SavedRequest.URL.Query()
|
||||||
|
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||||
|
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("scrobble", func() {
|
||||||
|
It("calls Last.fm with correct params", func() {
|
||||||
|
ts := time.Now()
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||||
|
|
||||||
|
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: ts})
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(httpClient.SavedRequest.Method).To(Equal(http.MethodPost))
|
||||||
|
sentParams := httpClient.SavedRequest.URL.Query()
|
||||||
|
Expect(sentParams.Get("method")).To(Equal("track.scrobble"))
|
||||||
|
Expect(sentParams.Get("sk")).To(Equal("SK-1"))
|
||||||
|
Expect(sentParams.Get("track")).To(Equal(track.Title))
|
||||||
|
Expect(sentParams.Get("album")).To(Equal(track.Album))
|
||||||
|
Expect(sentParams.Get("artist")).To(Equal(track.Artist))
|
||||||
|
Expect(sentParams.Get("albumArtist")).To(Equal(track.AlbumArtist))
|
||||||
|
Expect(sentParams.Get("trackNumber")).To(Equal(strconv.Itoa(track.TrackNumber)))
|
||||||
|
Expect(sentParams.Get("duration")).To(Equal(strconv.FormatFloat(float64(track.Duration), 'G', -1, 32)))
|
||||||
|
Expect(sentParams.Get("mbid")).To(Equal(track.MbzRecordingID))
|
||||||
|
Expect(sentParams.Get("timestamp")).To(Equal(strconv.FormatInt(ts.Unix(), 10)))
|
||||||
|
})
|
||||||
|
|
||||||
|
When("ScrobbleFirstArtistOnly is true", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
|
||||||
|
})
|
||||||
|
|
||||||
|
It("uses only the first artist", func() {
|
||||||
|
ts := time.Now()
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||||
|
|
||||||
|
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: ts})
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
sentParams := httpClient.SavedRequest.URL.Query()
|
||||||
|
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||||
|
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
It("skips songs with less than 31 seconds", func() {
|
||||||
|
track.Duration = 29
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||||
|
|
||||||
|
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(httpClient.SavedRequest).To(BeNil())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns ErrNotAuthorized if user is not linked", func() {
|
||||||
|
err := agent.Scrobble(ctx, "user-2", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||||
|
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns ErrRetryLater on error 11", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`{"error":11,"message":"Service Offline - This service is temporarily offline. Try again later."}`)),
|
||||||
|
StatusCode: 400,
|
||||||
|
}
|
||||||
|
|
||||||
|
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||||
|
Expect(err).To(MatchError(scrobbler.ErrRetryLater))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns ErrRetryLater on error 16", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`{"error":16,"message":"There was a temporary error processing your request. Please try again"}`)),
|
||||||
|
StatusCode: 400,
|
||||||
|
}
|
||||||
|
|
||||||
|
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||||
|
Expect(err).To(MatchError(scrobbler.ErrRetryLater))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns ErrRetryLater on http errors", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`internal server error`)),
|
||||||
|
StatusCode: 500,
|
||||||
|
}
|
||||||
|
|
||||||
|
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||||
|
Expect(err).To(MatchError(scrobbler.ErrRetryLater))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns ErrUnrecoverable on other errors", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`{"error":8,"message":"Operation failed - Something else went wrong"}`)),
|
||||||
|
StatusCode: 400,
|
||||||
|
}
|
||||||
|
|
||||||
|
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||||
|
Expect(err).To(MatchError(scrobbler.ErrUnrecoverable))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("GetAlbumInfo", func() {
|
||||||
|
var agent *lastfmAgent
|
||||||
|
var httpClient *tests.FakeHttpClient
|
||||||
|
BeforeEach(func() {
|
||||||
|
httpClient = &tests.FakeHttpClient{}
|
||||||
|
client := newClient("API_KEY", "SECRET", "pt", httpClient)
|
||||||
|
agent = lastFMConstructor(ds)
|
||||||
|
agent.client = client
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns the biography", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.album.getinfo.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
Expect(agent.GetAlbumInfo(ctx, "Believe", "Cher", "03c91c40-49a6-44a7-90e7-a700edf97a62")).To(Equal(&agents.AlbumInfo{
|
||||||
|
Name: "Believe",
|
||||||
|
MBID: "03c91c40-49a6-44a7-90e7-a700edf97a62",
|
||||||
|
Description: "Believe is the twenty-third studio album by American singer-actress Cher, released on November 10, 1998 by Warner Bros. Records. The RIAA certified it Quadruple Platinum on December 23, 1999, recognizing four million shipments in the United States; Worldwide, the album has sold more than 20 million copies, making it the biggest-selling album of her career. In 1999 the album received three Grammy Awards nominations including \"Record of the Year\", \"Best Pop Album\" and winning \"Best Dance Recording\" for the single \"Believe\". It was released by Warner Bros. Records at the end of 1998. The album was executive produced by Rob <a href=\"https://www.last.fm/music/Cher/Believe\">Read more on Last.fm</a>.",
|
||||||
|
URL: "https://www.last.fm/music/Cher/Believe",
|
||||||
|
}))
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("03c91c40-49a6-44a7-90e7-a700edf97a62"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns empty images if no images are available", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.album.getinfo.empty_urls.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
Expect(agent.GetAlbumInfo(ctx, "The Definitive Less Damage And More Joy", "The Jesus and Mary Chain", "")).To(Equal(&agents.AlbumInfo{
|
||||||
|
Name: "The Definitive Less Damage And More Joy",
|
||||||
|
URL: "https://www.last.fm/music/The+Jesus+and+Mary+Chain/The+Definitive+Less+Damage+And+More+Joy",
|
||||||
|
}))
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("album")).To(Equal("The Definitive Less Damage And More Joy"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call fails", func() {
|
||||||
|
httpClient.Err = errors.New("error")
|
||||||
|
_, err := agent.GetAlbumInfo(ctx, "123", "U2", "mbid-1234")
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call returns an error", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||||
|
_, err := agent.GetAlbumInfo(ctx, "123", "U2", "mbid-1234")
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||||
|
_, err := agent.GetAlbumInfo(ctx, "123", "U2", "")
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(1))
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("MBID non existent in Last.fm", func() {
|
||||||
|
It("calls again when last.fm returns an error 6", func() {
|
||||||
|
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||||
|
_, _ = agent.GetAlbumInfo(ctx, "123", "U2", "mbid-1234")
|
||||||
|
Expect(httpClient.RequestCount).To(Equal(2))
|
||||||
|
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("GetArtistImages", func() {
|
||||||
|
var agent *lastfmAgent
|
||||||
|
var apiClient *tests.FakeHttpClient
|
||||||
|
var httpClient *tests.FakeHttpClient
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
apiClient = &tests.FakeHttpClient{}
|
||||||
|
httpClient = &tests.FakeHttpClient{}
|
||||||
|
client := newClient("API_KEY", "SECRET", "pt", apiClient)
|
||||||
|
agent = lastFMConstructor(ds)
|
||||||
|
agent.client = client
|
||||||
|
agent.httpClient = httpClient
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns the artist image from the page", func() {
|
||||||
|
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||||
|
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||||
|
|
||||||
|
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.html")
|
||||||
|
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
|
||||||
|
|
||||||
|
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(images).To(HaveLen(1))
|
||||||
|
Expect(images[0].URL).To(Equal("https://lastfm.freetls.fastly.net/i/u/ar0/818148bf682d429dc21b59a73ef6f68e.png"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns empty list if image is the ignored default image", func() {
|
||||||
|
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||||
|
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||||
|
|
||||||
|
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.ignored.html")
|
||||||
|
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
|
||||||
|
|
||||||
|
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(images).To(BeEmpty())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns empty list if page has no meta tags", func() {
|
||||||
|
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||||
|
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||||
|
|
||||||
|
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.no_meta.html")
|
||||||
|
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
|
||||||
|
|
||||||
|
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(images).To(BeEmpty())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns error if API call fails", func() {
|
||||||
|
apiClient.Err = errors.New("api error")
|
||||||
|
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(err.Error()).To(ContainSubstring("get artist info"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns error if scraper call fails", func() {
|
||||||
|
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||||
|
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||||
|
|
||||||
|
httpClient.Err = errors.New("scraper error")
|
||||||
|
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
Expect(err.Error()).To(ContainSubstring("get artist url"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
132
core/agents/lastfm/auth_router.go
Normal file
132
core/agents/lastfm/auth_router.go
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
_ "embed"
|
||||||
|
"errors"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/deluan/rest"
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/go-chi/chi/v5/middleware"
|
||||||
|
"github.com/navidrome/navidrome/conf"
|
||||||
|
"github.com/navidrome/navidrome/consts"
|
||||||
|
"github.com/navidrome/navidrome/core/agents"
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/model"
|
||||||
|
"github.com/navidrome/navidrome/model/request"
|
||||||
|
"github.com/navidrome/navidrome/server"
|
||||||
|
"github.com/navidrome/navidrome/utils/req"
|
||||||
|
)
|
||||||
|
|
||||||
|
//go:embed token_received.html
|
||||||
|
var tokenReceivedPage []byte
|
||||||
|
|
||||||
|
// Router serves the Last.fm account-linking endpoints (/link, /link/callback),
// letting a Navidrome user authorize the server to scrobble on their behalf.
type Router struct {
	http.Handler
	ds          model.DataStore
	sessionKeys *agents.SessionKeys // per-user Last.fm session key storage
	client      *client             // Last.fm API client used for the auth handshake
	apiKey      string
	secret      string
}
|
||||||
|
|
||||||
|
func NewRouter(ds model.DataStore) *Router {
|
||||||
|
r := &Router{
|
||||||
|
ds: ds,
|
||||||
|
apiKey: conf.Server.LastFM.ApiKey,
|
||||||
|
secret: conf.Server.LastFM.Secret,
|
||||||
|
sessionKeys: &agents.SessionKeys{DataStore: ds, KeyName: sessionKeyProperty},
|
||||||
|
}
|
||||||
|
r.Handler = r.routes()
|
||||||
|
hc := &http.Client{
|
||||||
|
Timeout: consts.DefaultHttpClientTimeOut,
|
||||||
|
}
|
||||||
|
r.client = newClient(r.apiKey, r.secret, "en", hc)
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
// routes assembles the auth sub-router: authenticated endpoints to query and
// remove the Last.fm link, plus an unauthenticated callback that Last.fm
// redirects the browser to after the user grants access.
func (s *Router) routes() http.Handler {
	r := chi.NewRouter()

	r.Group(func(r chi.Router) {
		// These endpoints require a logged-in Navidrome user.
		r.Use(server.Authenticator(s.ds))
		r.Use(server.JWTRefresher)

		r.Get("/link", s.getLinkStatus)
		r.Delete("/link", s.unlink)
	})

	// Unauthenticated: hit via Last.fm's redirect, carrying token and uid.
	r.Get("/link/callback", s.callback)

	return r
}
|
||||||
|
|
||||||
|
func (s *Router) getLinkStatus(w http.ResponseWriter, r *http.Request) {
|
||||||
|
resp := map[string]interface{}{
|
||||||
|
"apiKey": s.apiKey,
|
||||||
|
}
|
||||||
|
u, _ := request.UserFrom(r.Context())
|
||||||
|
key, err := s.sessionKeys.Get(r.Context(), u.ID)
|
||||||
|
if err != nil && !errors.Is(err, model.ErrNotFound) {
|
||||||
|
resp["error"] = err
|
||||||
|
resp["status"] = false
|
||||||
|
_ = rest.RespondWithJSON(w, http.StatusInternalServerError, resp)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
resp["status"] = key != ""
|
||||||
|
_ = rest.RespondWithJSON(w, http.StatusOK, resp)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Router) unlink(w http.ResponseWriter, r *http.Request) {
|
||||||
|
u, _ := request.UserFrom(r.Context())
|
||||||
|
err := s.sessionKeys.Delete(r.Context(), u.ID)
|
||||||
|
if err != nil {
|
||||||
|
_ = rest.RespondWithError(w, http.StatusInternalServerError, err.Error())
|
||||||
|
} else {
|
||||||
|
_ = rest.RespondWithJSON(w, http.StatusOK, map[string]string{})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// callback handles the redirect from Last.fm after the user authorizes the
// application. It expects "token" (issued by Last.fm) and "uid" (the Navidrome
// user who started the link flow) as query parameters, exchanges the token for
// a session key, and serves a static confirmation page on success.
func (s *Router) callback(w http.ResponseWriter, r *http.Request) {
	p := req.Params(r)
	token, err := p.String("token")
	if err != nil {
		_ = rest.RespondWithError(w, http.StatusBadRequest, "token not received")
		return
	}
	uid, err := p.String("uid")
	if err != nil {
		_ = rest.RespondWithError(w, http.StatusBadRequest, "uid not received")
		return
	}

	// Need to add user to context, as this is a non-authenticated endpoint, so it does not
	// automatically contain any user info
	ctx := request.WithUser(r.Context(), model.User{ID: uid})
	err = s.fetchSessionKey(ctx, uid, token)
	if err != nil {
		// Plain-text error page with the request ID, so the user can report it.
		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
		w.WriteHeader(http.StatusBadRequest)
		_, _ = w.Write([]byte("An error occurred while authorizing with Last.fm. \n\nRequest ID: " + middleware.GetReqID(ctx)))
		return
	}

	// Serve the embedded "token received" confirmation page.
	http.ServeContent(w, r, "response", time.Now(), bytes.NewReader(tokenReceivedPage))
}
|
||||||
|
|
||||||
|
func (s *Router) fetchSessionKey(ctx context.Context, uid, token string) error {
|
||||||
|
sessionKey, err := s.client.getSession(ctx, token)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(ctx, "Could not fetch LastFM session key", "userId", uid, "token", token,
|
||||||
|
"requestId", middleware.GetReqID(ctx), err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = s.sessionKeys.Put(ctx, uid, sessionKey)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Could not save LastFM session key", "userId", uid, "requestId", middleware.GetReqID(ctx), err)
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
233
core/agents/lastfm/client.go
Normal file
233
core/agents/lastfm/client.go
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/hex"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"slices"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
apiBaseUrl = "https://ws.audioscrobbler.com/2.0/"
|
||||||
|
)
|
||||||
|
|
||||||
|
// lastFMError represents an error payload returned by the Last.fm API
// (a numeric code plus a human-readable message).
type lastFMError struct {
	Code    int
	Message string
}

// Error implements the error interface, rendering the code and message in
// the form "last.fm error(<code>): <message>".
func (e *lastFMError) Error() string {
	const format = "last.fm error(%d): %s"
	return fmt.Sprintf(format, e.Code, e.Message)
}
|
||||||
|
|
||||||
|
// httpDoer abstracts the minimal HTTP client surface needed by the Last.fm
// client, allowing a fake to be injected in tests.
type httpDoer interface {
	Do(req *http.Request) (*http.Response, error)
}

// client is a thin wrapper around the Last.fm REST API.
type client struct {
	apiKey string  // Last.fm application API key
	secret string  // shared secret used to sign authenticated calls
	lang   string  // preferred language for localized responses
	hc     httpDoer
}

// newClient creates a Last.fm API client with the given credentials,
// response language, and underlying HTTP client.
func newClient(apiKey string, secret string, lang string, hc httpDoer) *client {
	return &client{
		apiKey: apiKey,
		secret: secret,
		lang:   lang,
		hc:     hc,
	}
}
|
||||||
|
|
||||||
|
// albumGetInfo calls Last.fm's album.getInfo endpoint for the given album
// name, artist and MusicBrainz ID, localized to the client's language.
// Returns the parsed Album or the error from the request/Last.fm payload.
func (c *client) albumGetInfo(ctx context.Context, name string, artist string, mbid string) (*Album, error) {
	params := url.Values{}
	params.Add("method", "album.getInfo")
	params.Add("album", name)
	params.Add("artist", artist)
	params.Add("mbid", mbid)
	params.Add("lang", c.lang)
	// Unsigned request: album.getInfo does not require authentication.
	response, err := c.makeRequest(ctx, http.MethodGet, params, false)
	if err != nil {
		return nil, err
	}
	return &response.Album, nil
}
|
||||||
|
|
||||||
|
func (c *client) artistGetInfo(ctx context.Context, name string) (*Artist, error) {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("method", "artist.getInfo")
|
||||||
|
params.Add("artist", name)
|
||||||
|
params.Add("lang", c.lang)
|
||||||
|
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &response.Artist, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) artistGetSimilar(ctx context.Context, name string, limit int) (*SimilarArtists, error) {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("method", "artist.getSimilar")
|
||||||
|
params.Add("artist", name)
|
||||||
|
params.Add("limit", strconv.Itoa(limit))
|
||||||
|
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &response.SimilarArtists, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) artistGetTopTracks(ctx context.Context, name string, limit int) (*TopTracks, error) {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("method", "artist.getTopTracks")
|
||||||
|
params.Add("artist", name)
|
||||||
|
params.Add("limit", strconv.Itoa(limit))
|
||||||
|
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &response.TopTracks, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) GetToken(ctx context.Context) (string, error) {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("method", "auth.getToken")
|
||||||
|
c.sign(params)
|
||||||
|
response, err := c.makeRequest(ctx, http.MethodGet, params, true)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return response.Token, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *client) getSession(ctx context.Context, token string) (string, error) {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("method", "auth.getSession")
|
||||||
|
params.Add("token", token)
|
||||||
|
response, err := c.makeRequest(ctx, http.MethodGet, params, true)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return response.Session.Key, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ScrobbleInfo carries the track metadata sent to Last.fm for both
// track.updateNowPlaying and track.scrobble calls.
type ScrobbleInfo struct {
	artist      string
	track       string
	album       string
	trackNumber int
	mbid        string // MusicBrainz recording ID
	duration    int    // track length in seconds
	albumArtist string
	timestamp   time.Time // when playback started (used by scrobble only)
}
|
||||||
|
|
||||||
|
// updateNowPlaying sends a signed track.updateNowPlaying request for the
// given user session. Last.fm may accept the call yet ignore the update
// (IgnoredMessage code != "0"); that case is logged but not treated as an
// error, matching Last.fm's semantics.
func (c *client) updateNowPlaying(ctx context.Context, sessionKey string, info ScrobbleInfo) error {
	params := url.Values{}
	params.Add("method", "track.updateNowPlaying")
	params.Add("artist", info.artist)
	params.Add("track", info.track)
	params.Add("album", info.album)
	params.Add("trackNumber", strconv.Itoa(info.trackNumber))
	params.Add("mbid", info.mbid)
	params.Add("duration", strconv.Itoa(info.duration))
	params.Add("albumArtist", info.albumArtist)
	params.Add("sk", sessionKey)
	resp, err := c.makeRequest(ctx, http.MethodPost, params, true)
	if err != nil {
		return err
	}
	if resp.NowPlaying.IgnoredMessage.Code != "0" {
		log.Warn(ctx, "LastFM: NowPlaying was ignored", "code", resp.NowPlaying.IgnoredMessage.Code,
			"text", resp.NowPlaying.IgnoredMessage.Text)
	}
	return nil
}
|
||||||
|
|
||||||
|
// scrobble sends a signed track.scrobble request for the given user session.
// As with updateNowPlaying, Last.fm may ignore or not accept a scrobble
// without the HTTP call failing; both conditions are logged as warnings and
// deliberately not returned as errors (retrying would not help).
func (c *client) scrobble(ctx context.Context, sessionKey string, info ScrobbleInfo) error {
	params := url.Values{}
	params.Add("method", "track.scrobble")
	params.Add("timestamp", strconv.FormatInt(info.timestamp.Unix(), 10))
	params.Add("artist", info.artist)
	params.Add("track", info.track)
	params.Add("album", info.album)
	params.Add("trackNumber", strconv.Itoa(info.trackNumber))
	params.Add("mbid", info.mbid)
	params.Add("duration", strconv.Itoa(info.duration))
	params.Add("albumArtist", info.albumArtist)
	params.Add("sk", sessionKey)
	resp, err := c.makeRequest(ctx, http.MethodPost, params, true)
	if err != nil {
		return err
	}
	if resp.Scrobbles.Scrobble.IgnoredMessage.Code != "0" {
		log.Warn(ctx, "LastFM: scrobble was ignored", "code", resp.Scrobbles.Scrobble.IgnoredMessage.Code,
			"text", resp.Scrobbles.Scrobble.IgnoredMessage.Text, "info", info)
	}
	if resp.Scrobbles.Attr.Accepted != 1 {
		log.Warn(ctx, "LastFM: scrobble was not accepted", "code", resp.Scrobbles.Scrobble.IgnoredMessage.Code,
			"text", resp.Scrobbles.Scrobble.IgnoredMessage.Text, "info", info)
	}
	return nil
}
|
||||||
|
|
||||||
|
// makeRequest performs a call against the Last.fm API, always requesting the
// JSON format and attaching the API key. When signed is true, the parameters
// are signed per Last.fm's authentication protocol. All parameters travel in
// the query string, even for POST requests.
//
// Error precedence: a non-200 status with an undecodable body yields an HTTP
// status error; a decode failure on its own is returned as-is; a decoded
// Last.fm error payload yields a *lastFMError (together with the partially
// populated response, so callers can inspect it).
func (c *client) makeRequest(ctx context.Context, method string, params url.Values, signed bool) (*Response, error) {
	params.Add("format", "json")
	params.Add("api_key", c.apiKey)

	if signed {
		c.sign(params)
	}

	req, _ := http.NewRequestWithContext(ctx, method, apiBaseUrl, nil)
	req.URL.RawQuery = params.Encode()

	log.Trace(ctx, fmt.Sprintf("Sending Last.fm %s request", req.Method), "url", req.URL)
	resp, err := c.hc.Do(req)
	if err != nil {
		return nil, err
	}

	defer resp.Body.Close()
	decoder := json.NewDecoder(resp.Body)

	var response Response
	jsonErr := decoder.Decode(&response)
	// Prefer reporting the HTTP status when the body isn't valid JSON.
	if resp.StatusCode != 200 && jsonErr != nil {
		return nil, fmt.Errorf("last.fm http status: (%d)", resp.StatusCode)
	}
	if jsonErr != nil {
		return nil, jsonErr
	}
	if response.Error != 0 {
		// Return the response alongside the error: some callers need it.
		return &response, &lastFMError{Code: response.Error, Message: response.Message}
	}

	return &response, nil
}
|
||||||
|
|
||||||
|
func (c *client) sign(params url.Values) {
|
||||||
|
// the parameters must be in order before hashing
|
||||||
|
keys := make([]string, 0, len(params))
|
||||||
|
for k := range params {
|
||||||
|
if slices.Contains([]string{"format", "callback"}, k) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
keys = append(keys, k)
|
||||||
|
}
|
||||||
|
sort.Strings(keys)
|
||||||
|
msg := strings.Builder{}
|
||||||
|
for _, k := range keys {
|
||||||
|
msg.WriteString(k)
|
||||||
|
msg.WriteString(params[k][0])
|
||||||
|
}
|
||||||
|
msg.WriteString(c.secret)
|
||||||
|
hash := md5.Sum([]byte(msg.String()))
|
||||||
|
params.Add("api_sig", hex.EncodeToString(hash[:]))
|
||||||
|
}
|
||||||
173
core/agents/lastfm/client_test.go
Normal file
173
core/agents/lastfm/client_test.go
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("client", func() {
|
||||||
|
var httpClient *tests.FakeHttpClient
|
||||||
|
var client *client
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
httpClient = &tests.FakeHttpClient{}
|
||||||
|
client = newClient("API_KEY", "SECRET", "pt", httpClient)
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("albumGetInfo", func() {
|
||||||
|
It("returns an album on successful response", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.album.getinfo.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
|
||||||
|
album, err := client.albumGetInfo(context.Background(), "Believe", "U2", "mbid-1234")
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
Expect(album.Name).To(Equal("Believe"))
|
||||||
|
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?album=Believe&api_key=API_KEY&artist=U2&format=json&lang=pt&mbid=mbid-1234&method=album.getInfo"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("artistGetInfo", func() {
|
||||||
|
It("returns an artist for a successful response", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
|
||||||
|
artist, err := client.artistGetInfo(context.Background(), "U2")
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
Expect(artist.Name).To(Equal("U2"))
|
||||||
|
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&lang=pt&method=artist.getInfo"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("fails if Last.fm returns an http status != 200", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`Internal Server Error`)),
|
||||||
|
StatusCode: 500,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||||
|
Expect(err).To(MatchError("last.fm http status: (500)"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("fails if Last.fm returns an http status != 200", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`{"error":3,"message":"Invalid Method - No method with that name in this package"}`)),
|
||||||
|
StatusCode: 400,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||||
|
Expect(err).To(MatchError(&lastFMError{Code: 3, Message: "Invalid Method - No method with that name in this package"}))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("fails if Last.fm returns an error", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`{"error":6,"message":"The artist you supplied could not be found"}`)),
|
||||||
|
StatusCode: 200,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||||
|
Expect(err).To(MatchError(&lastFMError{Code: 6, Message: "The artist you supplied could not be found"}))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("fails if HttpClient.Do() returns error", func() {
|
||||||
|
httpClient.Err = errors.New("generic error")
|
||||||
|
|
||||||
|
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||||
|
Expect(err).To(MatchError("generic error"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("fails if returned body is not a valid JSON", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`<xml>NOT_VALID_JSON</xml>`)),
|
||||||
|
StatusCode: 200,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||||
|
Expect(err).To(MatchError("invalid character '<' looking for beginning of value"))
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("artistGetSimilar", func() {
|
||||||
|
It("returns an artist for a successful response", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
|
||||||
|
similar, err := client.artistGetSimilar(context.Background(), "U2", 2)
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
Expect(len(similar.Artists)).To(Equal(2))
|
||||||
|
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getSimilar"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("artistGetTopTracks", func() {
|
||||||
|
It("returns top tracks for a successful response", func() {
|
||||||
|
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||||
|
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||||
|
|
||||||
|
top, err := client.artistGetTopTracks(context.Background(), "U2", 2)
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
Expect(len(top.Track)).To(Equal(2))
|
||||||
|
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getTopTracks"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("GetToken", func() {
|
||||||
|
It("returns a token when the request is successful", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`{"token":"TOKEN"}`)),
|
||||||
|
StatusCode: 200,
|
||||||
|
}
|
||||||
|
|
||||||
|
Expect(client.GetToken(context.Background())).To(Equal("TOKEN"))
|
||||||
|
queryParams := httpClient.SavedRequest.URL.Query()
|
||||||
|
Expect(queryParams.Get("method")).To(Equal("auth.getToken"))
|
||||||
|
Expect(queryParams.Get("format")).To(Equal("json"))
|
||||||
|
Expect(queryParams.Get("api_key")).To(Equal("API_KEY"))
|
||||||
|
Expect(queryParams.Get("api_sig")).ToNot(BeEmpty())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("getSession", func() {
|
||||||
|
It("returns a session key when the request is successful", func() {
|
||||||
|
httpClient.Res = http.Response{
|
||||||
|
Body: io.NopCloser(bytes.NewBufferString(`{"session":{"name":"Navidrome","key":"SESSION_KEY","subscriber":0}}`)),
|
||||||
|
StatusCode: 200,
|
||||||
|
}
|
||||||
|
|
||||||
|
Expect(client.getSession(context.Background(), "TOKEN")).To(Equal("SESSION_KEY"))
|
||||||
|
queryParams := httpClient.SavedRequest.URL.Query()
|
||||||
|
Expect(queryParams.Get("method")).To(Equal("auth.getSession"))
|
||||||
|
Expect(queryParams.Get("format")).To(Equal("json"))
|
||||||
|
Expect(queryParams.Get("token")).To(Equal("TOKEN"))
|
||||||
|
Expect(queryParams.Get("api_key")).To(Equal("API_KEY"))
|
||||||
|
Expect(queryParams.Get("api_sig")).ToNot(BeEmpty())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("sign", func() {
|
||||||
|
It("adds an api_sig param with the signature", func() {
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("d", "444")
|
||||||
|
params.Add("callback", "https://myserver.com")
|
||||||
|
params.Add("a", "111")
|
||||||
|
params.Add("format", "json")
|
||||||
|
params.Add("c", "333")
|
||||||
|
params.Add("b", "222")
|
||||||
|
client.sign(params)
|
||||||
|
Expect(params).To(HaveKey("api_sig"))
|
||||||
|
sig := params.Get("api_sig")
|
||||||
|
expected := fmt.Sprintf("%x", md5.Sum([]byte("a111b222c333d444SECRET")))
|
||||||
|
Expect(sig).To(Equal(expected))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
17
core/agents/lastfm/lastfm_suite_test.go
Normal file
17
core/agents/lastfm/lastfm_suite_test.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/navidrome/navidrome/log"
|
||||||
|
"github.com/navidrome/navidrome/tests"
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestLastFM is the standard Go test entry point; it wires the Ginkgo
// specs in this package into `go test`.
func TestLastFM(t *testing.T) {
	// Initialize the shared test environment helpers.
	// NOTE(review): the meaning of the boolean flag is defined in the tests
	// package — presumably it toggles optional setup; confirm there.
	tests.Init(t, false)
	// Silence all but fatal log output so spec results stay readable.
	log.SetLevel(log.LevelFatal)
	// Route Gomega assertion failures to Ginkgo, then run every spec
	// registered in this package.
	RegisterFailHandler(Fail)
	RunSpecs(t, "LastFM Test Suite")
}
|
||||||
119
core/agents/lastfm/responses.go
Normal file
119
core/agents/lastfm/responses.go
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
// Response is the common envelope for every Last.fm API reply handled by
// this package. Only the field matching the requested method is populated
// by the decoder; Error and Message are set when Last.fm reports a failure
// (see the client's error handling, which turns them into a lastFMError).
type Response struct {
	Artist         Artist         `json:"artist"`
	SimilarArtists SimilarArtists `json:"similarartists"`
	TopTracks      TopTracks      `json:"toptracks"`
	Album          Album          `json:"album"`
	Error          int            `json:"error"`   // non-zero means the call failed
	Message        string         `json:"message"` // human-readable error description
	Token          string         `json:"token"`
	Session        Session        `json:"session"`
	NowPlaying     NowPlaying     `json:"nowplaying"`
	Scrobbles      Scrobbles      `json:"scrobbles"`
}
|
||||||
|
|
||||||
|
// Album is the payload of an album.getInfo response.
type Album struct {
	Name        string          `json:"name"`
	MBID        string          `json:"mbid"`
	URL         string          `json:"url"`
	Image       []ExternalImage `json:"image"`
	Description Description     `json:"wiki"` // album wiki text
}

// Artist is the payload of an artist.getInfo response.
type Artist struct {
	Name  string          `json:"name"`
	MBID  string          `json:"mbid"`
	URL   string          `json:"url"`
	Image []ExternalImage `json:"image"`
	Bio   Description     `json:"bio"` // artist biography text
}

// SimilarArtists is the payload of an artist.getSimilar response.
type SimilarArtists struct {
	Artists []Artist `json:"artist"`
	Attr    Attr     `json:"@attr"`
}

// Attr holds the "@attr" metadata element that accompanies list responses
// (it names the artist the list refers to).
type Attr struct {
	Artist string `json:"artist"`
}

// ExternalImage is one image entry as returned by Last.fm: a URL plus a
// size label.
type ExternalImage struct {
	URL  string `json:"#text"`
	Size string `json:"size"`
}

// Description is a block of wiki/bio text with its publication date and
// both a short and a full form.
type Description struct {
	Published string `json:"published"`
	Summary   string `json:"summary"`
	Content   string `json:"content"`
}

// Track identifies a single track by name and MusicBrainz ID.
type Track struct {
	Name string `json:"name"`
	MBID string `json:"mbid"`
}

// TopTracks is the payload of an artist.getTopTracks response.
type TopTracks struct {
	Track []Track `json:"track"`
	Attr  Attr    `json:"@attr"`
}

// Session is the payload of an auth.getSession response; Key is the
// long-lived session key used to authenticate subsequent calls.
type Session struct {
	Name       string `json:"name"`
	Key        string `json:"key"`
	Subscriber int    `json:"subscriber"`
}
|
||||||
|
|
||||||
|
// NowPlaying mirrors the "nowplaying" element of a Last.fm response
// (presumably from track.updateNowPlaying — confirm against the caller).
// Each sub-struct carries the value Last.fm applied ("#text") and a flag
// telling whether it auto-corrected the submitted value ("corrected").
type NowPlaying struct {
	Artist struct {
		Corrected string `json:"corrected"`
		Text      string `json:"#text"`
	} `json:"artist"`
	// IgnoredMessage explains why the submission was ignored, if it was.
	IgnoredMessage struct {
		Code string `json:"code"`
		Text string `json:"#text"`
	} `json:"ignoredMessage"`
	Album struct {
		Corrected string `json:"corrected"`
		Text      string `json:"#text"`
	} `json:"album"`
	AlbumArtist struct {
		Corrected string `json:"corrected"`
		Text      string `json:"#text"`
	} `json:"albumArtist"`
	Track struct {
		Corrected string `json:"corrected"`
		Text      string `json:"#text"`
	} `json:"track"`
}

// Scrobbles mirrors the "scrobbles" element of a Last.fm response
// (presumably from track.scrobble — confirm against the caller). The
// "@attr" element counts accepted vs. ignored submissions, and each field
// of the scrobble reports the applied value plus a correction flag, as in
// NowPlaying.
type Scrobbles struct {
	Attr struct {
		Accepted int `json:"accepted"`
		Ignored  int `json:"ignored"`
	} `json:"@attr"`
	Scrobble struct {
		Artist struct {
			Corrected string `json:"corrected"`
			Text      string `json:"#text"`
		} `json:"artist"`
		IgnoredMessage struct {
			Code string `json:"code"`
			Text string `json:"#text"`
		} `json:"ignoredMessage"`
		AlbumArtist struct {
			Corrected string `json:"corrected"`
			Text      string `json:"#text"`
		} `json:"albumArtist"`
		Timestamp string `json:"timestamp"`
		Album struct {
			Corrected string `json:"corrected"`
			Text      string `json:"#text"`
		} `json:"album"`
		Track struct {
			Corrected string `json:"corrected"`
			Text      string `json:"#text"`
		} `json:"track"`
	} `json:"scrobble"`
}
|
||||||
65
core/agents/lastfm/responses_test.go
Normal file
65
core/agents/lastfm/responses_test.go
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
package lastfm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo/v2"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("LastFM responses", func() {
|
||||||
|
Describe("Artist", func() {
|
||||||
|
It("parses the response correctly", func() {
|
||||||
|
var resp Response
|
||||||
|
body, _ := os.ReadFile("tests/fixtures/lastfm.artist.getinfo.json")
|
||||||
|
err := json.Unmarshal(body, &resp)
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
|
||||||
|
Expect(resp.Artist.Name).To(Equal("U2"))
|
||||||
|
Expect(resp.Artist.MBID).To(Equal("a3cb23fc-acd3-4ce0-8f36-1e5aa6a18432"))
|
||||||
|
Expect(resp.Artist.URL).To(Equal("https://www.last.fm/music/U2"))
|
||||||
|
Expect(resp.Artist.Bio.Summary).To(ContainSubstring("U2 é uma das mais importantes bandas de rock de todos os tempos"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("SimilarArtists", func() {
|
||||||
|
It("parses the response correctly", func() {
|
||||||
|
var resp Response
|
||||||
|
body, _ := os.ReadFile("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||||
|
err := json.Unmarshal(body, &resp)
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
|
||||||
|
Expect(resp.SimilarArtists.Artists).To(HaveLen(2))
|
||||||
|
Expect(resp.SimilarArtists.Artists[0].Name).To(Equal("Passengers"))
|
||||||
|
Expect(resp.SimilarArtists.Artists[1].Name).To(Equal("INXS"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("TopTracks", func() {
|
||||||
|
It("parses the response correctly", func() {
|
||||||
|
var resp Response
|
||||||
|
body, _ := os.ReadFile("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||||
|
err := json.Unmarshal(body, &resp)
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
|
||||||
|
Expect(resp.TopTracks.Track).To(HaveLen(2))
|
||||||
|
Expect(resp.TopTracks.Track[0].Name).To(Equal("Beautiful Day"))
|
||||||
|
Expect(resp.TopTracks.Track[0].MBID).To(Equal("f7f264d0-a89b-4682-9cd7-a4e7c37637af"))
|
||||||
|
Expect(resp.TopTracks.Track[1].Name).To(Equal("With or Without You"))
|
||||||
|
Expect(resp.TopTracks.Track[1].MBID).To(Equal("6b9a509f-6907-4a6e-9345-2f12da09ba4b"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Error", func() {
|
||||||
|
It("parses the error response correctly", func() {
|
||||||
|
var error Response
|
||||||
|
body := []byte(`{"error":3,"message":"Invalid Method - No method with that name in this package"}`)
|
||||||
|
err := json.Unmarshal(body, &error)
|
||||||
|
Expect(err).To(BeNil())
|
||||||
|
|
||||||
|
Expect(error.Error).To(Equal(3))
|
||||||
|
Expect(error.Message).To(Equal("Invalid Method - No method with that name in this package"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
16
core/agents/lastfm/token_received.html
Normal file
16
core/agents/lastfm/token_received.html
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <title>Account Linking Success</title>
  </head>
  <body>
    <h2 id="msg"></h2>
    <script>
      // Show the confirmation message after a short delay, giving the
      // auto-close below a chance to succeed first. A function is passed to
      // setTimeout instead of a string: string arguments are evaluated like
      // eval() and are discouraged.
      setTimeout(() => {
        document.getElementById('msg').innerHTML = 'Success! Your account is linked to Last.fm. You can close this tab now.';
      }, 2000);
      // Try to close the tab automatically. Browsers only honor
      // window.close() for windows opened by script; otherwise the message
      // above is displayed.
      document.addEventListener("DOMContentLoaded", () => {
        window.close();
      });
    </script>
  </body>
</html>
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user