Compare commits


103 Commits

Author SHA1 Message Date
HiItsStolas
49fef60cca chore: update pnpm.lock 2025-12-15 10:11:49 +10:00
HiItsStolas
5e9086e1c4 fix(musicapi): fixed similar and discography api endpoints 2025-12-15 09:36:47 +10:00
HiItsStolas
d93769003f chore(music): updating music migrations 2025-12-15 09:36:45 +10:00
HiItsStolas
72e838ab2e fix(mediarequestsubscriber): fixed request notifications
Aligned the request notifications with radarr and sonarr, which fixed how it used updateParentStatus
2025-12-15 09:36:44 +10:00
HiItsStolas
d7eb60f471 fix(discover music): fixed music not showing in discover music
Music was being blacklisted, and the API call to listenbrainz was wrong
2025-12-15 09:36:42 +10:00
HiItsStolas
8e04ab884f fix(blacklist): needed to make tmdbId optional 2025-12-15 09:36:41 +10:00
HiItsStolas
bcdc18e3b9 refactor(music): making api calls non monolithic
separated the discography and similar-artist from the monolithic artists call
2025-12-15 09:36:40 +10:00
HiItsStolas
c3386d42f5 fix(mediarequest): fixing broken vars and aligning with develop 2025-12-15 09:36:39 +10:00
HiItsStolas
5926880d02 chore(update pnpm-lock): update pnpm-lock 2025-12-15 09:36:35 +10:00
Pierre
438a144721 refactor: switch from Fetch API to Axios 2025-12-15 09:36:22 +10:00
Pierre
66e6ce2545 fix: apply tag requests to artist in Lidarr instead of album 2025-12-15 09:36:21 +10:00
Pierre
97defc17ba style: apply formatting using pnpm format 2025-12-15 09:36:20 +10:00
Pierre
94dcdabfc7 refactor: unify delete file route logic across Radarr, Sonarr, and Lidarr 2025-12-15 09:36:19 +10:00
Pierre
32d1a80fe3 fix: ensure filtered request array excludes undefined entries to satisfy type safety 2025-12-15 09:36:18 +10:00
Pierre
1db665b37d refactor: combine the AddLidarrServiceIdToOverrideRules migration with AddMusicSupport 2025-12-15 09:36:17 +10:00
Pierre
c5cb163fb7 fix: proper handling of multiple Lidarr servers 2025-12-15 09:36:15 +10:00
Pierre
5fe04d66e5 fix: include missing dompurify dependency in package.json and pnpm-lock.yaml 2025-12-15 09:36:12 +10:00
Pierre
773f1f3580 fix: update migration file to resolve compatibility issues with main repo develop branch 2025-12-15 09:36:11 +10:00
Pierre
0ebf40285d fix: allow null values for caaUrl to prevent SQLITE_CONSTRAINT: NOT NULL constraint failure in metadata_album.caaUrl 2025-12-15 09:36:10 +10:00
Pierre
18ce7765e1 chore: update package and pnpm-lock files 2025-12-15 09:36:06 +10:00
Pierre
d39aef5fd4 fix: recently added albums are now correctly marked as available when using Plex as the media server 2025-12-15 09:35:51 +10:00
Pierre
df99d61e13 fix: ensure proper monitoring and searching of unmonitored existing albums in Lidarr upon request 2025-12-15 09:35:50 +10:00
Pierre
97f9c2d6c9 fix: properly pass qualityProfile and metadataProfile in music requests based on Lidarr configuration selection 2025-12-15 09:35:49 +10:00
Pierre
3e08771c6a refactor: move lidarrServiceId column creation to a separate migration file 2025-12-15 09:35:48 +10:00
Pierre
bf13cdce68 refactor: remove singleton pattern to ensure consistency across all calls and API files 2025-12-15 09:35:47 +10:00
Pierre
b7f8e22db2 refactor: change variable from "month" to "week" for better content update granularity 2025-12-15 09:35:46 +10:00
Pierre
81faf7d8ab fix: remove redundant try/catch since error is already handled with fetchCoverArt.catch() 2025-12-15 09:35:45 +10:00
Pierre
5581865fc3 fix: email notification music button now correctly redirects to the music media page 2025-12-15 09:35:44 +10:00
Pierre
0341c705ef refactor: replace Promise.all with Promise.allSettled to handle external API failures more gracefully 2025-12-15 09:35:43 +10:00
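As a quick aside on why this matters: Promise.all rejects as soon as any promise rejects, while Promise.allSettled resolves with a per-promise status. A generic TypeScript illustration (a sketch, not the repo's code):

// Generic illustration, not the repo's code: tolerate individual external-API
// failures instead of letting one rejection fail the whole batch.
async function fetchAllJson(urls: string[]): Promise<unknown[]> {
  const results = await Promise.allSettled(
    urls.map((url) => fetch(url).then((res) => res.json()))
  );

  // Keep fulfilled results; rejected lookups are simply skipped.
  return results
    .filter((r): r is PromiseFulfilledResult<unknown> => r.status === 'fulfilled')
    .map((r) => r.value);
}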
Pierre
a7e34de2dd refactor: remove duplicate properties in LidarrSettings by extending DVRSettings 2025-12-15 09:35:42 +10:00
Pierre
c212858221 refactor: enforce type validation using z.union for tmdbId and mbId 2025-12-15 09:35:41 +10:00
Pierre
38532bd28a fix: lower maxRPS from 25 to 1 to prevent hitting rate limits 2025-12-15 09:35:40 +10:00
Pierre
9a97811e88 fix: added MEDIA_FAILED handling for music content in email notifications 2025-12-15 09:35:39 +10:00
Pierre
f2103388b5 refactor: reorder and organize permission constants to prevent issues with existing setups 2025-12-15 09:35:39 +10:00
Pierre
d9bda583a5 fix: mitigate remote property injection vulnerabilities in CoverArtArchive 2025-12-15 09:35:38 +10:00
Pierre
be7ae8b423 style/fix: apply pnpm format and remove duplicated constant 2025-12-15 09:35:37 +10:00
Pierre
92a8badb32 fix(ui): correct media action icon size for music page 2025-12-15 09:35:36 +10:00
Pierre
f535b08f0b fix: removed duplicated "'" character in .replace function 2025-12-15 09:35:35 +10:00
Pierre
c6fc576f1e chore: update pnpm-lock.yaml 2025-12-15 09:35:29 +10:00
Pierre
0c3ecc718b fix: mitigate SSRF vulnerabilities 2025-12-15 09:35:05 +10:00
Pierre
3b4529f3b1 fix: no more repeated character ''' in the same character class 2025-12-15 09:35:04 +10:00
Pierre
be3aa05bc9 fix: remove unused square image and restore previously missing ones 2025-12-15 09:35:03 +10:00
Pierre
612fbacd48 fix: remove duplicated LidarrModal 2025-12-15 09:35:02 +10:00
Pierre
ff873e6d2b refactor(mediarequest): merging changes 2025-12-15 09:35:01 +10:00
Pierre
cdb9d2450a refactor(person details): merging Person Details 2025-12-15 09:34:56 +10:00
Pierre
31ce44c452 fix(mediarequests): changed lidarr notification and request to match new architecture
This was using an older architecture for the notifications and requests; this change updates the
system to match how movies and TV are done
2025-12-15 09:26:52 +10:00
Pierre
fe37a1de98 fix: resolved issues with the music slider displaying all menus, and ensured media are properly removed from Lidarr. 2025-12-15 09:26:51 +10:00
Pierre
a190320abd fix: properly fetch music library from Emby servers 2025-12-15 09:26:50 +10:00
Pierre
cb6d271f22 fix: correctly populate ratingKey during music import for Plex and properly display artist name in the slide-over menu 2025-12-15 09:26:49 +10:00
Pierre
f9259cfcdf fix: properly fetch Plex music library with correct release-group mapping 2025-12-15 09:26:48 +10:00
Pierre
a0a8dfc496 fix: titlecard now has proper behaviour even if the image is empty; also applied prettier to the migration file 2025-12-15 09:26:47 +10:00
Pierre
c1c3ae99bc fix: mbId is now created in watchlist table upon migration 2025-12-15 09:26:46 +10:00
Pierre
adf56d63bc feat: lidarr/Music support added 2025-12-15 09:26:35 +10:00
Ludovic Ortega
539d49879d chore: fix translate badge svg url (#2228)
* chore: fix translate badge svg url

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>

* fix: use https instead of http

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>

---------

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-12-14 05:37:36 +08:00
RolliePollie18
15356dfe49 fix(jellyfin-scan): reduce jellyfin API calls during recently added scan (#2205)
* fix(jellyfin scanner): reduce jellyfin API calls during recently added scan

Significantly reduce the number of API calls, addressing CPU spikes on Jellyfin 10.10+ servers.
- Move the getSeasons() call outside the seasons loop (N calls to 1)
- Request MediaSources via the getEpisodes() field parameter instead of individual getItemData() calls per episode (N calls to 1 per season)

Performance improvements (tested on a library with 12 TV shows):
- Scan duration: 43.7s to 9.1s
- Peak CPU: 277% to 115%
- CPU spike duration: 36s to 2s

Functionality is unchanged; all availability statuses are identical before and after.

* fix: add getEpisodes overloads to remove unsafe type assertion

* refactor(jellyfin): use generics instead of overloads

---------

Co-authored-by: patrick-acland <patrick.acland@kraken.tech>
2025-12-09 22:20:47 +08:00
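To illustrate the batching pattern described in the commit above, here is a minimal TypeScript sketch; the helper names (getSeasons, getEpisodes, getItemData) follow the commit message, but the actual Jellyfin API wrapper in the repo may differ.

// Hypothetical sketch of the optimization described above; helper names
// are assumptions taken from the commit message, not the real wrapper.
interface Episode {
  Id: string;
  MediaSources?: unknown[];
}

async function scanShow(
  jellyfin: {
    getSeasons(showId: string): Promise<{ Id: string }[]>;
    getEpisodes(
      showId: string,
      seasonId: string,
      opts?: { fields?: string[] }
    ): Promise<Episode[]>;
  },
  showId: string
): Promise<void> {
  // Before: getSeasons() was called inside the per-season loop, and
  // MediaSources were fetched with one getItemData() call per episode.
  // After: one getSeasons() call, and MediaSources requested up front via
  // the field parameter of getEpisodes() (one call per season).
  const seasons = await jellyfin.getSeasons(showId);
  for (const season of seasons) {
    const episodes = await jellyfin.getEpisodes(showId, season.Id, {
      fields: ['MediaSources'],
    });
    for (const episode of episodes) {
      // Availability can now be derived from episode.MediaSources directly,
      // without an extra per-episode request.
      void episode.MediaSources;
    }
  }
}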
fallenbagel
1f04eeb040 fix: disable automatic auth revalidation on auth pages (#2213)
* fix: disable automatic auth revalidation on auth pages

Prevents unnecessary `/api/v1/auth/me` requests on login, setup, and password reset pages.

fix #738

* fix: update regex to include resetpassword guid & add missing condition in refreshInterval
2025-12-09 13:17:17 +01:00
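A rough sketch of what this kind of change looks like with SWR; the hook name, path regex, and refresh interval below are illustrative assumptions, not the exact code from the PR.

import useSWR from 'swr';
import { useRouter } from 'next/router';

const fetcher = (url: string) => fetch(url).then((res) => res.json());

// Pages where /api/v1/auth/me should not be revalidated automatically
// (illustrative regex; the PR's version also covers the resetpassword guid).
const AUTH_PAGE_RE = /^\/(login|setup|resetpassword)/;

export function useAuthMe() {
  const router = useRouter();
  const onAuthPage = AUTH_PAGE_RE.test(router.pathname);

  return useSWR('/api/v1/auth/me', fetcher, {
    revalidateOnFocus: !onAuthPage,
    revalidateOnReconnect: !onAuthPage,
    // The interval value is an assumption for illustration only.
    refreshInterval: onAuthPage ? 0 : 30_000,
  });
}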
Thibaut Noah
e3028c21f2 docs: add webpush related troubleshooting steps (#2170)
* Update troubleshooting.mdx

Add potential fixes for users who fail to enable their web push notifications

* Update docs/troubleshooting.mdx

Modify appName syntax for better coding norm

Co-authored-by: Gauthier <mail@gauthierth.fr>

* refactor: apply suggestions from review comments

Co-authored-by: Gauthier <mail@gauthierth.fr>

* docs(troubleshooting): fix typos in troubleshooting doc page

---------

Co-authored-by: Gauthier <mail@gauthierth.fr>
Co-authored-by: fallenbagel <98979876+fallenbagel@users.noreply.github.com>
2025-12-09 08:49:42 +00:00
Gauthier
9d8b343790 chore(deps): update all non-major dependencies (#2188)
Update all non-major dependencies. Modifications in `src` files are there to fix linting issues.
2025-12-09 09:40:35 +01:00
fallenbagel
f4fe16608a fix(jellyfin-api): use standard Authorization header (#2211)
Replace X-Emby-Authorization with the standard Authorization header to fix authentication failures when users
have <EnableLegacyAuthorization>false</EnableLegacyAuthorization> in their Jellyfin system.xml.
2025-12-08 15:46:47 +01:00
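For context, a hedged sketch of the header switch; the client/device field values below are placeholders, not necessarily what Seerr sends.

// Jellyfin understands its auth scheme on the standard Authorization header;
// the client/device values below are placeholders, not what Seerr sends.
function jellyfinAuthHeaders(apiKey: string): Record<string, string> {
  const value = [
    'MediaBrowser Client="Seerr"',
    'Device="Seerr"',
    'DeviceId="seerr"',
    'Version="3.0.0"',
    `Token="${apiKey}"`,
  ].join(', ');

  return {
    // Previously sent as X-Emby-Authorization, which fails when
    // <EnableLegacyAuthorization>false</EnableLegacyAuthorization> is set.
    Authorization: value,
  };
}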
Ludovic Ortega
d660a540da chore(helm): prepare for release (#2189)
Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-12-07 17:22:28 +01:00
Ludovic Ortega
48ef2984e5 docs: fix chown command for windows users (#2192)
Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-12-03 14:39:03 +01:00
Disparate2761
c5fc31c352 docs(buildfromsource): touch up path inconsistencies (#2184) 2025-12-01 14:57:01 +01:00
Ludovic Ortega
c3b9ea6ce4 chore: improve PR template (#2175) 2025-11-28 13:05:47 +01:00
Ludovic Ortega
b66b36186a docs: update weblate links (#2168)
Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-11-22 23:29:35 +01:00
Ludovic Ortega
fb5196bdec chore: remove CHANGELOG.md (#2169) 2025-11-22 23:05:42 +01:00
0xsysr3ll
bde322de8e fix(override-rules): show correct genres for both *arr services (#2155) 2025-11-21 22:24:14 +01:00
Gauvain
af083a3cd5 chore: rebrand from Jellyseerr to Seerr across project (#2116) 2025-11-18 22:51:20 +01:00
Ludovic Ortega
f4af6ed5f4 docs: add migration guide (#2069) 2025-11-18 11:12:50 +01:00
0xsysr3ll
267450a297 docs: update AI assistance notice link in pull request template (#2154)
Signed-off-by: 0xsysr3ll <0xsysr3ll@pm.me>
2025-11-14 16:56:30 +01:00
0xsysr3ll
939000fbe4 ci: update Docker Hub image references in CI workflows (#2153)
Signed-off-by: 0xsysr3ll <0xsysr3ll@pm.me>
2025-11-14 16:36:38 +01:00
James Kruger
08800c7cf3 docs: update Kubernetes installation documentation for Seerr (#2126) 2025-11-14 10:57:44 +01:00
0xsysr3ll
2fe72530a2 fix(docker): pass COMMIT_TAG to build stage for custom image builds (#2146)
This PR fixes the issue where custom images built with `--build-arg COMMIT_TAG` would fail because the client bundle didn't receive the commit tag value.

Signed-off-by: 0xsysr3ll <0xsysr3ll@pm.me>
2025-11-12 22:50:25 +01:00
Ludovic Ortega
6dcae346f9 fix(docker): casing in dockerfile (#2141) 2025-11-11 17:00:31 +00:00
0xsysr3ll
597858785e fix(ui): ensure mobile media type filter is always visible on actor pages (#2128)
Signed-off-by: 0xsysr3ll <0xsysr3ll@pm.me>
2025-11-05 21:27:11 +01:00
Joe Harrison
91aa7d143e ci: bump cosign installer to v4.0.0 (#2127) 2025-11-04 11:33:47 +01:00
Ludovic Ortega
41bcbfe9a4 chore: remove packages section in README (#2124)
Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-11-03 21:59:36 +01:00
Joe Harrison
7d4b2853dc ci: combined workflows for ai and support (#2113) 2025-10-31 13:12:07 +01:00
fallenbagel
4980803079 docs: revert docs to legacy docs temporarily (#2110) 2025-10-30 23:31:27 +00:00
Gauthier
4e9c94c80f feat: Overseerr to Jellyseerr migration (#2019)
* feat: add Overseerr migration

* refactor: rename to Seerr

* refactor: more rename to Seerr

* feat: update the value of the MediaStatus.DELETED enum

* fix: add more details in migration logs

* fix: replace .update by .save for TypeORM hooks

* fix: add fake migration to skip the duplicated UpdateWebPush migration

* fix: rewrite the AddUserAvatarCacheFields migration for Overseerr merge

* fix: replace jellyseerr migrations with a dedicated one for overseerr

* fix: update overseerr migration

* fix: update overseerr migration

* fix: remove irrelevant changes

* fix: typos

* docs: update jsdoc comment

* docs: update seerr description

* docs: fix the contributing.md link

* fix: remove unwanted change on postgres dev datasource

* docs: add latest tag to docker image

* fix: migrate old deleted status for 4k media

* fix: update Seerr version check
2025-10-30 19:57:50 +01:00
TacoCake
2e6e9ad657 fix: include video content in the blacklisted tags processing job (#1736)
* fix: include video content in the blacklisted tags processing job

Modified the “blacklisted tags” job to include adult & video content; this correctly blacklists more
adult films that were previously missed, even when they had the tag.

* refactor: remove dead code

* refactor: remove redundant explicit arguments
2025-10-28 20:29:04 -06:00
0xsysr3ll
9a92d6ac30 fix(api): respect is4k parameter for all media status changes (#1951)
Signed-off-by: 0xsysr3ll <0xsysr3ll@pm.me>
2025-10-28 17:26:28 +01:00
0xsysr3ll
7dfa30a151 fix(media): handle 4K Radarr removal for multiple instances (#2037)
This PR fixes an issue where removing 4K movies from Radarr failed when multiple Radarr instances were configured. The backend was misparsing boolean query parameters and using string slugs instead of TMDB IDs. The fix ensures that the correct 4K Radarr instance is targeted and that TMDB IDs are used for movie removal.

Signed-off-by: 0xsysr3ll <0xsysr3ll@pm.me>
2025-10-28 17:25:57 +01:00
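The boolean query-parameter pitfall mentioned here is a common one in Express routes; a minimal illustration (not the actual route code), assuming is4k arrives as a string:

import type { Request } from 'express';

// Query string values arrive as strings, so Boolean(req.query.is4k) is truthy
// even for "false"; compare against the literal string instead.
function parseIs4k(req: Request): boolean {
  return req.query.is4k === 'true';
}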
Gauthier
efc9b00d39 ci: fix AI-generated workflow trigger (#2101) 2025-10-28 15:46:14 +00:00
Gauthier
e246215663 ci: add a new workflow to close AI-generated PRs (#2098)
* ci: add a new workflow to close AI-generated PRs

This PR adds a workflow to automatically close the PRs with too much AI-generated code.

* fix: apply review comments
2025-10-28 14:28:42 +00:00
Joe Harrison
843d05cc3f chore: update to the code of conduct link in bug report (#2091) 2025-10-27 09:57:49 +01:00
Joe Harrison
e781cd56b3 chore(bug.yml): fixed link to the code of conduct in the bug.yml in issue templates (#2090) 2025-10-27 08:31:22 +01:00
Ludovic Ortega
b34ca1543a feat: do not enforce TLD on email (#2075)
fix #1846
2025-10-20 17:24:24 +03:00
Ludovic Ortega
48a61d812b docs: migrate third-parties documentation to a dedicated folder (#2068)
* docs: migrate third-party documentation to a dedicated folder

---------

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-10-20 10:03:21 +02:00
J. Winters-Brown
f7f00ce361 feat: migrate to validator from email-validator (#2059)
* refactor(adds package): this adds the validator package and removes email-validator from dependencies

* refactor(auth.ts and email.ts): migrates from EmailValidator to validator
2025-10-19 22:37:09 +02:00
0xsysr3ll
a7909342b4 fix(api): correct Jellyfin users endpoint documentation (#2073)
Signed-off-by: 0xsysr3ll <0xsysr3ll@pm.me>
2025-10-19 22:32:58 +02:00
Joe Harrison
082ba3d037 ci: added helm cosign verification and renovate app workflow to bump chart versions (#2064)
* ci: added helm cosign verification and renovate app workflow to bump chart versions

* docs: add helm artifacts verification

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>

* fix: update app id

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>

* docs: add documentation link in helm chart and seerr docs

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>

---------

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
Co-authored-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-10-19 04:22:28 +01:00
Brandon Cohen
a975ab25c3 fix: delete endpoint on push notification disable (#2067)
fix: add endpoint deletion on disable

fix: use definemessages util

refactor: add code comment
2025-10-19 00:03:28 +08:00
fallenbagel
0d6bfa18cc fix(download-tracker): reset both service caches when resetting downloads (#2065) 2025-10-17 21:10:02 +02:00
Ludovic Ortega
0dbbac02af docs: add documentation for dockerhub (#2063)
* docs: add documentation for dockerhub

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>

* docs: typo fixes

---------

Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
Co-authored-by: sudo-kraken <joe@j-harrison.co.uk>
2025-10-17 17:22:19 +02:00
Ludovic Ortega
81eab7434f ci: fix concurrency issue on support workflows (#2062) [skip ci]
Signed-off-by: Ludovic Ortega <ludovic.ortega@adminafk.fr>
2025-10-17 17:00:44 +02:00
renovate[bot]
669faccc85 ci(actions): update github/codeql-action action to v4 (#2056)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-16 21:30:50 +02:00
renovate[bot]
a0893a5831 ci(actions): update github actions (#2022)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-16 21:19:33 +02:00
fallenbagel
c4236dce73 docs: HAProxy documentation warning format (#2054)
Updated the warning message in the HAProxy documentation and fixed a typo.
2025-10-16 15:48:13 +02:00
Terry Sposato
f3d8f0d7ab docs: add haproxy configuration example (#2048) 2025-10-16 15:15:09 +02:00
Joe Harrison
a988f8e657 fix: update github repo refs for docker hub (#2053)
* fix: update github repo refs for docker hub

* ci: updated wf to use env var for the docker hub space
2025-10-16 21:12:17 +08:00
Joe Harrison
618563c6d7 docs: added guide for image verification (#2051)
* docs: added guide for image verification

* Update verifying-signed-images.mdx

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-16 14:10:09 +02:00
Joe Harrison
8688645a32 ci: update to release workflow (#2047)
* ci: update to release workflow

* build: re-ran lock file update with typeorm 0.3.12

* build: resync lockfile with develop

* ci: syntax fix in cliff.toml

* Update .github/workflows/release.yml

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* reverting co-pilots nonsense @fallenbagel's fault

Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
2025-10-16 12:53:02 +01:00
Joe Harrison
de0e9b1f35 fix: path in docs and compose for postgres 18 (#2049) 2025-10-16 07:36:56 +02:00
219 changed files with 18532 additions and 4663 deletions


@@ -95,7 +95,7 @@ body:
id: terms
attributes:
label: Code of Conduct
description: By submitting this issue, you agree to follow our [Code of Conduct](/../../CODE_OF_CONDUCT.md)
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/seerr-team/seerr/blob/develop/CODE_OF_CONDUCT.md)
options:
- label: I agree to follow Seerr's Code of Conduct
required: true


@@ -1,14 +1,33 @@
#### Description
<!--
Please read contributing guide before submitting
your pull request. Please fill in each section below to help us better prioritize your pull request. Thanks!
-->
#### Screenshot (if UI-related)
## Description
#### To-Dos
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
- [ ] Disclosed any use of AI (see our [policy](../CONTRIBUTING.md#ai-assistance-notice))
- Fixes #XXXX
## How Has This Been Tested?
<!--- Please describe in detail how you tested your changes. -->
<!--- Include details of your testing environment, and the tests you ran to -->
<!--- see how your change affects other areas of the code, etc. -->
## Screenshots / Logs (if applicable)
## Checklist:
<!--- Go over all the following points, and put an `x` in all the boxes that apply. -->
<!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! -->
- [ ] I have read and followed the contribution [guidelines](https://github.com/seerr-team/seerr/blob/develop/CONTRIBUTING.md).
- [ ] Disclosed any use of AI (see our [policy](https://github.com/seerr-team/seerr/blob/develop/CONTRIBUTING.md#ai-assistance-notice))
- [ ] I have updated the documentation accordingly.
- [ ] All new and existing tests passed.
- [ ] Successful build `pnpm build`
- [ ] Translation keys `pnpm i18n:extract`
- [ ] Database migration (if required)
#### Issues Fixed or Closed
- Fixes #XXXX

94
.github/cliff.toml vendored Normal file

@@ -0,0 +1,94 @@
# git-cliff ~ configuration
# https://git-cliff.org/docs/configuration
[changelog]
header = ""
body = """
{%- macro remote_url() -%}
https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}
{%- endmacro -%}
{%- set excluded_users = ["github-actions[bot]", "dependabot[bot]", "renovate[bot]"] -%}
{% macro print_commit(commit) -%}
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
{% if commit.breaking %}[**breaking**] {% endif %}\
{{ commit.message | upper_first }} - \
([{{ commit.id | truncate(length=7, end="") }}]({{ self::remote_url() }}/commit/{{ commit.id }}))\
{% endmacro -%}
{% if version %}\
{% if previous.version %}\
## [{{ version | trim_start_matches(pat="v") }}]({{ self::remote_url() }}/compare/{{ previous.version }}..{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% endif %}\
{% else %}\
## [unreleased]
{% endif %}\
{%- for group, commits in commits | group_by(attribute="group") %}
### {{ group | striptags | trim | upper_first }}
{%- for commit in commits | filter(attribute="scope") | sort(attribute="scope") %}
{{ self::print_commit(commit=commit) }}
{%- endfor %}
{%- for commit in commits %}
{%- if not commit.scope -%}
{{ self::print_commit(commit=commit) }}
{%- endif -%}
{%- endfor -%}
{%- endfor -%}
{%- set valid_contributors = [] -%}
{%- for c in github.contributors | filter(attribute="is_first_time", value=true) %}
{%- if c.username and c.username not in excluded_users and c.username not in valid_contributors %}
{%- set_global valid_contributors = valid_contributors | concat(with=c.username) %}
{%- endif %}
{%- endfor %}
{%- if valid_contributors | length > 0 %}
## New Contributors ❤️
{%- for username in valid_contributors %}
* @{{ username }} made their first contribution
{%- endfor %}
{%- endif %}
"""
footer = """
<!-- generated by git-cliff -->
"""
trim = true
postprocessors = []
[git]
conventional_commits = true
filter_unconventional = true
split_commits = false
filter_commits = true
commit_preprocessors = [
{ pattern = '.*\[skip ci\].*', replace = "" },
{ pattern = '.*\[ci skip\].*', replace = "" },
]
commit_parsers = [
{ message = "^feat", group = "<!-- 0 -->🚀 Features" },
{ message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
{ message = "^doc", group = "<!-- 3 -->📖 Documentation" },
{ message = "^perf", group = "<!-- 4 -->⚡ Performance" },
{ message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
{ message = "^style", group = "<!-- 5 -->🎨 Styling" },
{ message = "^test", group = "<!-- 6 -->🧪 Testing" },
{ message = "^chore\\(release\\): prepare for", skip = true },
{ message = "^chore\\(deps.*\\)", skip = true },
{ message = "^chore\\(pr\\)", skip = true },
{ message = "^chore\\(pull\\)", skip = true },
{ message = "^chore\\(git-sync\\)", skip = true },
{ message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
{ body = ".*security", group = "<!-- 8 -->🛡️ Security" },
{ message = "^revert", group = "<!-- 9 -->◀️ Revert" },
{ message = '.*\[skip ci\].*', skip = true },
{ message = '.*\[ci skip\].*', skip = true },
]
protect_breaking_commits = false
tag_pattern = "v?[0-9]+\\.[0-9]+\\.[0-9]+.*"
skip_tags = "beta|alpha|rc"
topo_order = false
sort_commits = "newest"


@@ -14,6 +14,9 @@ on:
permissions:
contents: read
env:
DOCKER_HUB: seerr/seerr
concurrency:
group: ci-${{ github.ref }}
cancel-in-progress: true
@@ -23,7 +26,7 @@ jobs:
name: Lint & Test Build
if: github.event_name == 'pull_request'
runs-on: ubuntu-24.04
container: node:22.20.0-alpine3.22@sha256:cb3143549582cc5f74f26f0992cdef4a422b22128cb517f94173a5f910fa4ee7
container: node:22.20.0-alpine3.22@sha256:dbcedd8aeab47fbc0f4dd4bffa55b7c3c729a707875968d467aaaea42d6225af
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -31,7 +34,7 @@ jobs:
persist-credentials: false
- name: Pnpm Setup
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Get pnpm store directory
shell: sh
@@ -140,7 +143,7 @@ jobs:
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
images: |
${{ github.repository }}
${{ env.DOCKER_HUB }}
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=develop


@@ -42,15 +42,15 @@ jobs:
persist-credentials: false
- name: Initialize CodeQL
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
with:
languages: ${{ matrix.language }}
queries: +security-and-quality
- name: Autobuild
uses: github/codeql-action/autobuild@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/autobuild@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
with:
category: '/language:${{ matrix.language }}'


@@ -48,7 +48,7 @@ jobs:
package-manager-cache: false
- name: Pnpm Setup
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Install dependencies
run: pnpm install --frozen-lockfile


@@ -3,9 +3,10 @@
name: Deploy to GitHub Pages
on:
workflow_dispatch:
push:
branches:
- develop
- legacy-jellyseerr
paths:
- 'docs/**'
- 'gen-docs/**'
@@ -34,7 +35,7 @@ jobs:
package-manager-cache: false
- name: Pnpm Setup
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Get pnpm store directory
shell: sh


@@ -55,7 +55,7 @@ jobs:
# get current version
current_version=$(grep '^version:' "$chart_path/Chart.yaml" | awk '{print $2}')
# try to get current release version
if oras manifest fetch "ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}:${current_version}" >/dev/null 2>&1; then
if oras manifest fetch "ghcr.io/${{ github.repository }}/${chart_name}:${current_version}" >/dev/null 2>&1; then
echo "No version change for $chart_name. Skipping."
else
helm dependency build "$chart_path"
@@ -87,8 +87,8 @@ jobs:
name: Publish to ghcr.io
runs-on: ubuntu-24.04
permissions:
packages: write # needed for pushing to github registry
id-token: write # needed for signing the images with GitHub OIDC Token
packages: write
id-token: write
needs: [package-helm-chart]
if: needs.package-helm-chart.outputs.has_artifacts == 'true'
steps:
@@ -105,7 +105,7 @@ jobs:
uses: oras-project/setup-oras@22ce207df3b08e061f537244349aac6ae1d214f6 # v1.2.4
- name: Install Cosign
uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- name: Downloads artifacts
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
@@ -128,17 +128,59 @@ jobs:
# push chart to OCI
chart_release_file=$(basename "$chart_path")
chart_name=${chart_release_file%-*}
helm push ${chart_path} oci://ghcr.io/${GITHUB_REPOSITORY@L} |& tee helm-push-output.log
helm push ${chart_path} oci://ghcr.io/${{ github.repository }} |& tee helm-push-output.log
chart_digest=$(awk -F "[, ]+" '/Digest/{print $NF}' < helm-push-output.log)
# sign chart
cosign sign "ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}@${chart_digest}"
cosign sign "ghcr.io/${{ github.repository }}/${chart_name}@${chart_digest}"
# push artifacthub-repo.yml to OCI
oras push \
ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}:artifacthub.io \
ghcr.io/${{ github.repository }}/${chart_name}:artifacthub.io \
--config /dev/null:application/vnd.cncf.artifacthub.config.v1+yaml \
charts/$chart_name/artifacthub-repo.yml:application/vnd.cncf.artifacthub.repository-metadata.layer.v1.yaml \
|& tee oras-push-output.log
artifacthub_digest=$(grep "Digest:" oras-push-output.log | awk '{print $2}')
# sign artifacthub-repo.yml
cosign sign "ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}:artifacthub.io@${artifacthub_digest}"
cosign sign "ghcr.io/${{ github.repository }}/${chart_name}:artifacthub.io@${artifacthub_digest}"
done
verify:
name: Verify signatures for each chart tag
needs: [publish]
runs-on: ubuntu-24.04
permissions:
contents: read
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: false
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- name: Downloads artifacts
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: artifacts
path: .cr-release-packages/
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Verify signatures for each chart tag
run: |
for chart_path in $(find .cr-release-packages -name '*.tgz' -print); do
chart_release_file=$(basename "$chart_path")
chart_name=${chart_release_file%-*}
version=${chart_release_file#$chart_name-}
version=${version%.tgz}
cosign verify "ghcr.io/${{ github.repository }}/${chart_name}:${version}" \
--certificate-identity "https://github.com/${{ github.workflow_ref }}" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
done


@@ -11,6 +11,9 @@ on:
permissions:
contents: read
env:
DOCKER_HUB: seerr/seerr
concurrency:
group: preview-${{ github.ref }}
cancel-in-progress: true
@@ -115,7 +118,7 @@ jobs:
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
images: |
${{ github.repository }}
${{ env.DOCKER_HUB }}
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=preview-${{ steps.ver.outputs.version }}


@@ -3,7 +3,9 @@
name: Seerr Release
on:
workflow_dispatch:
push:
tags:
- 'v*'
permissions:
contents: read
@@ -12,15 +14,17 @@ concurrency:
group: release-${{ github.ref }}
cancel-in-progress: true
env:
DOCKER_HUB: seerr/seerr
jobs:
semantic-release:
name: Tag and release latest version
runs-on: ubuntu-22.04
env:
HUSKY: 0
changelog:
name: Generate changelog
runs-on: ubuntu-24.04
permissions:
contents: read
outputs:
new_release_published: ${{ steps.release.outputs.new_release_published }}
new_release_version: ${{ steps.release.outputs.new_release_version }}
release_body: ${{ steps.git-cliff.outputs.content }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -28,46 +32,36 @@ jobs:
fetch-depth: 0
persist-credentials: false
- name: Set up Node.js
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
- name: Generate changelog
id: git-cliff
uses: orhun/git-cliff-action@d77b37db2e3f7398432d34b72a12aa3e2ba87e51 # v4.6.0
with:
node-version-file: package.json
package-manager-cache: false
- name: Pnpm Setup
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Get pnpm store directory
shell: sh
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Setup pnpm cache
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install
- name: Release
id: release
uses: cycjimmy/semantic-release-action@9cc899c47e6841430bbaedb43de1560a568dfd16 # v5.0.0
with:
extra_plugins: |
@semantic-release/git@10
@semantic-release/changelog@6
@codedependant/semantic-release-docker@5
config: .github/cliff.toml
args: -vv --current
env:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
OUTPUT: CHANGELOG.md
GITHUB_REPO: ${{ github.repository }}
create-draft-release:
name: Create draft release
runs-on: ubuntu-24.04
permissions:
contents: write
needs: changelog
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Draft Release
run: gh release create ${GITHUB_REF_NAME} -t "Release ${GITHUB_REF_NAME}" -n "${RELEASE_BODY}" --draft
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RELEASE_BODY: ${{ needs.changelog.outputs.release_body }}
build:
name: Build (per-arch, native runners)
needs: semantic-release
if: needs.semantic-release.outputs.new_release_published == 'true'
name: Build (${{ matrix.arch }})
strategy:
matrix:
include:
@@ -78,6 +72,8 @@ jobs:
platform: linux/arm64
arch: arm64
runs-on: ${{ matrix.runner }}
env:
VERSION: ${{ github.ref_name }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -91,7 +87,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Warm cache (no push) — ${{ matrix.platform }}
- name: Warm cache [${{ matrix.platform }}]
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
@@ -100,21 +96,23 @@ jobs:
push: false
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ needs.semantic-release.outputs.new_release_version }}
BUILD_VERSION=${{ env.VERSION }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
cache-from: type=gha,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
provenance: false
publish:
name: Publish multi-arch image
needs: [semantic-release, build]
if: needs.semantic-release.outputs.new_release_published == 'true'
name: Publish multi-arch manifests
needs: build
runs-on: ubuntu-24.04
permissions:
contents: read
id-token: write
packages: write
outputs:
image_digest: ${{ steps.digests.outputs.IMAGE_DIGEST }}
env:
VERSION: ${{ github.ref_name }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -146,14 +144,14 @@ jobs:
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
images: |
${{ github.repository }}
${{ env.DOCKER_HUB }}
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=${{ needs.semantic-release.outputs.new_release_version }}
type=raw,value=${{ env.VERSION }}
labels: |
org.opencontainers.image.created=${{ steps.ts.outputs.TIMESTAMP }}
- name: Build & Push (multi-arch, single tag)
- name: Build & Push (multi-arch)
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
@@ -162,7 +160,7 @@ jobs:
push: true
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ needs.semantic-release.outputs.new_release_version }}
BUILD_VERSION=${{ env.VERSION }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
labels: ${{ steps.meta.outputs.labels }}
tags: ${{ steps.meta.outputs.tags }}
@@ -172,37 +170,158 @@ jobs:
cache-to: type=gha,mode=max
provenance: false
- name: Resolve manifest digest
id: digests
run: |
DIGEST=$(docker buildx imagetools inspect "${{ env.DOCKER_HUB }}:${{ env.VERSION }}" --format '{{json .Manifest.Digest}}' | tr -d '"')
echo "IMAGE_DIGEST=$DIGEST" >> $GITHUB_OUTPUT
- name: Also tag :latest (non-pre-release only)
shell: bash
if: ${{ !contains(env.VERSION, '-') }}
run: |
VER="${{ needs.semantic-release.outputs.new_release_version }}"
if [[ "$VER" != *"-"* ]]; then
docker buildx imagetools create \
-t ${{ github.repository }}:latest \
${{ github.repository }}:${VER}
docker buildx imagetools create \
-t ghcr.io/${{ github.repository }}:latest \
ghcr.io/${{ github.repository }}:${VER}
fi
docker buildx imagetools create \
-t ${{ env.DOCKER_HUB }}:latest \
${{ env.DOCKER_HUB }}:${{ env.VERSION }}
docker buildx imagetools create \
-t ghcr.io/${{ github.repository }}:latest \
ghcr.io/${{ github.repository }}:${{ env.VERSION }}
sign:
name: Sign images and create SBOM attestations
needs: publish
runs-on: ubuntu-24.04
permissions:
contents: read
id-token: write
packages: write
env:
VERSION: ${{ github.ref_name }}
COSIGN_YES: 'true'
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- name: Install Trivy
uses: aquasecurity/setup-trivy@e6c2c5e321ed9123bda567646e2f96565e34abe1 # v0.2.4
- name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Log in to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Sign images
run: |
cosign sign --recursive "ghcr.io/${{ github.repository }}@${{ needs.publish.outputs.image_digest }}"
cosign sign --recursive "${{ env.DOCKER_HUB }}@${{ needs.publish.outputs.image_digest }}"
- name: Generate SBOMs
run: |
trivy image --format cyclonedx --output seerr-ghcr-image-${{ env.VERSION }}.sbom \
"ghcr.io/${{ github.repository }}@${{ needs.publish.outputs.image_digest }}"
trivy image --format cyclonedx --output seerr-dockerhub-image-${{ env.VERSION }}.sbom \
"${{ env.DOCKER_HUB }}@${{ needs.publish.outputs.image_digest }}"
- name: Attest SBOMs
run: |
cosign attest \
--type cyclonedx \
--predicate seerr-ghcr-image-${{ env.VERSION }}.sbom \
"ghcr.io/${{ github.repository }}@${{ needs.publish.outputs.image_digest }}"
cosign attest \
--type cyclonedx \
--predicate seerr-dockerhub-image-${{ env.VERSION }}.sbom \
"${{ env.DOCKER_HUB }}@${{ needs.publish.outputs.image_digest }}"
- name: Upload SBOMs
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: sboms-${{ env.VERSION }}
path: '*.sbom'
if-no-files-found: error
retention-days: 1
verify:
name: Verify signatures and attestations
needs: [publish, sign]
runs-on: ubuntu-24.04
permissions:
contents: read
env:
VERSION: ${{ github.ref_name }}
steps:
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- name: Verify signatures
run: |
cosign verify "ghcr.io/${{ github.repository }}@${{ needs.publish.outputs.image_digest }}" \
--certificate-identity "https://github.com/${{ github.workflow_ref }}" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
cosign verify "${{ env.DOCKER_HUB }}@${{ needs.publish.outputs.image_digest }}" \
--certificate-identity "https://github.com/${{ github.workflow_ref }}" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
- name: Verify attestations
run: |
cosign verify-attestation "ghcr.io/${{ github.repository }}@${{ needs.publish.outputs.image_digest }}" \
--type cyclonedx \
--certificate-identity "https://github.com/${{ github.workflow_ref }}" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com" > /dev/null
cosign verify-attestation "${{ env.DOCKER_HUB }}@${{ needs.publish.outputs.image_digest }}" \
--type cyclonedx \
--certificate-identity "https://github.com/${{ github.workflow_ref }}" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com" > /dev/null
publish-release:
name: Publish release
needs: [create-draft-release, verify]
runs-on: ubuntu-24.04
permissions:
contents: write
env:
VERSION: ${{ github.ref_name }}
steps:
- name: Publish release
run: gh release edit "${{ env.VERSION }}" --draft=false --repo "${{ github.repository }}"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
discord:
name: Send Discord Notification
needs: publish
needs: publish-release
if: always()
runs-on: ubuntu-24.04
steps:
- name: Determine Workflow Status
- name: Determine status
id: status
run: |
case "${{ needs.publish.result }}" in
case "${{ needs.publish-release.result }}" in
success) echo "status=Success" >> $GITHUB_OUTPUT; echo "colour=3066993" >> $GITHUB_OUTPUT ;;
failure) echo "status=Failure" >> $GITHUB_OUTPUT; echo "colour=15158332" >> $GITHUB_OUTPUT ;;
cancelled) echo "status=Cancelled" >> $GITHUB_OUTPUT; echo "colour=10181046" >> $GITHUB_OUTPUT ;;
*) echo "status=Skipped" >> $GITHUB_OUTPUT; echo "colour=9807270" >> $GITHUB_OUTPUT ;;
esac
- name: Send Discord notification
shell: bash
- name: Send notification
run: |
WEBHOOK="${{ secrets.DISCORD_WEBHOOK }}"
@@ -217,7 +336,7 @@ jobs:
{ "name": "Event", "value": "${{ github.event_name }}", "inline": true },
{ "name": "Triggered by", "value": "${{ github.actor }}", "inline": true },
{ "name": "Workflow", "value": "[${{ github.workflow }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})", "inline": true }
],
]
}]
}
EOF


@@ -0,0 +1,181 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Renovate Helm Hooks
on:
pull_request:
branches:
- develop
paths:
- 'charts/**'
permissions: {}
concurrency:
group: renovate-helm-hooks-${{ github.ref }}
cancel-in-progress: true
jobs:
renovate-post-run:
name: Renovate Bump Chart Version
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
if: github.actor == 'renovate[bot]'
steps:
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: false
- uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: app-token
with:
app-id: 2138788
private-key: ${{ secrets.APP_SEERR_HELM_PRIVATE_KEY }}
- name: Set up chart-testing
uses: helm/chart-testing-action@0d28d3144d3a25ea2cc349d6e59901c4ff469b3b # v2.7.0
- name: Run chart-testing (list-changed)
id: list-changed
run: |
changed="$(ct list-changed --target-branch ${TARGET_BRANCH})"
if [[ -n "$changed" ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
echo "changed_list=${changed//$'\n'/ }" >> "$GITHUB_OUTPUT"
fi
env:
TARGET_BRANCH: ${{ github.event.repository.default_branch }}
- name: Bump chart version
if: steps.list-changed.outputs.changed == 'true'
env:
CHART: ${{ steps.list-changed.outputs.changed_list }}
run: |
if [[ ! -d "${CHART}" ]]; then
echo "${CHART} directory not found"
exit 0
fi
# Extract current appVersion and chart version from Chart.yaml
APP_VERSION=$(grep -e "^appVersion:" "$CHART/Chart.yaml" | cut -d ":" -f 2 | tr -d '[:space:]' | tr -d '"')
CHART_VERSION=$(grep -e "^version:" "$CHART/Chart.yaml" | cut -d ":" -f 2 | tr -d '[:space:]' | tr -d '"')
# Extract major, minor and patch versions of appVersion
APP_MAJOR_VERSION=$(printf '%s' "$APP_VERSION" | cut -d "." -f 1)
APP_MINOR_VERSION=$(printf '%s' "$APP_VERSION" | cut -d "." -f 2)
APP_PATCH_VERSION=$(printf '%s' "$APP_VERSION" | cut -d "." -f 3)
# Extract major, minor and patch versions of chart version
CHART_MAJOR_VERSION=$(printf '%s' "$CHART_VERSION" | cut -d "." -f 1)
CHART_MINOR_VERSION=$(printf '%s' "$CHART_VERSION" | cut -d "." -f 2)
CHART_PATCH_VERSION=$(printf '%s' "$CHART_VERSION" | cut -d "." -f 3)
# Get previous appVersion from the base commit of the pull request
BASE_COMMIT=$(git merge-base origin/main HEAD)
PREV_APP_VERSION=$(git show "$BASE_COMMIT":"$CHART/Chart.yaml" | grep -e "^appVersion:" | cut -d ":" -f 2 | tr -d '[:space:]' | tr -d '"')
# Extract major, minor and patch versions of previous appVersion
PREV_APP_MAJOR_VERSION=$(printf '%s' "$PREV_APP_VERSION" | cut -d "." -f 1)
PREV_APP_MINOR_VERSION=$(printf '%s' "$PREV_APP_VERSION" | cut -d "." -f 2)
PREV_APP_PATCH_VERSION=$(printf '%s' "$PREV_APP_VERSION" | cut -d "." -f 3)
# Check if the major, minor, or patch version of appVersion has changed
if [[ "$APP_MAJOR_VERSION" != "$PREV_APP_MAJOR_VERSION" ]]; then
# Bump major version of the chart and reset minor and patch versions to 0
CHART_MAJOR_VERSION=$((CHART_MAJOR_VERSION+1))
CHART_MINOR_VERSION=0
CHART_PATCH_VERSION=0
elif [[ "$APP_MINOR_VERSION" != "$PREV_APP_MINOR_VERSION" ]]; then
# Bump minor version of the chart and reset patch version to 0
CHART_MINOR_VERSION=$((CHART_MINOR_VERSION+1))
CHART_PATCH_VERSION=0
elif [[ "$APP_PATCH_VERSION" != "$PREV_APP_PATCH_VERSION" ]]; then
# Bump patch version of the chart
CHART_PATCH_VERSION=$((CHART_PATCH_VERSION+1))
fi
# Update the chart version in Chart.yaml
CHART_NEW_VERSION="${CHART_MAJOR_VERSION}.${CHART_MINOR_VERSION}.${CHART_PATCH_VERSION}"
sed -i "s/^version:.*/version: ${CHART_NEW_VERSION}/" "$CHART/Chart.yaml"
- name: Ensure documentation is updated
if: steps.list-changed.outputs.changed == 'true'
uses: docker://jnorwood/helm-docs:v1.14.2@sha256:7e562b49ab6b1dbc50c3da8f2dd6ffa8a5c6bba327b1c6335cc15ce29267979c
- name: Commit changes
if: steps.list-changed.outputs.changed == 'true'
env:
CHART: ${{ steps.list-changed.outputs.changed_list }}
GITHUB_TOKEN: ${{ steps.app-token.outputs.token }}
GITHUB_HEAD_REF: ${{ github.head_ref }}
run: |
# Define the target directory
TARGET_DIR="$CHART"
# Fetch deleted files in the target directory
DELETED_FILES=$(git diff --diff-filter=D --name-only HEAD -- "$TARGET_DIR")
# Fetch added/modified files in the target directory
MODIFIED_FILES=$(git diff --diff-filter=ACM --name-only HEAD -- "$TARGET_DIR")
# Create a temporary file for JSON output
FILE_CHANGES_JSON_FILE=$(mktemp)
# Initialize JSON structure in the file
echo '{ "deletions": [], "additions": [] }' > "$FILE_CHANGES_JSON_FILE"
# Add deletions
for file in $DELETED_FILES; do
jq --arg path "$file" '.deletions += [{"path": $path}]' "$FILE_CHANGES_JSON_FILE" > "$FILE_CHANGES_JSON_FILE.tmp"
mv "$FILE_CHANGES_JSON_FILE.tmp" "$FILE_CHANGES_JSON_FILE"
done
# Add additions (new or modified files)
for file in $MODIFIED_FILES; do
BASE64_CONTENT=$(base64 -w 0 <"$file") # Encode file content
jq --arg path "$file" --arg content "$BASE64_CONTENT" \
'.additions += [{"path": $path, "contents": $content}]' "$FILE_CHANGES_JSON_FILE" > "$FILE_CHANGES_JSON_FILE.tmp"
mv "$FILE_CHANGES_JSON_FILE.tmp" "$FILE_CHANGES_JSON_FILE"
done
# Create a temporary file for the final JSON payload
JSON_PAYLOAD_FILE=$(mktemp)
# Construct the final JSON using jq and store it in a file
jq -n --arg repo "$GITHUB_REPOSITORY" \
--arg branch "$GITHUB_HEAD_REF" \
--arg message "fix: post upgrade changes from renovate" \
--arg expectedOid "$GITHUB_SHA" \
--slurpfile fileChanges "$FILE_CHANGES_JSON_FILE" \
'{
query: "mutation ($input: CreateCommitOnBranchInput!) {
createCommitOnBranch(input: $input) {
commit {
url
}
}
}",
variables: {
input: {
branch: {
repositoryNameWithOwner: $repo,
branchName: $branch
},
message: { headline: $message },
fileChanges: $fileChanges[0],
expectedHeadOid: $expectedOid
}
}
}' > "$JSON_PAYLOAD_FILE"
# Call GitHub API
curl https://api.github.com/graphql -f \
-sSf -H "Authorization: Bearer $GITHUB_TOKEN" \
--data "@$JSON_PAYLOAD_FILE"
# Clean up temporary files
rm "$FILE_CHANGES_JSON_FILE" "$JSON_PAYLOAD_FILE"

111
.github/workflows/seerr-labeller.yml vendored Normal file

@@ -0,0 +1,111 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: 'Seerr Labeller'
on:
pull_request_target:
types: [labeled, unlabeled, reopened]
issues:
types: [labeled, unlabeled, reopened]
permissions: {}
jobs:
ai-generated-support:
if: >
github.event_name == 'pull_request_target' &&
(github.event.label.name == 'ai-generated' || (github.event.action == 'reopened' && contains(github.event.pull_request.labels.*.name, 'ai-generated')))
runs-on: ubuntu-24.04
concurrency:
group: ai-generated-${{ github.event.pull_request.number }}
cancel-in-progress: true
permissions:
pull-requests: write
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
NUMBER: ${{ github.event.pull_request.number }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
steps:
- name: Label added, comment and close pull request
if: github.event.action == 'labeled' && github.event.label.name == 'ai-generated'
shell: bash
env:
BODY: >
:wave: @${{ env.PR_AUTHOR }}, thank you for your contribution!
However, this pull request has been closed because it appears to contain a significant amount of AI-generated code without sufficient human review or supervision.
AI-generated code can often introduce subtle bugs, poor design patterns, or inconsistent styles that make long-term maintenance difficult and reduce overall code quality. For the sake of the project's future stability and readability, we require that all contributions meet our established coding standards and demonstrate clear developer oversight.
This pull request is also too large for effective human review. Please discuss with us on how to break down these changes into smaller, more focused PRs to ensure a thorough and efficient review process.
If you'd like to revise and resubmit your changes with careful review and cleanup, we'd be happy to take another look.
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh pr comment "$NUMBER" -R "$GH_REPO" -b "$BODY" || true
retry gh pr close "$NUMBER" -R "$GH_REPO" || true
gh pr lock "$NUMBER" -R "$GH_REPO" -r "spam" || true
- name: Label removed, reopen and unlock pull request
if: github.event.action == 'unlabeled' && github.event.label.name == 'ai-generated'
shell: bash
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh pr reopen "$NUMBER" -R "$GH_REPO" || true
gh pr unlock "$NUMBER" -R "$GH_REPO" || true
- name: Remove AI-generated label on manual reopen
if: github.event.action == 'reopened'
shell: bash
run: |
gh pr edit "$NUMBER" -R "$GH_REPO" --remove-label "ai-generated" || true
gh pr unlock "$NUMBER" -R "$GH_REPO" || true
support:
if: >
github.event_name == 'issues' &&
(github.event.label.name == 'support' ||
(github.event.action == 'reopened' && contains(github.event.issue.labels.*.name, 'support')))
runs-on: ubuntu-24.04
concurrency:
group: support-${{ github.event.issue.number }}
cancel-in-progress: true
permissions:
issues: write
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
NUMBER: ${{ github.event.issue.number }}
ISSUE_AUTHOR: ${{ github.event.issue.user.login }}
steps:
- name: Label added, comment and close issue
if: github.event.action == 'labeled' && github.event.label.name == 'support'
shell: bash
env:
BODY: >
:wave: @${{ env.ISSUE_AUTHOR }}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be a support request. Please use our support channels
to get help with Seerr.
- [Discord](https://discord.gg/seerr)
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue comment "$NUMBER" -R "$GH_REPO" -b "$BODY" || true
retry gh issue close "$NUMBER" -R "$GH_REPO" || true
gh issue lock "$NUMBER" -R "$GH_REPO" -r "off_topic" || true
- name: Label removed, reopen and unlock issue
if: github.event.action == 'unlabeled' && github.event.label.name == 'support'
shell: bash
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue reopen "$NUMBER" -R "$GH_REPO" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true
- name: Remove support label on manual reopen
if: github.event.action == 'reopened'
shell: bash
run: |
gh issue edit "$NUMBER" -R "$GH_REPO" --remove-label "support" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true


@@ -1,58 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: 'Support requests'
on:
issues:
types: [labeled, unlabeled, reopened]
permissions:
issues: read
concurrency:
group: support-${{ github.event.issue.number }}
cancel-in-progress: true
jobs:
support:
if: github.event.label.name == 'support' || github.event.action == 'reopened'
runs-on: ubuntu-24.04
permissions:
issues: write
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
NUMBER: ${{ github.event.issue.number }}
ISSUE_AUTHOR: ${{ github.event.issue.user.login }}
steps:
- name: Label added, comment and close issue
if: github.event.action == 'labeled' && github.event.label.name == 'support'
shell: bash
env:
BODY: >
:wave: @${{ env.ISSUE_AUTHOR }}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be a support request. Please use our support channels
to get help with Seerr.
- [Discord](https://discord.gg/seerr)
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue comment "$NUMBER" -R "$GH_REPO" -b "$BODY" || true
retry gh issue close "$NUMBER" -R "$GH_REPO" || true
gh issue lock "$NUMBER" -R "$GH_REPO" -r "off_topic" || true
- name: Reopened or label removed, unlock issue
if: github.event.action == 'unlabeled' && github.event.label.name == 'support'
shell: bash
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue reopen "$NUMBER" -R "$GH_REPO" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true
- name: Remove support label on manual reopen
if: github.event.action == 'reopened'
shell: bash
run: |
gh issue edit "$NUMBER" -R "$GH_REPO" --remove-label "support" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true


@@ -36,7 +36,7 @@ jobs:
package-manager-cache: false
- name: Pnpm Setup
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Get pnpm store directory
shell: sh


@@ -56,6 +56,6 @@ jobs:
ignore-unfixed: true
- name: Upload SARIF to code scanning
uses: github/codeql-action/upload-sarif@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/upload-sarif@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
with:
sarif_file: trivy.sarif

3
.gitignore vendored

@@ -71,3 +71,6 @@ tsconfig.tsbuildinfo
# Config Cache Directory
config/cache
# Docker compose
compose.override.yaml


@@ -2,7 +2,6 @@
.next/
dist/
config/
CHANGELOG.md
pnpm-lock.yaml
cypress/config/settings.cypress.json

File diff suppressed because it is too large


@@ -151,9 +151,9 @@ When adding new UI text, please try to adhere to the following guidelines:
## Translation
We use [Weblate](https://jellyseerr.borgcube.de/projects/jellyseerr/jellyseerr-frontend/) for our translations, and your help with localizing Seerr would be greatly appreciated! If your language is not listed below, please [open a feature request](/../../issues/new/choose).
We use [Weblate](https://translate.seerr.dev/projects/seerr/seerr-frontend/) for our translations, and your help with localizing Seerr would be greatly appreciated! If your language is not listed below, please [open a feature request](/../../issues/new/choose).
<a href="https://jellyseerr.borgcube.de/engage/jellysseerr/"><img src="https://jellyseerr.borgcube.de/widget/jellyseerr/multi-auto.svg" alt="Translation status" /></a>
<a href="https://translate.seerr.dev/engage/seerr/"><img src="https://translate.seerr.dev/widget/seerr/multi-auto.svg" alt="Translation status" /></a>
## Migrations


@@ -13,7 +13,10 @@ WORKDIR /app
FROM base AS prod-deps
RUN --mount=type=cache,id=pnpm,target=/pnpm/store CI=true pnpm install --prod --frozen-lockfile
FROM base as build
FROM base AS build
ARG COMMIT_TAG
ENV COMMIT_TAG=${COMMIT_TAG}
RUN \
case "${TARGETPLATFORM}" in \


@@ -7,8 +7,8 @@
</p>
<p align="center">
<a href="https://discord.gg/seerr"><img src="https://img.shields.io/discord/783137440809746482" alt="Discord"></a>
<a href="https://hub.docker.com/r/fallenbagel/jellyseerr"><img src="https://img.shields.io/docker/pulls/fallenbagel/jellyseerr" alt="Docker pulls"></a>
<a href="http://translate.jellyseerr.dev/engage/jellyseerr/"><img src="http://translate.jellyseerr.dev/widget/jellyseerr/jellyseerr-frontend/svg-badge.svg" alt="Translation status" /></a>
<a href="https://hub.docker.com/r/seerr/seerr"><img src="https://img.shields.io/docker/pulls/seerr/seerr" alt="Docker pulls"></a>
<a href="https://translate.seerr.dev/engage/seerr/"><img src="https://translate.seerr.dev/widget/seerr/svg-badge.svg" alt="Translation status" /></a>
<a href="https://github.com/seerr-team/seerr/blob/develop/LICENSE"><img alt="GitHub" src="https://img.shields.io/github/license/seerr-team/seerr"></a>
**Seerr** is a free and open source software application for managing requests for your media library. It integrates with the media server of your choice: [Jellyfin](https://jellyfin.org), [Plex](https://plex.tv), and [Emby](https://emby.media/). In addition, it integrates with your existing services, such as **[Sonarr](https://sonarr.tv/)** and **[Radarr](https://radarr.video/)**.
@@ -36,12 +36,6 @@ Check out our documentation for instructions on how to install and run Seerr:
https://docs.seerr.dev/getting-started/
### Packages:
Archlinux: [AUR](https://aur.archlinux.org/packages/jellyseerr)
Nix: [Nixpkg](https://search.nixos.org/packages?channel=unstable&show=jellyseerr)
## Preview
<img src="./public/preview.jpg">

View File

@@ -4,8 +4,8 @@ name: seerr-chart
description: Seerr helm chart for Kubernetes
type: application
version: 3.0.0
# renovate: image=ghcr.io/fallenbagel/jellyseerr
appVersion: '2.7.3'
# renovate: image=ghcr.io/seerr-team/seerr
appVersion: '3.0.0'
maintainers:
- name: Seerr Team
url: https://github.com/orgs/seerr-team/people

View File

@@ -1,6 +1,6 @@
# seerr-chart
![Version: 3.0.0](https://img.shields.io/badge/Version-3.0.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 2.7.3](https://img.shields.io/badge/AppVersion-2.7.3-informational?style=flat-square)
![Version: 3.0.0](https://img.shields.io/badge/Version-3.0.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 3.0.0](https://img.shields.io/badge/AppVersion-3.0.0-informational?style=flat-square)
Seerr helm chart for Kubernetes
@@ -20,11 +20,15 @@ Seerr helm chart for Kubernetes
Kubernetes: `>=1.23.0-0`
## Installation
Refer to [Seerr kubernetes documentation](https://docs.seerr.dev/getting-started/kubernetes)
## Update Notes
### Updating to 3.0.0
Nothing has changed; we just rebranded the `jellyseerr` Helm chart to `seerr` 🥳.
Nothing has changed; we just rebranded the `jellyseerr` Helm chart to `seerr` 🥳. Refer to our [Migration guide](https://docs.seerr.dev/migration-guide).
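For example, an existing release can usually be pointed at the renamed chart with an in-place upgrade (a minimal sketch assuming your release is named `seerr`; if Helm refuses to switch chart sources, reinstall with your existing values instead):
```console
helm upgrade --install seerr oci://ghcr.io/seerr-team/seerr/seerr-chart --reuse-values
```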
### Updating to 2.7.0
@@ -66,12 +70,20 @@ If `replicaCount` value was used - remove it. Helm update should work fine after
| nodeSelector | object | `{}` | |
| podAnnotations | object | `{}` | |
| podLabels | object | `{}` | |
| podSecurityContext | object | `{}` | |
| podSecurityContext.fsGroup | int | `1000` | |
| podSecurityContext.fsGroupChangePolicy | string | `"OnRootMismatch"` | |
| probes.livenessProbe | object | `{}` | Configure liveness probe |
| probes.readinessProbe | object | `{}` | Configure readiness probe |
| probes.startupProbe | string | `nil` | Configure startup probe |
| resources | object | `{}` | |
| securityContext | object | `{}` | |
| securityContext.allowPrivilegeEscalation | bool | `false` | |
| securityContext.capabilities.drop[0] | string | `"ALL"` | |
| securityContext.privileged | bool | `false` | |
| securityContext.readOnlyRootFilesystem | bool | `false` | |
| securityContext.runAsGroup | int | `1000` | |
| securityContext.runAsNonRoot | bool | `true` | |
| securityContext.runAsUser | int | `1000` | |
| securityContext.seccompProfile.type | string | `"RuntimeDefault"` | |
| service.port | int | `80` | |
| service.type | string | `"ClusterIP"` | |
| serviceAccount.annotations | object | `{}` | Annotations to add to the service account |

View File

@@ -14,11 +14,15 @@
{{ template "chart.requirementsSection" . }}
## Installation
Refer to [Seerr kubernetes documentation](https://docs.seerr.dev/getting-started/kubernetes)
## Update Notes
### Updating to 3.0.0
Nothing change we just rebranded `jellyseerr` helm-chart to `seerr` :)
Nothing has changed; we just rebranded the `jellyseerr` Helm chart to `seerr` 🥳. Refer to our [Migration guide](https://docs.seerr.dev/migration-guide).
### Updating to 2.7.0

View File

@@ -50,16 +50,22 @@ serviceAccount:
podAnnotations: {}
podLabels: {}
podSecurityContext: {}
# fsGroup: 2000
podSecurityContext:
fsGroup: 1000
fsGroupChangePolicy: OnRootMismatch
securityContext: {}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: false
runAsNonRoot: true
privileged: false
runAsUser: 1000
runAsGroup: 1000
seccompProfile:
type: RuntimeDefault
service:
type: ClusterIP

View File

@@ -31,6 +31,6 @@ services:
ports:
- '5432:5432'
volumes:
- postgres:var/lib/postgresql/18/docker
- postgres:/var/lib/postgresql
volumes:
postgres:

View File

@@ -20,7 +20,7 @@ DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging.
## PostgreSQL Options
:::caution
When migrating Postgres from version 17 to 18 in Docker, note that the data mount point has changed. Instead of using `/var/lib/postgresql/data`, the correct mount path is now `/var/lib/postgresql/18/docker`.
When migrating Postgres from version 17 to 18 in Docker, note that the data mount point has changed. Instead of using `/var/lib/postgresql/data`, the correct mount path is now `/var/lib/postgresql`.
Refer to the [PostgreSQL Docker documentation](https://hub.docker.com/_/postgres/#pgdata) to learn how to migrate or opt out of this change.
:::
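Building on the caution above, a Compose service for Postgres 18 would mount its data volume at the new path (a minimal sketch; the service and volume names are assumptions):
```yaml
services:
  postgres:
    image: postgres:18
    volumes:
      - postgres:/var/lib/postgresql
volumes:
  postgres:
```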

View File

@@ -266,3 +266,36 @@ Add the following Location block to your existing Server configuration.
</TabItem>
</Tabs>
## HAProxy (v3)
:::warning
This is third-party documentation maintained by the community. We can't provide support for this setup and are unable to test it.
:::
Add the following frontend and backend configurations for your Seerr instance:
```haproxy
frontend seerr-frontend
bind 0.0.0.0:80
bind 0.0.0.0:443 ssl crt /etc/ssl/private/seerr.example.com.pem
mode http
log global
option httplog
option http-keep-alive
http-request set-header X-Real-IP %[src]
option forwardfor
acl seerr hdr(host) -i seerr.example.com
redirect scheme https code 301 if !{ ssl_fc }
use_backend seerr-backend if seerr
backend seerr-backend
mode http
log global
option httplog
http-response set-header Strict-Transport-Security max-age=15552000
option httpchk GET /api/v1/status
timeout connect 30000
timeout server 30000
retries 3
server seerr 127.0.0.1:5055 check inter 1000
```

View File

@@ -24,10 +24,9 @@ import TabItem from '@theme/TabItem';
```bash
sudo mkdir -p /opt/seerr && cd /opt/seerr
```
2. Clone the Seerr repository and checkout the develop branch:
2. Clone the Seerr repository and checkout the main branch:
```bash
git clone https://github.com/fallenbagel/jellyseerr.git
cd jellyseerr
git clone https://github.com/seerr-team/seerr.git .
git checkout main
```
3. Install the dependencies:
@@ -199,9 +198,9 @@ pm2 status seerr
mkdir C:\seerr
cd C:\seerr
```
2. Clone the Seerr repository and checkout the develop branch:
2. Clone the Seerr repository and checkout the main branch:
```powershell
git clone https://github.com/fallenbagel/jellyseerr.git .
git clone https://github.com/seerr-team/seerr.git .
git checkout main
```
3. Install the dependencies:

View File

@@ -11,6 +11,16 @@ Details on how to install Docker can be found on the [official Docker website](h
Refer to [Configuring Databases](/extending-seerr/database-config#postgresql-options) for details on how to configure your database.
:::
:::info
An alternative Docker image is available on Docker Hub for this project. You can find it at [Docker Hub Repository Link](https://hub.docker.com/r/seerr/seerr)
:::
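For example, the Docker Hub image can be pulled directly (a sketch; using the `latest` tag is an assumption, pin a specific version if you prefer):
```bash
docker pull seerr/seerr:latest
```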
:::info
All official Seerr images are cryptographically signed and include a verified [Software Bill of Materials (SBOM)](https://cyclonedx.org/).
To confirm that the container image you are using is authentic and unmodified, please refer to the [Verifying Signed Artifacts](/using-seerr/advanced/verifying-signed-artifacts#verifying-signed-images) guide.
:::
## Unix (Linux, macOS)
:::warning
Be sure to replace `/path/to/appdata/config` in the below examples with a valid host directory path. If this volume mount is not configured correctly, your Seerr settings/data will not be persisted when the container is recreated (e.g., when updating the image or rebooting your machine).
@@ -38,7 +48,7 @@ docker run -d \
-p 5055:5055 \
-v /path/to/appdata/config:/app/config \
--restart unless-stopped \
ghcr.io/seerr-team/seerr
ghcr.io/seerr-team/seerr:latest
```
The argument `-e PORT=5055` is optional.
@@ -62,7 +72,7 @@ docker stop seerr && docker rm seerr
```
Pull the latest image:
```bash
docker pull ghcr.io/seerr-team/seerr
docker pull ghcr.io/seerr-team/seerr:latest
```
Finally, run the container with the same parameters originally used to create the container:
```bash
@@ -125,15 +135,6 @@ You may alternatively use a third-party mechanism like [dockge](https://github.c
</TabItem>
</Tabs>
## Unraid
1. Ensure you have the **Community Applications** plugin installed.
2. Inside the **Community Applications** app store, search for **Seerr**.
3. Click the **Install Button**.
4. On the following **Add Container** screen, make changes to the **Host Port** and **Host Path 1** \(Appdata\) as needed.
5. Click apply and access "Seerr" at your `<ServerIP:HostPort>` in a web browser.
## Windows
Please refer to the [Docker Desktop for Windows user manual](https://docs.docker.com/docker-for-windows/) for details on how to install Docker on Windows. There is no need to install a Linux distro if using named volumes like in the example below.
@@ -165,7 +166,7 @@ docker run -d \
-p 5055:5055 \
-v seerr-data:/app/config \
--restart unless-stopped \
ghcr.io/seerr-team/seerr
ghcr.io/seerr-team/seerr:latest
```
The argument `-e PORT=5055` is optional.

View File

@@ -1,21 +1,26 @@
---
title: Kubernetes (Advanced)
description: Install Jellyseerr in Kubernetes
sidebar_position: 5
description: Install Seerr in Kubernetes
sidebar_position: 3
---
# Kubernetes
:::info
:::warning
This method is not recommended for most users. It is intended for advanced users who are using Kubernetes.
:::
:::info
All official Seerr charts are cryptographically signed and include a verified [Software Bill of Materials (SBOM)](https://cyclonedx.org/).
To confirm that the chart you are using is authentic and unmodified, please refer to the [Verifying Signed Artifacts](/using-seerr/advanced/verifying-signed-artifacts#verifying-signed-helm-charts) guide.
:::
## Installation
```console
helm install seerr oci://ghcr.io/seerr-team/seerr/seerr-chart
```
Helm values can be found in the Jellyseerr repository under [charts/jellyseerr-chart/README.md](https://github.com/fallenbagel/jellyseerr/tree/develop/charts/jellyseerr-chart).
Helm values can be found in the Seerr repository under [charts/seerr-chart/README.md](https://github.com/seerr-team/seerr/tree/develop/charts/seerr-chart).
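For example, individual values can be overridden at install time (a minimal sketch; the overrides shown are illustrative, not recommendations):
```console
helm install seerr oci://ghcr.io/seerr-team/seerr/seerr-chart \
  --set service.type=ClusterIP \
  --set service.port=80
```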
Verify the signature with [cosign](https://docs.sigstore.dev/cosign/system_config/installation/) (replace `[tag]` with the tag you want to verify):
```console
cosign verify ghcr.io/fallenbagel/jellyseerr/jellyseerr-chart:[tag] --certificate-identity=https://github.com/fallenbagel/jellyseerr/.github/workflows/helm.yml@refs/heads/main --certificate-oidc-issuer=https://token.actions.githubusercontent.com
cosign verify ghcr.io/seerr-team/seerr/seerr-chart:[tag] --certificate-identity=https://github.com/seerr-team/seerr/.github/workflows/helm.yml@refs/heads/main --certificate-oidc-issuer=https://token.actions.githubusercontent.com
```

View File

@@ -1,271 +0,0 @@
---
title: Nix Package Manager (Advanced)
description: Install Seerr using Nix
sidebar_position: 3
---
import { SeerrVersion, NixpkgVersion } from '@site/src/components/SeerrVersion';
import Admonition from '@theme/Admonition';
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
# Nix Package Manager (Advanced)
:::info
This method is not recommended for most users. It is intended for advanced users who are using Nix as their package manager.
:::
export const VersionMismatchWarning = () => {
let seerrVersion = null;
let nixpkgVersions = null;
try {
seerrVersion = SeerrVersion();
nixpkgVersions = NixpkgVersion();
} catch (err) {
return (
<Admonition type="error">
Failed to load version information. Error: {err.message || JSON.stringify(err)}
</Admonition>
);
}
if (!nixpkgVersions || nixpkgVersions.error) {
return (
<Admonition type="error">
Failed to fetch Nixpkg versions: {nixpkgVersions?.error || 'Unknown error'}
</Admonition>
);
}
const isUnstableUpToDate = seerrVersion === nixpkgVersions.unstable;
const isStableUpToDate = seerrVersion === nixpkgVersions.stable;
return (
<>
{!isStableUpToDate ? (
<Admonition type="warning">
The{' '}
<a href="https://github.com/NixOS/nixpkgs/blob/nixos-24.11/pkgs/servers/jellyseerr/default.nix#L14">
upstream Jellyseerr Nix Package (v{nixpkgVersions.stable})
</a>{' '}
is not <b>up-to-date</b>. If you want to use <b>Jellyseerr v{seerrVersion}</b>,{' '}
{isUnstableUpToDate ? (
<>
consider using the{' '}
<a href="https://github.com/NixOS/nixpkgs/blob/nixos-unstable/pkgs/by-name/je/jellyseerr/package.nix">
unstable package
</a>{' '}
instead.
</>
) : (
<>
you will need to{' '}
<a href="#overriding-the-package-derivation">override the package derivation</a>.
</>
)}
</Admonition>
) : null}
</>
);
};
<VersionMismatchWarning />
## Installation
To get up and running with jellyseerr using Nix, you can add the following to your `configuration.nix`:
```nix
{ config, pkgs, ... }:
{
services.jellyseerr.enable = true;
}
```
If you want more advanced configuration options, you can use the following:
<Tabs groupId="nixpkg-methods" queryString>
<TabItem value="default" label="Default Configurations">
```nix
{ config, pkgs, ... }:
{
services.jellyseerr = {
enable = true;
port = 5055;
openFirewall = true;
package = pkgs.jellyseerr; # Use the unstable package if stable is not up-to-date
};
}
```
</TabItem>
<TabItem value="custom" label="Database Configurations">
In order to use postgres, you will need to override the default module of jellyseerr with the following, as the current default module is not compatible with postgres:
```nix
{
config,
pkgs,
lib,
...
}:
with lib;
let
cfg = config.services.jellyseerr;
in
{
meta.maintainers = [ maintainers.camillemndn ];
disabledModules = [ "services/misc/jellyseerr.nix" ];
options.services.jellyseerr = {
enable = mkEnableOption ''Jellyseerr, a requests manager for Jellyfin'';
openFirewall = mkOption {
type = types.bool;
default = false;
description = ''Open port in the firewall for the Jellyseerr web interface.'';
};
port = mkOption {
type = types.port;
default = 5055;
description = ''The port which the Jellyseerr web UI should listen to.'';
};
package = mkOption {
type = types.package;
default = pkgs.jellyseerr;
defaultText = literalExpression "pkgs.jellyseerr";
description = ''
Jellyseerr package to use.
'';
};
databaseConfig = mkOption {
type = types.attrsOf types.str;
default = {
type = "sqlite";
configDirectory = "config";
logQueries = "false";
};
description = ''
Database configuration. For "sqlite", only "type", "configDirectory", and "logQueries" are relevant.
For "postgres", include host, port, user, pass, name, and optionally socket.
Example:
{
type = "postgres";
socket = "/run/postgresql";
user = "jellyseerr";
name = "jellyseerr";
logQueries = "false";
}
or
{
type = "postgres";
host = "localhost";
port = "5432";
user = "dbuser";
pass = "password";
name = "jellyseerr";
logQueries = "false";
}
or
{
type = "sqlite";
configDirectory = "config";
logQueries = "false";
}
'';
};
};
config = mkIf cfg.enable {
systemd.services.jellyseerr = {
description = "Jellyseerr, a requests manager for Jellyfin";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment =
let
dbConfig = cfg.databaseConfig;
in
{
PORT = toString cfg.port;
DB_TYPE = toString dbConfig.type;
CONFIG_DIRECTORY = toString dbConfig.configDirectory or "";
DB_LOG_QUERIES = toString dbConfig.logQueries;
DB_HOST = if dbConfig.type == "postgres" && !(hasAttr "socket" dbConfig) then toString dbConfig.host or "" else "";
DB_PORT = if dbConfig.type == "postgres" && !(hasAttr "socket" dbConfig) then toString dbConfig.port or "" else "";
DB_SOCKET_PATH = if dbConfig.type == "postgres" && hasAttr "socket" dbConfig then toString dbConfig.socket or "" else "";
DB_USER = if dbConfig.type == "postgres" then toString dbConfig.user or "" else "";
DB_PASS = if dbConfig.type == "postgres" then toString dbConfig.pass or "" else "";
DB_NAME = if dbConfig.type == "postgres" then toString dbConfig.name or "" else "";
};
serviceConfig = {
Type = "exec";
StateDirectory = "jellyseerr";
WorkingDirectory = "${cfg.package}/libexec/jellyseerr";
DynamicUser = true;
ExecStart = "${cfg.package}/bin/jellyseerr";
BindPaths = [ "/var/lib/jellyseerr/:${cfg.package}/libexec/jellyseerr/config/" ];
Restart = "on-failure";
ProtectHome = true;
ProtectSystem = "strict";
PrivateTmp = true;
PrivateDevices = true;
ProtectHostname = true;
ProtectClock = true;
ProtectKernelTunables = true;
ProtectKernelModules = true;
ProtectKernelLogs = true;
ProtectControlGroups = true;
NoNewPrivileges = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
RemoveIPC = true;
PrivateMounts = true;
};
};
networking.firewall = mkIf cfg.openFirewall { allowedTCPPorts = [ cfg.port ]; };
};
}
```
Then, import the module into your `configuration.nix`:
```nix
{ config, pkgs, ... }:
{
imports = [ ./modules/jellyseerr.nix ];
services.jellyseerr = {
enable = true;
port = 5055;
openFirewall = true;
package = pkgs.unstable.jellyseerr; # use the unstable package if stable is not up-to-date
databaseConfig = {
type = "postgres";
host = "localhost"; # or socket: "/run/postgresql"
port = "5432"; # if using socket, this is not needed
user = "jellyseerr";
pass = "jellyseerr";
name = "jellyseerr";
logQueries = "false";
};
}
}
```
</TabItem>
</Tabs>
After adding the configuration to your `configuration.nix`, you can run the following command to install jellyseerr:
```bash
nixos-rebuild switch
```
After the rebuild is complete, jellyseerr should be running; verify that it is with the following command.
```bash
systemctl status jellyseerr
```
:::info
You can now access Seerr by visiting `http://localhost:5055` in your web browser.
:::

View File

@@ -1,16 +1,15 @@
---
title: AUR (Arch User Repository)
title: AUR (Advanced)
description: Install Seerr using the Arch User Repository
sidebar_position: 4
sidebar_position: 2
---
# AUR (Arch User Repository)
:::note Disclaimer
This AUR package is not maintained by us but by a third party. Please refer to the maintainer for any issues.
# AUR
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
:::info
:::warning
This method is not recommended for most users. It is intended for advanced users who are using Arch Linux or an Arch-based distribution.
:::
@@ -24,12 +23,12 @@ import TabItem from '@theme/TabItem';
<Tabs groupId="aur-methods" queryString>
<TabItem value="yay" label="yay">
```bash
yay -S jellyseerr
yay -S seerr
```
</TabItem>
<TabItem value="paru" label="paru">
```bash
paru -S jellyseerr
paru -S seerr
```
</TabItem>
</Tabs>
@@ -39,5 +38,5 @@ After installing Seerr, configure it by visiting the web UI at `http://[address]
:::
:::tip
You can find the environment file at `/etc/conf.d/jellyseerr` and the service file at `/etc/systemd/system/jellyseerr.service`.
You can find the environment file at `/etc/conf.d/seerr` and the service file at `/etc/systemd/system/seerr.service`.
:::
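With those paths in mind, the unit can typically be enabled and started through systemd (a sketch; this assumes the package ships the `seerr.service` unit noted above):
```bash
sudo systemctl enable --now seerr
```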

View File

@@ -0,0 +1,11 @@
---
title: Third-party Installation Methods
---
import DocCardList from '@theme/DocCardList';
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
<DocCardList />

View File

@@ -0,0 +1,17 @@
---
title: Nix Package Manager (Advanced)
description: Install Seerr using Nixpkgs
sidebar_position: 1
---
import { SeerrVersion, NixpkgVersion } from '@site/src/components/SeerrVersion';
import Admonition from '@theme/Admonition';
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
# Nix Package Manager
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
Refer to the [NixOS documentation](https://search.nixos.org/options?channel=25.05&query=seerr) for the available module options.
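As a starting point, the module can be enabled directly in `configuration.nix` (a minimal sketch; depending on your nixpkgs channel, the module and package may still be named `jellyseerr`):
```nix
{ config, pkgs, ... }:
{
  services.jellyseerr = {
    enable = true;
    port = 5055;
    openFirewall = true;
  };
}
```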

View File

@@ -0,0 +1,20 @@
---
title: Unraid (Advanced)
description: Install Seerr using Unraid
sidebar_position: 3
---
# Unraid
:::warning
Third-party installation methods are maintained by the community. The Seerr team is not responsible for these packages.
:::
:::warning
This method is not recommended for most users. It is intended for advanced users who are using Unraid.
:::
1. Ensure you have the **Community Applications** plugin installed.
2. Inside the **Community Applications** app store, search for **Seerr**.
3. Click the **Install Button**.
4. On the following **Add Container** screen, make changes to the **Host Port** and **Host Path 1** \(Appdata\) as needed.
5. Click apply and access "Seerr" at your `<ServerIP:HostPort>` in a web browser.

168
docs/migration-guide.mdx Normal file
View File

@@ -0,0 +1,168 @@
---
title: Migration guide
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
Whether you come from Overseerr or Jellyseerr, you don't need to perform any manual migration steps; your instance will be migrated to Seerr automatically.
This migration runs automatically the first time you start your instance using the Seerr codebase (Docker image, source build, Kubernetes, etc.).
An additional migration will run for Overseerr users to migrate their configuration to the new codebase.
:::warning
Before doing anything, you should back up your existing instance so that you can roll back in case something goes wrong.
See [Backups](/using-seerr/backups) for details on how to properly backup your instance.
:::
## Docker
Refer to the [Seerr Docker Documentation](/getting-started/docker); all of our examples have been updated to reflect the changes below.
Changes:
- Renamed all references from `overseerr` or `jellyseerr` to `seerr`.
- The container image reference has been updated.
- The container can now be run as a non-root user (`node` user); remove the `user` directive if you have configured it.
- The container no longer provides an init process, so you must enable one yourself by adding `init: true` in Docker Compose or `--init` with the Docker CLI.
:::info
**Config folder permissions**: Since the container now runs as the `node` user (UID 1000), you must ensure your config folder has the correct permissions. The `node` user must have read and write access to the `/app/config` directory.
If you're migrating from a previous installation, you may need to update the ownership of your config folder:
```bash
docker run --rm -v /path/to/appdata/config:/data alpine chown -R 1000:1000 /data
```
This ensures the `node` user (UID 1000) owns the config directory and can read and write to it.
:::
### Unix
Summary of changes:
<Tabs groupId="docker-methods" queryString>
<TabItem value="docker-compose" label="Docker compose">
```yaml {3-6}
---
services:
seerr:
image: ghcr.io/seerr-team/seerr:latest
init: true
container_name: seerr
environment:
- LOG_LEVEL=debug
- TZ=Asia/Tashkent
- PORT=5055 #optional
ports:
- 5055:5055
volumes:
- /path/to/appdata/config:/app/config
healthcheck:
test: wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1
start_period: 20s
timeout: 3s
interval: 15s
retries: 3
restart: unless-stopped
```
</TabItem>
<TabItem value="docker-cli" label="Docker CLI">
```bash {2-3,10}
docker run -d \
--name seerr \
--init \
-e LOG_LEVEL=debug \
-e TZ=Asia/Tashkent \
-e PORT=5055 \
-p 5055:5055 \
-v /path/to/appdata/config:/app/config \
--restart unless-stopped \
ghcr.io/seerr-team/seerr:latest
```
</TabItem>
</Tabs>
### Windows
Summary of changes:
<Tabs groupId="docker-methods" queryString>
<TabItem value="docker-compose" label="Docker compose">
```yaml {3-6,13,23}
---
services:
seerr:
image: ghcr.io/seerr-team/seerr:latest
init: true
container_name: seerr
environment:
- LOG_LEVEL=debug
- TZ=Asia/Tashkent
ports:
- 5055:5055
volumes:
- seerr-data:/app/config
healthcheck:
test: wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1
start_period: 20s
timeout: 3s
interval: 15s
retries: 3
restart: unless-stopped
volumes:
seerr-data:
external: true
```
</TabItem>
<TabItem value="docker-cli" label="Docker CLI">
```bash {2-3,8,10}
docker run -d \
--name seerr \
--init \
-e LOG_LEVEL=debug \
-e TZ=Asia/Tashkent \
-e PORT=5055 \
-p 5055:5055 \
-v seerr-data:/app/config \
--restart unless-stopped \
ghcr.io/seerr-team/seerr:latest
```
</TabItem>
</Tabs>
## Kubernetes
Refer to the [Seerr Kubernetes Documentation](/getting-started/kubernetes); all of our examples have been updated to reflect the changes below.
Changes:
- All references to `jellyseerr` have been renamed to `seerr` in the manifests.
- The container image reference has been updated.
- The default `securityContext` and `podSecurityContext` have been updated to support running the container without root permissions.
Summary of changes:
<Tabs groupId="kubernetes-values" queryString>
<TabItem value="old" label="Old values">
```yaml
image:
repository: fallenbagel/jellyseerr
podSecurityContext: {}
securityContext: {}
```
</TabItem>
<TabItem value="new" label="New values">
```yaml
image:
repository: seerr-team/seerr
podSecurityContext:
fsGroup: 1000
fsGroupChangePolicy: OnRootMismatch
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: false
runAsNonRoot: true
privileged: false
runAsUser: 1000
runAsGroup: 1000
seccompProfile:
type: RuntimeDefault
```
</TabItem>
</Tabs>
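Once your values file reflects the new image repository and security contexts, applying the change is typically a single Helm upgrade (a sketch; the release name and values file path are assumptions):
```console
helm upgrade seerr oci://ghcr.io/seerr-team/seerr/seerr-chart -f values.yaml
```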

View File

@@ -103,7 +103,7 @@ If you can't change your DNS servers or force IPV4 resolution, you can use Seerr
In some places (like China), ISPs block not only DNS resolution but also the connection to the TMDB API.
You can configure Seerr to use a proxy with the [HTTP(S) Proxy](/using-seerr/settings/general#https-proxy) setting.
You can configure Seerr to use a proxy with the [HTTP(S) Proxy](/using-seerr/settings/general#enable-proxy-support) setting.
### Option 3: Force IPV4 resolution first
@@ -174,4 +174,36 @@ This can happen if you have a new installation of Jellyfin/Emby or if you have c
This process should restore your admin privileges while preserving your settings.
## Failed to enable web push notifications
### Option 1: You are using Pi-hole
When using Pi-hole, you need to whitelist the proper domains so that the queries are not intercepted and blocked by Pi-hole.
If you are using a Chromium-based browser (e.g. Chrome, Brave, Edge), the domain you need to whitelist is `fcm.googleapis.com`.
If you are using Firefox, the domain you need to whitelist is `push.services.mozilla.com`.
1. Log into your Pi-hole admin interface, then click on Domains under GROUP MANAGEMENT.
2. Add the domain corresponding to your browser in the `Domain to be added` field and then click on Add to allowed domains.
3. For those changes to take effect, you need to flush your current DNS cache.
4. You can do so by running this command in your Pi-hole terminal:
```bash
pihole restartdns
```
If this command fails (which is unlikely), use this equivalent:
```bash
pihole -f && pihole restartdns
```
5. Then restart your Seerr instance and try to enable the web push notifications again.
### Option 2: You are using Brave browser
Brave is a "de-Googled" browser, so by default (or if you declined a prompt in the past) it blocks access to the FCM (Firebase Cloud Messaging) service, which is required for web push notifications on Chromium-based browsers.
1. Open Brave and paste this address in the URL bar: `brave://settings/privacy`
2. Look for the option "Use Google services for push messaging"
3. Enable this option
4. Relaunch Brave completely
5. You should now see the notification prompt appear instead of an error message.
If you still encounter issues, please reach out on our support channels.

View File

@@ -0,0 +1,15 @@
---
title: Advanced Features
description: Advanced configuration and use cases.
sidebar_position: 6
---
# Advanced Features
## Advanced Configuration and Use Cases
Seerr currently offers advanced features for power users and specific use cases:
import DocCardList from '@theme/DocCardList';
<DocCardList />

View File

@@ -0,0 +1,386 @@
---
id: verifying-signed-artifacts
title: Verifying Signed Artifacts
sidebar_label: Verify Signed Artifacts
description: Learn how to verify Seerr's signed artifacts and SBOM attestations.
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
# Verifying Signed Artifacts
The following artifacts are cryptographically signed using [Sigstore Cosign](https://docs.sigstore.dev/quickstart/quickstart-cosign/):
- Container images
- Helm charts
This ensures that the images you pull are authentic, tamper-proof, and built by the official Seerr release pipeline.
Additionally, each container image includes a CycloneDX SBOM (Software Bill of Materials) attestation, generated with [Trivy](https://aquasecurity.github.io/trivy/), providing transparency about all dependencies included in the image.
---
## Prerequisites
You will need the following tools installed:
- [Cosign](https://docs.sigstore.dev/cosign/system_config/installation/)
To verify images:
- [Docker](https://docs.docker.com/get-docker/) **or** [Podman](https://podman.io/getting-started/installation) (including [Skopeo](https://github.com/containers/skopeo/blob/main/install.md))
---
## Verifying Signed Images
### Image Locations
Official Seerr images are available from:
- GitHub Container Registry (GHCR): `ghcr.io/seerr-team/seerr:<tag>`
- Docker Hub: `seerr/seerr:<tag>`
You can view all available tags on the [Seerr Releases page](https://github.com/seerr-team/seerr/releases).
---
### Verifying a Specific Release Tag
Each tagged release (for example `v2.7.4`) is immutable and cryptographically signed.
Verification should always be performed using the image digest (SHA256).
#### Retrieve the Image Digest
<Tabs groupId="verify-methods">
<TabItem value="docker" label="Docker">
```bash
docker buildx imagetools inspect ghcr.io/seerr-team/seerr:v2.7.4 --format '{{json .Manifest.Digest}}' | tr -d '"'
```
</TabItem>
<TabItem value="podman" label="Podman / Skopeo">
```bash
skopeo inspect docker://ghcr.io/seerr-team/seerr:v2.7.4 --format '{{.Digest}}'
```
</TabItem>
</Tabs>
Example output:
```
sha256:abcd1234...
```
---
#### Verify the Image Signature
<Tabs groupId="registry-methods">
<TabItem value="ghcr" label="GitHub Container Registry (GHCR)">
```bash
cosign verify ghcr.io/seerr-team/seerr@sha256:abcd1234... \
--certificate-identity "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v2.7.4" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
<TabItem value="dockerhub" label="Docker Hub">
```bash
cosign verify seerr/seerr@sha256:abcd1234... \
--certificate-identity "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v2.7.4" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
</Tabs>
:::info Successful Verification Example
Verification for `ghcr.io/seerr-team/seerr@sha256:abcd1234...`
The following checks were performed:
- Cosign claims validated
- Signatures verified against the transparency log
- Certificate issued by Fulcio to the expected workflow identity
:::
---
### Verifying the `latest` Tag
:::warning Latest Tag Warning
The `latest` tag is **mutable**, meaning it will change with each new release.
Always verify the digest that `latest` currently points to.
:::
#### Retrieve the Digest for `latest`
<Tabs groupId="verify-methods">
<TabItem value="docker" label="Docker">
```bash
docker buildx imagetools inspect ghcr.io/seerr-team/seerr:latest --format '{{json .Manifest.Digest}}' | tr -d '"'
```
</TabItem>
<TabItem value="podman" label="Podman / Skopeo">
```bash
skopeo inspect docker://ghcr.io/seerr-team/seerr:latest --format '{{.Digest}}'
```
</TabItem>
</Tabs>
Example output:
```
sha256:abcd1234...
```
#### Verify the Signature
<Tabs groupId="registry-methods">
<TabItem value="ghcr" label="GHCR">
```bash
cosign verify ghcr.io/seerr-team/seerr@sha256:abcd1234... \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v.*" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
<TabItem value="dockerhub" label="Docker Hub">
```bash
cosign verify seerr/seerr@sha256:abcd1234... \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v.*" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
</Tabs>
:::tip
The wildcard `v.*` ensures verification works for any versioned release that `latest` represents.
:::
---
### Verifying SBOM Attestations
Each image includes a CycloneDX SBOM attestation.
#### Verify the Attestation
```bash
cosign verify-attestation ghcr.io/seerr-team/seerr@sha256:abcd1234... \
--type cyclonedx \
--certificate-identity "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v2.7.4" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
:::info Successful Verification Example
Verification for `ghcr.io/seerr-team/seerr@sha256:abcd1234...`
The following checks were performed:
- Cosign claims validated
- Signatures verified against the transparency log
- Certificate issued by Fulcio to the expected workflow identity
:::
#### Extract the SBOM for Inspection
```bash
cosign verify-attestation ghcr.io/seerr-team/seerr@sha256:abcd1234... \
--type cyclonedx \
--certificate-identity "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v2.7.4" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com" | jq -r '.payload | @base64d' > sbom.json
```
You can open `sbom.json` in a CycloneDX viewer or analyse it with [Trivy](https://aquasecurity.github.io/trivy/) or [Dependency-Track](https://dependencytrack.org/).
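For instance, the extracted SBOM can be scanned for known vulnerabilities (a sketch; `trivy sbom` is assumed to accept the attestation payload directly, you may need to extract its `predicate` field first depending on the payload format):
```bash
trivy sbom ./sbom.json
```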
---
### Expected Certificate Identity
The expected certificate identity for all signed Seerr images is:
```
https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v*
```
This confirms that the image was:
- Built by the official Seerr Release workflow
- Produced from the seerr-team/seerr repository
- Signed using GitHub's OIDC identity via Sigstore Fulcio
---
### Example: Full Verification Flow
<Tabs groupId="verify-examples">
<TabItem value="docker" label="Docker">
```bash
DIGEST=$(docker buildx imagetools inspect ghcr.io/seerr-team/seerr:latest --format '{{json .Manifest.Digest}}' | tr -d '"')
cosign verify ghcr.io/seerr-team/seerr@"$DIGEST" \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v.*" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
cosign verify-attestation ghcr.io/seerr-team/seerr@"$DIGEST" \
--type cyclonedx \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v.*" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
<TabItem value="podman" label="Podman / Skopeo">
```bash
DIGEST=$(skopeo inspect docker://ghcr.io/seerr-team/seerr:latest --format '{{.Digest}}')
cosign verify ghcr.io/seerr-team/seerr@"$DIGEST" \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/release.yml@refs/tags/v.*" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
</Tabs>
## Verifying Signed Helm charts
### Helm Chart Locations
Official Seerr Helm charts are available from:
- GitHub Container Registry (GHCR): `ghcr.io/seerr-team/seerr/seerr-chart:<tag>`
You can view all available tags on the [seerr-chart package page](https://github.com/seerr-team/seerr/pkgs/container/seerr%2Fseerr-chart).
---
### Verifying a Specific Release Tag
Each tagged release (for example `3.0.0`) is immutable and cryptographically signed.
Verification should always be performed using the chart digest (SHA256).
#### Retrieve the Helm Chart Digest
<Tabs groupId="verify-methods">
<TabItem value="docker" label="Docker">
```bash
docker buildx imagetools inspect ghcr.io/seerr-team/seerr/seerr-chart:3.0.0 --format '{{json .Manifest.Digest}}' | tr -d '"'
```
</TabItem>
<TabItem value="podman" label="Podman / Skopeo">
```bash
skopeo inspect docker://ghcr.io/seerr-team/seerr/seerr-chart:3.0.0 --format '{{.Digest}}'
```
</TabItem>
</Tabs>
Example output:
```
sha256:abcd1234...
```
---
#### Verify the Helm Chart Signature
```bash
cosign verify ghcr.io/seerr-team/seerr/seerr-chart@sha256:abcd1234... \
--certificate-identity "https://github.com/seerr-team/seerr/.github/workflows/helm.yml@refs/heads/main" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
:::info Successful Verification Example
Verification for `ghcr.io/seerr-team/seerr/seerr-chart@sha256:abcd1234...`
The following checks were performed:
- Cosign claims validated
- Signatures verified against the transparency log
- Certificate issued by Fulcio to the expected workflow identity
:::
---
### Expected Certificate Identity
The expected certificate identity for all signed Seerr Helm charts is:
```
https://github.com/seerr-team/seerr/.github/workflows/helm.yml@refs/heads/main
```
This confirms that the chart was:
- Built by the official Seerr Helm release workflow
- Produced from the seerr-team/seerr repository
- Signed using GitHub's OIDC identity via Sigstore Fulcio
---
### Example: Full Verification Flow
<Tabs groupId="verify-examples">
<TabItem value="docker" label="Docker">
```bash
DIGEST=$(docker buildx imagetools inspect ghcr.io/seerr-team/seerr/seerr-chart:3.0.0 --format '{{json .Manifest.Digest}}' | tr -d '"')
cosign verify ghcr.io/seerr-team/seerr/seerr-chart@"$DIGEST" \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/helm.yml@refs/heads/main" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
cosign verify-attestation ghcr.io/seerr-team/seerr/seerr-chart@"$DIGEST" \
--type cyclonedx \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/helm.yml@refs/heads/main" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
<TabItem value="podman" label="Podman / Skopeo">
```bash
DIGEST=$(skopeo inspect docker://ghcr.io/seerr-team/seerr/seerr-chart:3.0.0 --format '{{.Digest}}')
cosign verify ghcr.io/seerr-team/seerr/seerr-chart@"$DIGEST" \
--certificate-identity-regexp "https://github.com/seerr-team/seerr/.github/workflows/helm.yml@refs/heads/main" \
--certificate-oidc-issuer "https://token.actions.githubusercontent.com"
```
</TabItem>
</Tabs>
---
## Troubleshooting
| Issue | Likely Cause | Suggested Fix |
|-------|---------------|----------------|
| `no matching signatures` | Incorrect digest or tag | Retrieve the digest again using Docker or Skopeo |
| `certificate identity does not match expected` | Workflow reference changed | Ensure your `--certificate-identity` matches this documentation |
| `cosign: command not found` | Cosign not installed | Install Cosign from the official release |
| `certificate expired` | Old release | Verify a newer tag or digest |
---
## Further Reading
- [Sigstore Documentation](https://docs.sigstore.dev)
- [Cosign Verification Guide](https://docs.sigstore.dev/cosign/verifying/verify/)
- [CycloneDX Specification](https://cyclonedx.org/specification/overview/)
- [Trivy Documentation](https://trivy.dev/latest/docs/)
- [Skopeo Documentation](https://github.com/containers/skopeo)
- [Podman Documentation](https://podman.io/get-started/)
- [Docker Documentation](https://docs.docker.com/)
- [Seerr GitHub Repository](https://github.com/seerr-team/seerr)

View File

@@ -22,4 +22,4 @@ Users can customize their notification preferences in their own user notificatio
## Requesting New Notification Agents
If we do not currently support your preferred notification agent, feel free to [submit a feature request on GitHub](https://github.com/fallenbagel/jellyseerr/issues). However, please be sure to search first and confirm that there is not already an existing request for the agent!
If we do not currently support your preferred notification agent, feel free to [submit a feature request on GitHub](https://github.com/seerr-team/seerr/issues). However, please be sure to search first and confirm that there is not already an existing request for the agent!

View File

@@ -16,7 +16,7 @@ User notifications are separate from system notifications, and the available not
### Application/API Token
[Register an application](https://pushover.net/apps/build) and enter the API token in this field. (You can use one of the [official icons in our GitHub repository](https://github.com/fallenbagel/jellyseerr/tree/develop/public) when configuring the application.)
[Register an application](https://pushover.net/apps/build) and enter the API token in this field. (You can use one of the [official icons in our GitHub repository](https://github.com/seerr-team/seerr/tree/develop/public) when configuring the application.)
For more details on registering applications or the API token, please see the [Pushover API documentation](https://pushover.net/api#registration).

View File

@@ -1,24 +0,0 @@
---
title: Welcome to the Jellyseerr Blog
description: The official Jellyseerr blog for release notes, technical updates, and community news.
slug: welcome
authors: [fallenbagel, gauthier-th]
tags: [announcement, jellyseerr, blog]
image: https://raw.githubusercontent.com/fallenbagel/jellyseerr/refs/heads/develop/gen-docs/static/img/logo.svg
hide_table_of_contents: false
---
We are pleased to introduce the official Jellyseerr blog.
This space will serve as the central place for:
- Release announcements
- Updates on new features and improvements
- Technical articles, such as details on our [**DNS caching package**](https://github.com/jellyseerr/dns-caching) and other enhancements
- Community-related news
<!--truncate-->
Our goal is to keep the community informed and provide deeper insights into the ongoing development of Jellyseerr.
Thank you for being part of the Jellyseerr project. More updates will follow soon.

View File

@@ -0,0 +1,24 @@
---
title: Welcome to the Seerr Blog
description: The official Seerr blog for release notes, technical updates, and community news.
slug: welcome
authors: [fallenbagel, gauthier-th]
tags: [announcement, seerr, blog]
image: https://raw.githubusercontent.com/seerr-team/seerr/refs/heads/develop/gen-docs/static/img/logo.svg
hide_table_of_contents: false
---
We are pleased to introduce the official Seerr blog.
This space will serve as the central place for:
- Release announcements
- Updates on new features and improvements
- Technical articles, such as details on our [**DNS caching package**](https://github.com/seerr/dns-caching) and other enhancements
- Community-related news
<!--truncate-->
Our goal is to keep the community informed and provide deeper insights into the ongoing development of Seerr.
Thank you for being part of the Seerr project. More updates will follow soon.

View File

@@ -7,7 +7,7 @@ export const SeerrVersion = () => {
async function fetchVersion() {
try {
const response = await fetch(
'https://raw.githubusercontent.com/fallenbagel/jellyseerr/main/package.json'
'https://raw.githubusercontent.com/seerr-team/seerr/main/package.json'
);
const data = await response.json();

View File

@@ -11,6 +11,8 @@ module.exports = {
{ hostname: 'image.tmdb.org' },
{ hostname: 'artworks.thetvdb.com' },
{ hostname: 'plex.tv' },
{ hostname: 'archive.org' },
{ hostname: 'r2.theaudiodb.com' },
],
},
webpack(config) {

View File

@@ -2,7 +2,7 @@
"name": "seerr",
"version": "0.1.0",
"private": true,
"packageManager": "pnpm@10.17.1",
"packageManager": "pnpm@10.24.0",
"scripts": {
"preinstall": "npx only-allow pnpm",
"postinstall": "node postinstall-win.js",
@@ -33,42 +33,43 @@
},
"license": "MIT",
"dependencies": {
"@dr.pogodin/csurf": "^1.14.1",
"@formatjs/intl-displaynames": "6.2.6",
"@dr.pogodin/csurf": "^1.16.6",
"@formatjs/intl-displaynames": "6.8.13",
"@formatjs/intl-locale": "3.1.1",
"@formatjs/intl-pluralrules": "5.1.10",
"@formatjs/intl-pluralrules": "5.4.6",
"@formatjs/intl-utils": "3.8.4",
"@formatjs/swc-plugin-experimental": "^0.4.0",
"@headlessui/react": "1.7.12",
"@heroicons/react": "2.0.16",
"@heroicons/react": "2.2.0",
"@supercharge/request-ip": "1.2.0",
"@svgr/webpack": "6.5.1",
"@tanem/react-nprogress": "5.0.30",
"@tanem/react-nprogress": "5.0.56",
"@types/ua-parser-js": "^0.7.36",
"@types/wink-jaro-distance": "^2.0.2",
"ace-builds": "1.15.2",
"axios": "1.10.0",
"axios-rate-limit": "1.3.0",
"ace-builds": "1.43.4",
"axios": "1.13.2",
"axios-rate-limit": "1.4.0",
"bcrypt": "5.1.0",
"bowser": "2.11.0",
"bowser": "2.13.1",
"connect-typeorm": "1.1.4",
"cookie-parser": "1.4.7",
"copy-to-clipboard": "3.3.3",
"country-flag-icons": "1.5.5",
"country-flag-icons": "1.6.4",
"cronstrue": "2.23.0",
"date-fns": "2.29.3",
"dayjs": "1.11.7",
"dayjs": "1.11.19",
"dns-caching": "^0.2.7",
"dompurify": "^3.2.4",
"email-templates": "12.0.1",
"email-validator": "2.0.4",
"express": "4.21.2",
"express-openapi-validator": "4.13.8",
"express-rate-limit": "6.7.0",
"express-session": "1.17.3",
"formik": "^2.4.6",
"express-session": "1.18.2",
"formik": "^2.4.9",
"gravatar-url": "3.1.0",
"http-proxy-agent": "^7.0.2",
"https-proxy-agent": "^7.0.6",
"jsdom": "^26.0.0",
"lodash": "4.17.21",
"mime": "3",
"next": "^14.2.25",
@@ -77,19 +78,19 @@
"node-schedule": "2.1.1",
"nodemailer": "6.10.0",
"openpgp": "5.11.2",
"pg": "8.11.0",
"pg": "8.16.3",
"plex-api": "5.3.2",
"pug": "3.0.3",
"react": "^18.3.1",
"react-ace": "10.1.0",
"react-animate-height": "2.1.2",
"react-aria": "3.23.0",
"react-aria": "3.44.0",
"react-dom": "^18.3.1",
"react-intersection-observer": "9.4.3",
"react-intl": "^6.6.8",
"react-markdown": "8.0.5",
"react-popper-tooltip": "4.4.2",
"react-select": "5.7.0",
"react-select": "5.10.2",
"react-spring": "9.7.1",
"react-tailwindcss-datepicker-sct": "1.3.4",
"react-toast-notifications": "2.5.1",
@@ -98,18 +99,19 @@
"react-use-clipboard": "1.0.9",
"reflect-metadata": "0.1.13",
"secure-random-password": "0.2.3",
"semver": "7.7.1",
"semver": "7.7.3",
"sharp": "^0.33.4",
"sqlite3": "5.1.7",
"swagger-ui-express": "4.6.2",
"swr": "2.2.5",
"swr": "2.3.7",
"tailwind-merge": "^2.6.0",
"typeorm": "0.3.12",
"ua-parser-js": "^1.0.35",
"undici": "^7.3.0",
"web-push": "3.5.0",
"undici": "^7.16.0",
"validator": "^13.15.23",
"web-push": "3.6.7",
"wink-jaro-distance": "^2.0.0",
"winston": "3.8.2",
"winston": "3.18.3",
"winston-daily-rotate-file": "4.7.1",
"xml2js": "0.4.23",
"yamljs": "0.3.0",
@@ -126,28 +128,32 @@
"@types/cookie-parser": "1.4.3",
"@types/country-flag-icons": "1.2.0",
"@types/csurf": "1.11.2",
"@types/dompurify": "^3.2.0",
"@types/email-templates": "8.0.4",
"@types/express": "4.17.17",
"@types/express-session": "1.17.6",
"@types/jsdom": "^21.1.7",
"@types/lodash": "4.14.191",
"@types/mime": "3",
"@types/node": "22.10.5",
"@types/node-schedule": "2.1.0",
"@types/node-schedule": "2.1.8",
"@types/nodemailer": "6.4.7",
"@types/react": "^18.3.3",
"@types/react-dom": "^18.3.0",
"@types/react-transition-group": "4.4.5",
"@types/react-transition-group": "4.4.12",
"@types/secure-random-password": "0.2.1",
"@types/semver": "7.3.13",
"@types/swagger-ui-express": "4.1.3",
"@types/web-push": "3.3.2",
"@types/semver": "7.7.1",
"@types/swagger-ui-express": "4.1.8",
"@types/validator": "^13.15.10",
"@types/web-push": "3.6.4",
"@types/xml2js": "0.4.11",
"@types/yamljs": "0.2.31",
"@types/yup": "0.29.14",
"@typescript-eslint/eslint-plugin": "5.54.0",
"@typescript-eslint/parser": "5.54.0",
"autoprefixer": "10.4.13",
"commitizen": "4.3.0",
"autoprefixer": "10.4.22",
"baseline-browser-mapping": "^2.8.32",
"commitizen": "4.3.1",
"copyfiles": "2.4.1",
"cy-mobile-commands": "0.3.0",
"cypress": "14.1.0",
@@ -156,22 +162,22 @@
"eslint-config-next": "^14.2.4",
"eslint-config-prettier": "8.6.0",
"eslint-plugin-formatjs": "4.9.0",
"eslint-plugin-jsx-a11y": "6.7.1",
"eslint-plugin-no-relative-import-paths": "1.5.2",
"eslint-plugin-jsx-a11y": "6.10.2",
"eslint-plugin-no-relative-import-paths": "1.6.1",
"eslint-plugin-prettier": "4.2.1",
"eslint-plugin-react": "7.32.2",
"eslint-plugin-react": "7.37.5",
"eslint-plugin-react-hooks": "4.6.0",
"husky": "8.0.3",
"lint-staged": "13.1.2",
"nodemon": "3.1.9",
"postcss": "8.4.31",
"nodemon": "3.1.11",
"postcss": "8.5.6",
"prettier": "2.8.4",
"prettier-plugin-organize-imports": "3.2.2",
"prettier-plugin-tailwindcss": "0.2.3",
"tailwindcss": "3.2.7",
"ts-node": "10.9.1",
"tsc-alias": "1.8.2",
"tsconfig-paths": "4.1.2",
"ts-node": "10.9.2",
"tsc-alias": "1.8.16",
"tsconfig-paths": "4.2.0",
"typescript": "4.9.5"
},
"engines": {
@@ -180,7 +186,7 @@
},
"overrides": {
"sqlite3/node-gyp": "8.4.1",
"@types/express-session": "1.17.6"
"@types/express-session": "1.18.2"
},
"config": {
"commitizen": {
@@ -201,74 +207,13 @@
"@commitlint/config-conventional"
]
},
"release": {
"plugins": [
"@semantic-release/commit-analyzer",
"@semantic-release/release-notes-generator",
"@semantic-release/npm",
[
"@codedependant/semantic-release-docker",
{
"dockerArgs": {
"COMMIT_TAG": "${GITHUB_SHA}"
},
"dockerLogin": false,
"dockerProject": "fallenbagel",
"dockerImage": "jellyseerr",
"dockerTags": [
"latest",
"{{major}}",
"{{major}}.{{minor}}",
"{{major}}.{{minor}}.{{patch}}"
],
"dockerPlatform": [
"linux/amd64",
"linux/arm64"
]
}
],
[
"@codedependant/semantic-release-docker",
{
"dockerArgs": {
"COMMIT_TAG": "${GITHUB_SHA}"
},
"dockerLogin": false,
"dockerRegistry": "ghcr.io",
"dockerProject": "fallenbagel",
"dockerImage": "jellyseerr",
"dockerTags": [
"latest",
"{{major}}",
"{{major}}.{{minor}}",
"{{major}}.{{minor}}.{{patch}}"
],
"dockerPlatform": [
"linux/amd64",
"linux/arm64"
]
}
],
[
"@semantic-release/github",
{
"addReleases": "bottom"
}
]
],
"branches": [
"main"
],
"npmPublish": false,
"publish": [
"@codedependant/semantic-release-docker",
"@semantic-release/github"
]
},
"pnpm": {
"onlyBuiltDependencies": [
"sqlite3",
"bcrypt"
"@swc/core",
"bcrypt",
"cypress",
"sharp",
"sqlite3"
]
}
}

4293
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

View File

Before

Width:  |  Height:  |  Size: 61 KiB

After

Width:  |  Height:  |  Size: 61 KiB

View File

Before

Width:  |  Height:  |  Size: 53 KiB

After

Width:  |  Height:  |  Size: 53 KiB

View File

Before

Width:  |  Height:  |  Size: 53 KiB

After

Width:  |  Height:  |  Size: 53 KiB

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,211 @@
import ExternalAPI from '@server/api/externalapi';
import { getRepository } from '@server/datasource';
import MetadataAlbum from '@server/entity/MetadataAlbum';
import cacheManager from '@server/lib/cache';
import logger from '@server/logger';
import { In } from 'typeorm';
import type { CoverArtResponse } from './interfaces';
class CoverArtArchive extends ExternalAPI {
private readonly CACHE_TTL = 43200;
private readonly STALE_THRESHOLD = 30 * 24 * 60 * 60 * 1000;
constructor() {
super(
'https://coverartarchive.org',
{},
{
nodeCache: cacheManager.getCache('covertartarchive').data,
rateLimit: {
maxRequests: 20,
maxRPS: 50,
},
}
);
}
private isMetadataStale(metadata: MetadataAlbum | null): boolean {
if (!metadata) return true;
return Date.now() - metadata.updatedAt.getTime() > this.STALE_THRESHOLD;
}
private createEmptyResponse(id: string): CoverArtResponse {
return { images: [], release: `/release/${id}` };
}
private createCachedResponse(url: string, id: string): CoverArtResponse {
return {
images: [
{
approved: true,
front: true,
id: 0,
thumbnails: { 250: url },
},
],
release: `/release/${id}`,
};
}
public async getCoverArtFromCache(
id: string
): Promise<string | null | undefined> {
try {
const metadata = await getRepository(MetadataAlbum).findOne({
where: { mbAlbumId: id },
select: ['caaUrl'],
});
return metadata?.caaUrl;
} catch (error) {
logger.error('Failed to fetch cover art from cache', {
label: 'CoverArtArchive',
id,
error: error instanceof Error ? error.message : 'Unknown error',
});
return null;
}
}
public async getCoverArt(id: string): Promise<CoverArtResponse> {
try {
const metadata = await getRepository(MetadataAlbum).findOne({
where: { mbAlbumId: id },
select: ['caaUrl', 'updatedAt'],
});
if (metadata?.caaUrl) {
return this.createCachedResponse(metadata.caaUrl, id);
}
if (metadata && !this.isMetadataStale(metadata)) {
return this.createEmptyResponse(id);
}
return await this.fetchCoverArt(id);
} catch (error) {
logger.error('Failed to get cover art', {
label: 'CoverArtArchive',
id,
error: error instanceof Error ? error.message : 'Unknown error',
});
return this.createEmptyResponse(id);
}
}
private async fetchCoverArt(id: string): Promise<CoverArtResponse> {
try {
const data = await this.get<CoverArtResponse>(
`/release-group/${id}`,
undefined,
this.CACHE_TTL
);
const releaseMBID = data.release.split('/').pop();
data.images = data.images.map((image) => {
const fullUrl = `https://archive.org/download/mbid-${releaseMBID}/mbid-${releaseMBID}-${image.id}_thumb250.jpg`;
if (image.front) {
getRepository(MetadataAlbum)
.upsert(
{ mbAlbumId: id, caaUrl: fullUrl },
{ conflictPaths: ['mbAlbumId'] }
)
.catch((e) => {
logger.error('Failed to save album metadata', {
label: 'CoverArtArchive',
error: e instanceof Error ? e.message : 'Unknown error',
});
});
}
return {
approved: image.approved,
front: image.front,
id: image.id,
thumbnails: { 250: fullUrl },
};
});
return data;
} catch (error) {
await getRepository(MetadataAlbum).upsert(
{ mbAlbumId: id, caaUrl: null },
{ conflictPaths: ['mbAlbumId'] }
);
return this.createEmptyResponse(id);
}
}
public async batchGetCoverArt(
ids: string[]
): Promise<Record<string, string | null>> {
if (!ids.length) return {};
const validIds = ids.filter(
(id) =>
typeof id === 'string' &&
/^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/.test(
id
)
);
if (!validIds.length) return {};
const resultsMap = new Map<string, string | null>();
const idsToFetch: string[] = [];
const metadataRepository = getRepository(MetadataAlbum);
const existingMetadata = await metadataRepository.find({
where: { mbAlbumId: In(validIds) },
select: ['mbAlbumId', 'caaUrl', 'updatedAt'],
});
const metadataMap = new Map(
existingMetadata.map((metadata) => [metadata.mbAlbumId, metadata])
);
for (const id of validIds) {
const metadata = metadataMap.get(id);
if (metadata?.caaUrl) {
resultsMap.set(id, metadata.caaUrl);
} else if (metadata && !this.isMetadataStale(metadata)) {
resultsMap.set(id, null);
} else {
idsToFetch.push(id);
}
}
if (idsToFetch.length > 0) {
const batchPromises = idsToFetch.map((id) =>
this.fetchCoverArt(id)
.then((response) => {
const frontImage = response.images.find((img) => img.front);
resultsMap.set(id, frontImage?.thumbnails?.[250] || null);
return true;
})
.catch((error) => {
logger.error('Failed to fetch cover art', {
label: 'CoverArtArchive',
id,
error: error instanceof Error ? error.message : 'Unknown error',
});
resultsMap.set(id, null);
return false;
})
);
await Promise.allSettled(batchPromises);
}
const results: Record<string, string | null> = {};
for (const [key, value] of resultsMap.entries()) {
results[key] = value;
}
return results;
}
}
export default CoverArtArchive;
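A hypothetical usage sketch of this class (the import path and MBIDs are placeholders, not values from the source):
```typescript
import CoverArtArchive from '@server/api/coverartarchive'; // import path assumed

async function resolveCoverArt(): Promise<void> {
  const coverArt = new CoverArtArchive();

  // Single lookup: returns a CoverArtResponse (empty image list when nothing is found).
  const single = await coverArt.getCoverArt('11111111-1111-1111-1111-111111111111');

  // Batch lookup: returns a map of MBID -> thumbnail URL (or null), reusing cached
  // metadata_album rows where possible and fetching the rest from the Cover Art Archive.
  const batch = await coverArt.batchGetCoverArt([
    '11111111-1111-1111-1111-111111111111',
    '22222222-2222-2222-2222-222222222222',
  ]);

  console.log(single.images.length, Object.keys(batch).length);
}
```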

View File

@@ -0,0 +1,15 @@
interface CoverArtThumbnails {
250: string;
}
interface CoverArtImage {
approved: boolean;
front: boolean;
id: number;
thumbnails: CoverArtThumbnails;
}
export interface CoverArtResponse {
images: CoverArtImage[];
release: string;
}

View File

@@ -56,7 +56,7 @@ interface JellyfinMediaFolder {
}
export interface JellyfinLibrary {
type: 'show' | 'movie';
type: 'show' | 'movie' | 'music';
key: string;
title: string;
agent: string;
@@ -66,7 +66,13 @@ export interface JellyfinLibraryItem {
Name: string;
Id: string;
HasSubtitles: boolean;
Type: 'Movie' | 'Episode' | 'Season' | 'Series';
Type:
| 'Movie'
| 'Episode'
| 'Season'
| 'Series'
| 'MusicAlbum'
| 'MusicArtist';
LocationType: 'FileSystem' | 'Offline' | 'Remote' | 'Virtual';
SeriesName?: string;
SeriesId?: string;
@@ -76,6 +82,8 @@ export interface JellyfinLibraryItem {
IndexNumberEnd?: number;
ParentIndexNumber?: number;
MediaType: string;
AlbumId?: string;
ArtistId?: string;
}
export interface JellyfinMediaStream {
@@ -104,6 +112,9 @@ export interface JellyfinLibraryItemExtended extends JellyfinLibraryItem {
Imdb?: string;
Tvdb?: string;
AniDB?: string;
MusicBrainzReleaseGroup: string | undefined;
MusicBrainzAlbum?: string;
MusicBrainzArtistId?: string;
};
MediaSources?: JellyfinMediaSource[];
Width?: number;
@@ -112,6 +123,10 @@ export interface JellyfinLibraryItemExtended extends JellyfinLibraryItem {
DateCreated?: string;
}
type EpisodeReturn<T> = T extends { includeMediaInfo: true }
? JellyfinLibraryItemExtended[]
: JellyfinLibraryItem[];
export interface JellyfinItemsReponse {
Items: JellyfinLibraryItemExtended[];
TotalRecordCount: number;
@@ -145,7 +160,7 @@ class JellyfinAPI extends ExternalAPI {
{},
{
headers: {
'X-Emby-Authorization': authHeaderVal,
Authorization: authHeaderVal,
'Content-Type': 'application/json',
Accept: 'application/json',
},
@@ -304,13 +319,7 @@ class JellyfinAPI extends ExternalAPI {
}
private mapLibraries(mediaFolders: JellyfinMediaFolder[]): JellyfinLibrary[] {
const excludedTypes = [
'music',
'books',
'musicvideos',
'homevideos',
'boxsets',
];
const excludedTypes = ['books', 'musicvideos', 'homevideos', 'boxsets'];
return mediaFolders
.filter((Item: JellyfinMediaFolder) => {
@@ -323,7 +332,12 @@ class JellyfinAPI extends ExternalAPI {
return <JellyfinLibrary>{
key: Item.Id,
title: Item.Name,
type: Item.CollectionType === 'movies' ? 'movie' : 'show',
type:
Item.CollectionType === 'movies'
? 'movie'
: Item.CollectionType === 'tvshows'
? 'show'
: 'music',
agent: 'jellyfin',
};
});
@@ -332,7 +346,7 @@ class JellyfinAPI extends ExternalAPI {
public async getLibraryContents(id: string): Promise<JellyfinLibraryItem[]> {
try {
const libraryItemsResponse = await this.get<any>(
`/Items?SortBy=SortName&SortOrder=Ascending&IncludeItemTypes=Series,Movie,Others&Recursive=true&StartIndex=0&ParentId=${id}&collapseBoxSetItems=false`
`/Items?SortBy=SortName&SortOrder=Ascending&IncludeItemTypes=Series,Movie,MusicAlbum,MusicArtist,Others&Recursive=true&StartIndex=0&ParentId=${id}&collapseBoxSetItems=false`
);
return libraryItemsResponse.Items.filter(
@@ -415,13 +429,22 @@ class JellyfinAPI extends ExternalAPI {
}
}
public async getEpisodes(
public async getEpisodes<
T extends { includeMediaInfo?: boolean } | undefined = undefined
>(
seriesID: string,
seasonID: string
): Promise<JellyfinLibraryItem[]> {
seasonID: string,
options?: T
): Promise<EpisodeReturn<T>> {
try {
const episodeResponse = await this.get<any>(
`/Shows/${seriesID}/Episodes?seasonId=${seasonID}`
`/Shows/${seriesID}/Episodes`,
{
params: {
seasonId: seasonID,
...(options?.includeMediaInfo && { fields: 'MediaSources' }),
},
}
);
return episodeResponse.Items.filter(

View File

@@ -0,0 +1,134 @@
import ExternalAPI from '@server/api/externalapi';
import cacheManager from '@server/lib/cache';
import type {
LbAlbumDetails,
LbArtistDetails,
LbFreshReleasesResponse,
LbTopAlbumsResponse,
LbTopArtistsResponse,
} from './interfaces';
class ListenBrainzAPI extends ExternalAPI {
constructor() {
super(
'https://api.listenbrainz.org/1',
{},
{
nodeCache: cacheManager.getCache('listenbrainz').data,
rateLimit: {
maxRequests: 20,
maxRPS: 25,
},
}
);
}
public async getAlbum(mbid: string): Promise<LbAlbumDetails> {
try {
return await this.post<LbAlbumDetails>(
`/album/${mbid}`,
{},
{
baseURL: 'https://listenbrainz.org',
},
43200
);
} catch (e) {
throw new Error(
`[ListenBrainz] Failed to fetch album details: ${
e instanceof Error ? e.message : 'Unknown error'
}`
);
}
}
public async getArtist(mbid: string): Promise<LbArtistDetails> {
try {
return await this.post<LbArtistDetails>(
`/artist/${mbid}`,
{},
{
baseURL: 'https://listenbrainz.org',
},
43200
);
} catch (e) {
throw new Error(
`[ListenBrainz] Failed to fetch artist details: ${
e instanceof Error ? e.message : 'Unknown error'
}`
);
}
}
public async getTopAlbums({
offset = 0,
range = 'month',
count = 20,
}: {
offset?: number;
range?: string;
count?: number;
}): Promise<LbTopAlbumsResponse> {
return this.get<LbTopAlbumsResponse>(
'/stats/sitewide/release-groups',
{
params: {
offset: offset.toString(),
range,
count: count.toString(),
},
},
43200
);
}
public async getTopArtists({
offset = 0,
range = 'month',
count = 20,
}: {
offset?: number;
range?: string;
count?: number;
}): Promise<LbTopArtistsResponse> {
return this.get<LbTopArtistsResponse>(
'/stats/sitewide/artists',
{
params: {
offset: offset.toString(),
range,
count: count.toString(),
},
},
43200
);
}
public async getFreshReleases({
days = 7,
sort = 'release_date',
offset = 0,
count = 20,
}: {
days?: number;
sort?: string;
offset?: number;
count?: number;
} = {}): Promise<LbFreshReleasesResponse> {
return this.get<LbFreshReleasesResponse>(
'/explore/fresh-releases',
{
params: {
days: days.toString(),
sort,
offset: offset.toString(),
count: count.toString(),
},
},
43200
);
}
}
export default ListenBrainzAPI;
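A hedged usage sketch of the client above, for example to back a popular-albums slider. The wrapping function and paging math are assumptions; the import path (as used in MediaRequest below), getTopAlbums, and the payload shape come from this change.

import ListenBrainzAPI from '@server/api/listenbrainz';

// Hypothetical call site for a "popular albums" discover row.
async function getPopularAlbums(page = 1, pageSize = 20) {
  const listenbrainz = new ListenBrainzAPI();
  const { payload } = await listenbrainz.getTopAlbums({
    range: 'week',
    offset: (page - 1) * pageSize,
    count: pageSize,
  });
  // release_group_mbid is the id used elsewhere in this change as the music media id (mbId).
  return payload.release_groups.map((rg) => ({
    mbId: rg.release_group_mbid,
    title: rg.release_group_name,
    artistName: rg.artist_name,
  }));
}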

View File

@@ -0,0 +1,243 @@
export interface LbSimilarArtistResponse {
artist_mbid: string;
name: string;
comment: string;
type: string | null;
gender: string | null;
score: number;
reference_mbid: string;
}
export interface LbReleaseGroup {
artist_mbids: string[];
artist_name: string;
caa_id: number;
caa_release_mbid: string;
listen_count: number;
release_group_mbid: string;
release_group_name: string;
}
export interface LbTopAlbumsResponse {
payload: {
count: number;
from_ts: number;
last_updated: number;
offset: number;
range: string;
release_groups: LbReleaseGroup[];
to_ts: number;
};
}
export interface LbArtist {
artist_credit_name: string;
artist_mbid: string;
join_phrase: string;
}
export interface LbTrack {
artist_mbids: string[];
artists: LbArtist[];
length: number;
name: string;
position: number;
recording_mbid: string;
total_listen_count: number;
total_user_count: number;
}
export interface LbMedium {
format: string;
name: string;
position: number;
tracks: LbTrack[];
}
export interface LbListener {
listen_count: number;
user_name: string;
}
export interface LbListeningStats {
artist_mbids: string[];
artist_name: string;
caa_id: number;
caa_release_mbid: string;
from_ts: number;
last_updated: number;
listeners: LbListener[];
release_group_mbid: string;
release_group_name: string;
stats_range: string;
to_ts: number;
total_listen_count: number;
total_user_count: number;
}
export interface LbAlbumDetails {
caa_id: number;
caa_release_mbid: string;
listening_stats: LbListeningStats;
mediums: LbMedium[];
recordings_release_mbid: string;
release_group_mbid: string;
release_group_metadata: {
artist: {
artist_credit_id: number;
artists: {
area: string;
artist_mbid: string;
begin_year: number;
join_phrase: string;
name: string;
rels: { [key: string]: string };
type: string;
}[];
name: string;
};
release: {
caa_id: number;
caa_release_mbid: string;
date: string;
name: string;
rels: any[];
type: string;
};
release_group: {
caa_id: number;
caa_release_mbid: string;
date: string;
name: string;
rels: any[];
type: string;
};
tag: {
artist: {
artist_mbid: string;
count: number;
tag: string;
}[];
release_group: {
count: number;
genre_mbid: string;
tag: string;
}[];
};
};
type: string;
}
export interface LbArtistRels {
[key: string]: string;
}
export interface LbArtistTag {
artist_mbid: string;
count: number;
tag: string;
}
export interface LbArtistMetadata {
area: string;
artist_mbid: string;
begin_year: number;
mbid: string;
name: string;
rels: LbArtistRels;
tag: {
artist: LbArtistTag[];
};
type: string;
}
export interface LbPopularRecording {
artist_mbids: string[];
artist_name: string;
artists: LbArtist[];
caa_id: number;
caa_release_mbid: string;
length: number;
recording_mbid: string;
recording_name: string;
release_color?: {
blue: number;
green: number;
red: number;
};
release_mbid: string;
release_name: string;
total_listen_count: number;
total_user_count: number;
}
export interface LbReleaseGroupExtended extends LbReleaseGroup {
artist_credit_name: string;
artists: LbArtist[];
date: string;
mbid: string;
type: string;
name: string;
secondary_types?: string[];
total_listen_count: number;
}
export interface LbArtistDetails {
artist: LbArtistMetadata;
coverArt: string;
listeningStats: LbListeningStats;
popularRecordings: LbPopularRecording[];
releaseGroups: LbReleaseGroupExtended[];
similarArtists: {
artists: LbSimilarArtistResponse[];
topRecordingColor: {
blue: number;
green: number;
red: number;
};
topReleaseGroupColor: {
blue: number;
green: number;
red: number;
};
};
}
export interface LbArtist {
artist_mbid: string;
artist_name: string;
listen_count: number;
}
export interface LbTopArtistsResponse {
payload: {
count: number;
from_ts: number;
last_updated: number;
offset: number;
range: string;
artists: LbArtist[];
to_ts: number;
};
}
export interface LbRelease {
artist_credit_name: string;
artist_mbids: string[];
caa_id: number;
caa_release_mbid: string;
listen_count: number;
release_date: string;
release_group_mbid: string;
release_group_primary_type: string;
release_group_secondary_type: string;
release_mbid: string;
release_name: string;
release_tags: string[];
}
export interface LbFreshReleasesResponse {
payload: {
releases: LbRelease[];
};
}

View File

@@ -0,0 +1,192 @@
import ExternalAPI from '@server/api/externalapi';
import cacheManager from '@server/lib/cache';
import axios from 'axios';
import DOMPurify from 'dompurify';
import { JSDOM } from 'jsdom';
import type { MbAlbumDetails, MbArtistDetails } from './interfaces';
const window = new JSDOM('').window;
const purify = DOMPurify(window);
class MusicBrainz extends ExternalAPI {
constructor() {
super(
'https://musicbrainz.org/ws/2',
{},
{
headers: {
'User-Agent':
'Jellyseerr/1.0.0 (https://github.com/Fallenbagel/jellyseerr)',
Accept: 'application/json',
},
nodeCache: cacheManager.getCache('musicbrainz').data,
rateLimit: {
maxRequests: 1,
maxRPS: 1,
},
}
);
}
public async searchAlbum({
query,
limit = 30,
offset = 0,
}: {
query: string;
limit?: number;
offset?: number;
}): Promise<MbAlbumDetails[]> {
try {
const data = await this.get<{
created: string;
count: number;
offset: number;
'release-groups': MbAlbumDetails[];
}>(
'/release-group',
{
params: {
query,
fmt: 'json',
limit: limit.toString(),
offset: offset.toString(),
},
},
43200
);
return data['release-groups'];
} catch (e) {
throw new Error(
`[MusicBrainz] Failed to search albums: ${
e instanceof Error ? e.message : 'Unknown error'
}`
);
}
}
public async searchArtist({
query,
limit = 50,
offset = 0,
}: {
query: string;
limit?: number;
offset?: number;
}): Promise<MbArtistDetails[]> {
try {
const data = await this.get<{
created: string;
count: number;
offset: number;
artists: MbArtistDetails[];
}>(
'/artist',
{
params: {
query,
fmt: 'json',
limit: limit.toString(),
offset: offset.toString(),
},
},
43200
);
return data.artists;
} catch (e) {
throw new Error(
`[MusicBrainz] Failed to search artists: ${
e instanceof Error ? e.message : 'Unknown error'
}`
);
}
}
public async getArtistWikipediaExtract({
artistMbid,
language = 'en',
}: {
artistMbid: string;
language?: string;
}): Promise<{ title: string; url: string; content: string } | null> {
if (
!artistMbid ||
typeof artistMbid !== 'string' ||
!/^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/.test(
artistMbid
)
) {
throw new Error('Invalid MusicBrainz artist ID format');
}
try {
const safeUrl = `https://musicbrainz.org/artist/${artistMbid}/wikipedia-extract`;
const response = await axios.get(safeUrl, {
headers: {
Accept: 'application/json',
'Accept-Language': language,
'User-Agent':
'Jellyseerr/1.0.0 (https://github.com/Fallenbagel/jellyseerr)',
},
});
const data = response.data;
if (!data.wikipediaExtract || !data.wikipediaExtract.content) {
return null;
}
const cleanContent = purify.sanitize(data.wikipediaExtract.content, {
ALLOWED_TAGS: [],
ALLOWED_ATTR: [],
});
return {
title: data.wikipediaExtract.title,
url: data.wikipediaExtract.url,
content: cleanContent.trim(),
};
} catch (error) {
throw new Error(
`[MusicBrainz] Failed to fetch Wikipedia extract: ${
error instanceof Error ? error.message : 'Unknown error'
}`
);
}
}
public async getReleaseGroup({
releaseId,
}: {
releaseId: string;
}): Promise<string | null> {
try {
const data = await this.get<{
'release-group': {
id: string;
};
}>(
`/release/${releaseId}`,
{
params: {
inc: 'release-groups',
fmt: 'json',
},
},
43200
);
return data['release-group']?.id ?? null;
} catch (e) {
throw new Error(
`[MusicBrainz] Failed to fetch release group: ${
e instanceof Error ? e.message : 'Unknown error'
}`
);
}
}
}
export default MusicBrainz;
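A minimal sketch using the wrapper above. The function is hypothetical; the import path (as used in MediaRequest below), searchArtist, and getArtistWikipediaExtract are as defined in this file.

import MusicBrainz from '@server/api/musicbrainz';

// Hypothetical call site: search artists and attach a plain-text bio where one exists.
async function searchArtistsWithBio(query: string) {
  const musicbrainz = new MusicBrainz();
  const artists = await musicbrainz.searchArtist({ query, limit: 10 });
  return Promise.all(
    artists.map(async (artist) => {
      // getArtistWikipediaExtract throws on failure, so a missing bio is treated as non-fatal here.
      const wiki = await musicbrainz
        .getArtistWikipediaExtract({ artistMbid: artist.id })
        .catch(() => null);
      return { ...artist, overview: wiki?.content };
    })
  );
}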

View File

@@ -0,0 +1,119 @@
interface MbResult {
id: string;
score: number;
}
export interface MbLink {
type: string;
target: string;
}
export interface MbAlbumResult extends MbResult {
media_type: 'album';
title: string;
'primary-type': 'Album' | 'Single' | 'EP';
'first-release-date': string;
'artist-credit': {
name: string;
artist: {
id: string;
name: string;
'sort-name': string;
overview?: string;
};
}[];
posterPath: string | undefined;
}
export interface MbAlbumDetails extends MbAlbumResult {
'type-id': string;
'primary-type-id': string;
count: number;
'secondary-types'?: string[];
'secondary-type-ids'?: string[];
releases: {
id: string;
title: string;
status: string;
'status-id': string;
}[];
releasedate: string;
tags?: {
count: number;
name: string;
}[];
artists?: {
id: string;
name: string;
overview?: string;
}[];
links?: MbLink[];
poster_path?: string;
}
export interface MbArtistResult extends MbResult {
media_type: 'artist';
name: string;
type: 'Group' | 'Person';
'sort-name': string;
country?: string;
disambiguation?: string;
artistThumb?: string | null;
artistBackdrop?: string | null;
}
export interface MbArtistDetails extends MbArtistResult {
'type-id': string;
area?: {
id: string;
name: string;
type: string;
'type-id': string;
'sort-name': string;
};
'begin-area'?: {
id: string;
name: string;
type: string;
'sort-name': string;
};
'life-span'?: {
begin?: string;
ended: boolean;
};
gender?: string;
'gender-id'?: string;
isnis?: string[];
aliases?: {
name: string;
'sort-name': string;
type?: string;
'type-id'?: string;
}[];
tags?: {
count: number;
name: string;
}[];
links?: MbLink[];
}
export interface MbSearchAlbumResponse {
created: string;
count: number;
offset: number;
'release-groups': MbAlbumDetails[];
}
export interface MbSearchArtistResponse {
created: string;
count: number;
offset: number;
artists: MbArtistDetails[];
}
export interface MbSearchMultiResponse {
created: string;
count: number;
offset: number;
results: (MbArtistResult | MbAlbumResult)[];
}

View File

@@ -16,7 +16,7 @@ export interface PlexLibraryItem {
Guid?: {
id: string;
}[];
type: 'movie' | 'show' | 'season' | 'episode';
type: 'movie' | 'show' | 'season' | 'episode' | 'artist' | 'album' | 'track';
Media: Media[];
}
@@ -28,7 +28,7 @@ interface PlexLibraryResponse {
}
export interface PlexLibrary {
type: 'show' | 'movie';
type: 'show' | 'movie' | 'artist';
key: string;
title: string;
agent: string;
@@ -44,7 +44,7 @@ export interface PlexMetadata {
ratingKey: string;
parentRatingKey?: string;
guid: string;
type: 'movie' | 'show' | 'season';
type: 'movie' | 'show' | 'season' | 'artist' | 'album' | 'track';
title: string;
Guid: {
id: string;
@@ -152,7 +152,10 @@ class PlexAPI {
const newLibraries: Library[] = libraries
// Remove libraries that are not movie, show, or music (artist)
// Remove libraries that are not movie, show, or music (artist)
.filter(
(library) => library.type === 'movie' || library.type === 'show'
(library) =>
library.type === 'movie' ||
library.type === 'show' ||
library.type === 'artist'
)
// Remove libraries that do not have a metadata agent set (usually personal video libraries)
.filter((library) => library.agent !== 'com.plexapp.agents.none')
@@ -165,7 +168,7 @@ class PlexAPI {
id: library.key,
name: library.title,
enabled: existing?.enabled ?? false,
type: library.type,
type: library.type === 'artist' ? 'music' : library.type,
lastScan: existing?.lastScan,
};
});
@@ -227,12 +230,19 @@ class PlexAPI {
options: { addedAt: number } = {
addedAt: Date.now() - 1000 * 60 * 60,
},
mediaType: 'movie' | 'show'
mediaType: 'movie' | 'show' | 'album'
): Promise<PlexLibraryItem[]> {
let typeCode = '1';
if (mediaType === 'show') {
typeCode = '4';
} else if (mediaType === 'album') {
typeCode = '9';
}
const response = await this.plexClient.query<PlexLibraryResponse>({
uri: `/library/sections/${id}/all?type=${
mediaType === 'show' ? '4' : '1'
}&sort=addedAt%3Adesc&addedAt>>=${Math.floor(options.addedAt / 1000)}`,
uri: `/library/sections/${id}/all?type=${typeCode}&sort=addedAt%3Adesc&addedAt>>=${Math.floor(
options.addedAt / 1000
)}`,
extraHeaders: {
'X-Plex-Container-Start': `0`,
'X-Plex-Container-Size': `500`,

View File

@@ -124,7 +124,7 @@ export interface PlexWatchlistItem {
ratingKey: string;
tmdbId: number;
tvdbId?: number;
type: 'movie' | 'show';
type: 'movie' | 'show' | 'album';
title: string;
}

View File

@@ -0,0 +1,422 @@
import logger from '@server/logger';
import ServarrBase from './base';
interface LidarrMediaResult {
id: number;
mbId: string;
media_type: string;
}
export interface LidarrArtistResult extends LidarrMediaResult {
artist: {
media_type: 'artist';
artistName: string;
overview: string;
remotePoster?: string;
artistType: string;
genres: string[];
};
}
export interface LidarrAlbumResult extends LidarrMediaResult {
album: {
disambiguation: string;
duration: number;
mediumCount: number;
ratings: LidarrRating | undefined;
links: never[];
media_type: 'music';
title: string;
foreignAlbumId: string;
overview: string;
releaseDate: string;
albumType: string;
genres: string[];
images: LidarrImage[];
artist: {
id: number;
status: string;
ended: boolean;
foreignArtistId: string;
tadbId: number;
discogsId: number;
artistType: string;
disambiguation: string | undefined;
links: never[];
images: never[];
genres: never[];
cleanName: string | undefined;
sortName: string | undefined;
tags: never[];
added: string;
ratings: LidarrRating | undefined;
artistName: string;
overview: string;
};
};
}
export interface LidarrArtistDetails {
id: number;
foreignArtistId: string;
status: string;
ended: boolean;
artistName: string;
tadbId: number;
discogsId: number;
overview: string;
artistType: string;
disambiguation: string;
links: LidarrLink[];
nextAlbum: LidarrAlbumResult | null;
lastAlbum: LidarrAlbumResult | null;
images: LidarrImage[];
qualityProfileId: number;
profileId: number;
metadataProfileId: number;
monitored: boolean;
monitorNewItems: string;
genres: string[];
tags: string[];
added: string;
ratings: LidarrRating;
remotePoster?: string;
cleanName?: string;
sortName?: string;
}
export interface LidarrAlbumDetails {
id: number;
mbId: string;
foreignArtistId: string;
hasFile: boolean;
monitored: boolean;
title: string;
titleSlug: string;
path: string;
artistName: string;
disambiguation: string;
overview: string;
artistId: number;
foreignAlbumId: string;
anyReleaseOk: boolean;
profileId: number;
qualityProfileId: number;
duration: number;
isAvailable: boolean;
folderName: string;
metadataProfileId: number;
added: string;
albumType: string;
secondaryTypes: string[];
mediumCount: number;
ratings: LidarrRating;
releaseDate: string;
releases: {
id: number;
albumId: number;
foreignReleaseId: string;
title: string;
status: string;
duration: number;
trackCount: number;
media: unknown[];
mediumCount: number;
disambiguation: string;
country: unknown[];
label: unknown[];
format: string;
monitored: boolean;
}[];
genres: string[];
media: {
mediumNumber: number;
mediumName: string;
mediumFormat: string;
}[];
artist: LidarrArtistDetails & {
artistName: string;
nextAlbum: unknown | null;
lastAlbum: unknown | null;
};
images: LidarrImage[];
links: {
url: string;
name: string;
}[];
remoteCover?: string;
}
export interface LidarrImage {
url: string;
coverType: string;
}
export interface LidarrRating {
votes: number;
value: number;
}
export interface LidarrLink {
url: string;
name: string;
}
export interface LidarrRelease {
id: number;
albumId: number;
foreignReleaseId: string;
title: string;
status: string;
duration: number;
trackCount: number;
media: LidarrMedia[];
}
export interface LidarrMedia {
mediumNumber: number;
mediumFormat: string;
mediumName: string;
}
export interface LidarrSearchResponse {
page: number;
total_results: number;
total_pages: number;
results: (LidarrArtistResult | LidarrAlbumResult)[];
}
export interface LidarrAlbumOptions {
[key: string]: unknown;
title: string;
disambiguation?: string;
overview?: string;
artistId: number;
foreignAlbumId: string;
monitored: boolean;
anyReleaseOk: boolean;
profileId: number;
duration?: number;
albumType: string;
secondaryTypes: string[];
mediumCount?: number;
ratings?: LidarrRating;
releaseDate?: string;
releases: unknown[];
genres: string[];
media: unknown[];
artist: {
status: string;
ended: boolean;
artistName: string;
foreignArtistId: string;
tadbId?: number;
discogsId?: number;
overview?: string;
artistType: string;
disambiguation?: string;
links: LidarrLink[];
images: LidarrImage[];
path: string;
qualityProfileId: number;
metadataProfileId: number;
monitored: boolean;
monitorNewItems: string;
rootFolderPath: string;
genres: string[];
cleanName?: string;
sortName?: string;
tags: number[];
added?: string;
ratings?: LidarrRating;
id: number;
};
images: LidarrImage[];
links: LidarrLink[];
addOptions: {
searchForNewAlbum: boolean;
};
}
export interface LidarrArtistOptions {
[key: string]: unknown;
artistName: string;
qualityProfileId: number;
profileId: number;
rootFolderPath: string;
foreignArtistId: string;
monitored: boolean;
tags: number[];
searchNow: boolean;
monitorNewItems: string;
monitor: string;
searchForMissingAlbums: boolean;
}
export interface LidarrAlbum {
id: number;
mbId: string;
title: string;
monitored: boolean;
artistId: number;
foreignAlbumId: string;
titleSlug: string;
profileId: number;
duration: number;
albumType: string;
statistics: {
trackFileCount: number;
trackCount: number;
totalTrackCount: number;
sizeOnDisk: number;
percentOfTracks: number;
};
}
export interface SearchCommand extends Record<string, unknown> {
name: 'AlbumSearch';
albumIds: number[];
}
export interface MetadataProfile {
id: number;
name: string;
}
class LidarrAPI extends ServarrBase<{ albumId: number }> {
protected apiKey: string;
constructor({ url, apiKey }: { url: string; apiKey: string }) {
super({ url, apiKey, cacheName: 'lidarr', apiName: 'Lidarr' });
this.apiKey = apiKey;
}
public async getAlbums(): Promise<LidarrAlbum[]> {
try {
const data = await this.get<LidarrAlbum[]>('/album');
return data;
} catch (e) {
throw new Error(`[Lidarr] Failed to retrieve albums: ${e.message}`);
}
}
public async getAlbum({ id }: { id: number }): Promise<LidarrAlbum> {
try {
const data = await this.get<LidarrAlbum>(`/album/${id}`);
return data;
} catch (e) {
throw new Error(`[Lidarr] Failed to retrieve album: ${e.message}`);
}
}
public async removeAlbum(albumId: number): Promise<void> {
try {
await this.axios.delete(`/album/${albumId}`, {
params: {
deleteFiles: 'true',
addImportExclusion: 'false',
},
});
logger.info(`[Lidarr] Removed album ${albumId}`);
} catch (e) {
throw new Error(`[Lidarr] Failed to remove album: ${e.message}`);
}
}
public async searchAlbum(mbid: string): Promise<LidarrAlbumResult[]> {
try {
const data = await this.get<LidarrAlbumResult[]>('/search', {
params: {
term: `lidarr:${mbid}`,
},
});
return data;
} catch (e) {
throw new Error(`[Lidarr] Failed to search album: ${e.message}`);
}
}
public async addAlbum(options: LidarrAlbumOptions): Promise<LidarrAlbum> {
try {
const existingAlbums = await this.get<LidarrAlbum[]>('/album', {
params: {
foreignAlbumId: options.foreignAlbumId,
includeAllArtistAlbums: 'false',
},
});
if (existingAlbums.length > 0 && existingAlbums[0].monitored) {
logger.info(
'Album is already monitored in Lidarr. Skipping add and returning success',
{
label: 'Lidarr',
}
);
return existingAlbums[0];
}
if (existingAlbums.length > 0) {
logger.info(
'Album exists in Lidarr but is not monitored. Updating monitored status.',
{
label: 'Lidarr',
albumId: existingAlbums[0].id,
albumTitle: existingAlbums[0].title,
}
);
const updatedAlbum = await this.axios.put<LidarrAlbum>(
`/album/${existingAlbums[0].id}`,
{
...existingAlbums[0],
monitored: true,
}
);
await this.post('/command', {
name: 'AlbumSearch',
albumIds: [updatedAlbum.data.id],
});
return updatedAlbum.data;
}
const data = await this.post<LidarrAlbum>('/album', {
...options,
monitored: true,
});
return data;
} catch (e) {
throw new Error(`[Lidarr] Failed to add album: ${e.message}`);
}
}
public async searchAlbumByMusicBrainzId(
mbid: string
): Promise<LidarrAlbumResult[]> {
try {
const data = await this.get<LidarrAlbumResult[]>('/search', {
params: {
term: `lidarr:${mbid}`,
},
});
return data;
} catch (e) {
throw new Error(
`[Lidarr] Failed to search album by MusicBrainz ID: ${e.message}`
);
}
}
public async getMetadataProfiles(): Promise<MetadataProfile[]> {
try {
const data = await this.get<MetadataProfile[]>('/metadataProfile');
return data;
} catch (e) {
throw new Error(
`[Lidarr] Failed to retrieve metadata profiles: ${e.message}`
);
}
}
}
export default LidarrAPI;
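A hedged wiring sketch for the client above. The URL and API key are placeholders, and the exact URL shape is whatever ServarrBase expects; the import path (as used in the Media entity below), the constructor, and getAlbums come from the code shown.

import LidarrAPI from '@server/api/servarr/lidarr';

// Placeholder connection details for the configured Lidarr server (assumed values).
const lidarr = new LidarrAPI({
  url: 'http://lidarr.local:8686',
  apiKey: 'YOUR_LIDARR_API_KEY',
});

// Hypothetical helper: list albums Lidarr is currently monitoring.
async function listMonitoredAlbums() {
  const albums = await lidarr.getAlbums();
  return albums.filter((album) => album.monitored);
}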

View File

@@ -0,0 +1,219 @@
import ExternalAPI from '@server/api/externalapi';
import { getRepository } from '@server/datasource';
import MetadataArtist from '@server/entity/MetadataArtist';
import cacheManager from '@server/lib/cache';
import logger from '@server/logger';
import { In } from 'typeorm';
import type { TadbArtistResponse } from './interfaces';
class TheAudioDb extends ExternalAPI {
private readonly apiKey = '195003';
private readonly CACHE_TTL = 43200;
private readonly STALE_THRESHOLD = 30 * 24 * 60 * 60 * 1000;
constructor() {
super(
'https://www.theaudiodb.com/api/v1/json',
{},
{
nodeCache: cacheManager.getCache('tadb').data,
rateLimit: {
maxRequests: 20,
maxRPS: 25,
},
}
);
}
private isMetadataStale(metadata: MetadataArtist | null): boolean {
if (!metadata || !metadata.tadbUpdatedAt) return true;
return Date.now() - metadata.tadbUpdatedAt.getTime() > this.STALE_THRESHOLD;
}
private createEmptyResponse() {
return { artistThumb: null, artistBackground: null };
}
public async getArtistImagesFromCache(id: string): Promise<
| {
artistThumb: string | null;
artistBackground: string | null;
}
| null
| undefined
> {
try {
const metadata = await getRepository(MetadataArtist).findOne({
where: { mbArtistId: id },
select: ['tadbThumb', 'tadbCover', 'tadbUpdatedAt'],
});
if (metadata) {
return {
artistThumb: metadata.tadbThumb,
artistBackground: metadata.tadbCover,
};
}
return undefined;
} catch (error) {
logger.error('Failed to fetch artist images from cache', {
label: 'TheAudioDb',
id,
error: error instanceof Error ? error.message : 'Unknown error',
});
return null;
}
}
public async getArtistImages(
id: string
): Promise<{ artistThumb: string | null; artistBackground: string | null }> {
try {
const metadata = await getRepository(MetadataArtist).findOne({
where: { mbArtistId: id },
select: ['tadbThumb', 'tadbCover', 'tadbUpdatedAt'],
});
if (metadata?.tadbThumb || metadata?.tadbCover) {
return {
artistThumb: metadata.tadbThumb,
artistBackground: metadata.tadbCover,
};
}
if (metadata && !this.isMetadataStale(metadata)) {
return this.createEmptyResponse();
}
return await this.fetchArtistImages(id);
} catch (error) {
logger.error('Failed to get artist images', {
label: 'TheAudioDb',
id,
error: error instanceof Error ? error.message : 'Unknown error',
});
return this.createEmptyResponse();
}
}
private async fetchArtistImages(id: string): Promise<{
artistThumb: string | null;
artistBackground: string | null;
}> {
try {
const data = await this.get<TadbArtistResponse>(
`/${this.apiKey}/artist-mb.php`,
{ params: { i: id } },
this.CACHE_TTL
);
const result = {
artistThumb: data.artists?.[0]?.strArtistThumb || null,
artistBackground: data.artists?.[0]?.strArtistFanart || null,
};
const metadataRepository = getRepository(MetadataArtist);
await metadataRepository
.upsert(
{
mbArtistId: id,
tadbThumb: result.artistThumb,
tadbCover: result.artistBackground,
tadbUpdatedAt: new Date(),
},
{
conflictPaths: ['mbArtistId'],
}
)
.catch((e) => {
logger.error('Failed to save artist metadata', {
label: 'TheAudioDb',
error: e instanceof Error ? e.message : 'Unknown error',
});
});
return result;
} catch (error) {
await getRepository(MetadataArtist).upsert(
{
mbArtistId: id,
tadbThumb: null,
tadbCover: null,
tadbUpdatedAt: new Date(),
},
{
conflictPaths: ['mbArtistId'],
}
);
return this.createEmptyResponse();
}
}
public async batchGetArtistImages(ids: string[]): Promise<
Record<
string,
{
artistThumb: string | null;
artistBackground: string | null;
}
>
> {
if (!ids.length) return {};
const metadataRepository = getRepository(MetadataArtist);
const existingMetadata = await metadataRepository.find({
where: { mbArtistId: In(ids) },
select: ['mbArtistId', 'tadbThumb', 'tadbCover', 'tadbUpdatedAt'],
});
const results: Record<
string,
{
artistThumb: string | null;
artistBackground: string | null;
}
> = {};
const idsToFetch: string[] = [];
ids.forEach((id) => {
const metadata = existingMetadata.find((m) => m.mbArtistId === id);
if (metadata?.tadbThumb || metadata?.tadbCover) {
results[id] = {
artistThumb: metadata.tadbThumb,
artistBackground: metadata.tadbCover,
};
} else if (metadata && !this.isMetadataStale(metadata)) {
results[id] = {
artistThumb: null,
artistBackground: null,
};
} else {
idsToFetch.push(id);
}
});
if (idsToFetch.length > 0) {
const batchPromises = idsToFetch.map((id) =>
this.fetchArtistImages(id)
.then((response) => {
results[id] = response;
return true;
})
.catch(() => {
results[id] = {
artistThumb: null,
artistBackground: null,
};
return false;
})
);
await Promise.all(batchPromises);
}
return results;
}
}
export default TheAudioDb;
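A minimal usage sketch for the class above. The import path and wrapping function are assumptions; batchGetArtistImages and its return shape come from the code shown.

// Import path assumed for illustration.
import TheAudioDb from '@server/api/theaudiodb';

// Hypothetical call site: decorate a list of artists with thumbs and backdrops in one pass.
async function attachArtistImages(artistMbids: string[]) {
  const tadb = new TheAudioDb();
  const images = await tadb.batchGetArtistImages(artistMbids);
  return artistMbids.map((id) => ({
    id,
    artistThumb: images[id]?.artistThumb ?? null,
    artistBackdrop: images[id]?.artistBackground ?? null,
  }));
}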

View File

@@ -0,0 +1,8 @@
interface TadbArtist {
strArtistThumb: string | null;
strArtistFanart: string | null;
}
export interface TadbArtistResponse {
artists?: TadbArtist[];
}

View File

@@ -73,6 +73,7 @@ export interface TmdbCertificationResponse {
interface DiscoverMovieOptions {
page?: number;
includeAdult?: boolean;
includeVideo?: boolean;
language?: string;
primaryReleaseDateGte?: string;
primaryReleaseDateLte?: string;
@@ -490,6 +491,7 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
sortBy = 'popularity.desc',
page = 1,
includeAdult = false,
includeVideo = true,
language = this.locale,
primaryReleaseDateGte,
primaryReleaseDateLte,
@@ -527,6 +529,7 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
sort_by: sortBy,
page,
include_adult: includeAdult,
include_video: includeVideo,
language,
region: this.discoverRegion || '',
with_original_language:

View File

@@ -42,6 +42,7 @@ export interface TmdbCollectionResult {
export interface TmdbPersonResult {
id: number;
known_for_department: string;
name: string;
popularity: number;
profile_path?: string;
@@ -464,6 +465,10 @@ export interface TmdbCompanySearchResponse extends TmdbPaginatedResponse {
results: TmdbCompany[];
}
export interface TmdbSearchPersonResponse extends TmdbPaginatedResponse {
results: TmdbPersonResult[];
}
export interface TmdbWatchProviderRegion {
iso_3166_1: string;
english_name: string;

View File

@@ -0,0 +1,341 @@
import ExternalAPI from '@server/api/externalapi';
import TheMovieDb from '@server/api/themoviedb';
import { getRepository } from '@server/datasource';
import MetadataArtist from '@server/entity/MetadataArtist';
import cacheManager from '@server/lib/cache';
import logger from '@server/logger';
import { In } from 'typeorm';
import type { TmdbSearchPersonResponse } from './interfaces';
interface SearchPersonOptions {
query: string;
page?: number;
includeAdult?: boolean;
language?: string;
}
class TmdbPersonMapper extends ExternalAPI {
private readonly CACHE_TTL = 43200;
private readonly STALE_THRESHOLD = 30 * 24 * 60 * 60 * 1000;
private tmdb: TheMovieDb;
constructor() {
super(
'https://api.themoviedb.org/3',
{
api_key: '431a8708161bcd1f1fbe7536137e61ed',
},
{
nodeCache: cacheManager.getCache('tmdb').data,
rateLimit: {
maxRequests: 20,
maxRPS: 50,
},
}
);
this.tmdb = new TheMovieDb();
}
private isMetadataStale(metadata: MetadataArtist | null): boolean {
if (!metadata || !metadata.tmdbUpdatedAt) return true;
return Date.now() - metadata.tmdbUpdatedAt.getTime() > this.STALE_THRESHOLD;
}
private createEmptyResponse() {
return { personId: null, profilePath: null };
}
public async getMappingFromCache(
artistId: string
): Promise<{ personId: number | null; profilePath: string | null } | null> {
try {
const metadata = await getRepository(MetadataArtist).findOne({
where: { mbArtistId: artistId },
select: ['tmdbPersonId', 'tmdbThumb', 'tmdbUpdatedAt'],
});
if (!metadata) {
return null;
}
if (this.isMetadataStale(metadata)) {
return null;
}
return {
personId: metadata.tmdbPersonId ? Number(metadata.tmdbPersonId) : null,
profilePath: metadata.tmdbThumb,
};
} catch (error) {
logger.error('Failed to get person mapping from cache', {
label: 'TmdbPersonMapper',
artistId,
error: error instanceof Error ? error.message : 'Unknown error',
});
return null;
}
}
public async getMapping(
artistId: string,
artistName: string
): Promise<{ personId: number | null; profilePath: string | null }> {
try {
const metadata = await getRepository(MetadataArtist).findOne({
where: { mbArtistId: artistId },
select: ['tmdbPersonId', 'tmdbThumb', 'tmdbUpdatedAt'],
});
if (metadata?.tmdbPersonId || metadata?.tmdbThumb) {
return {
personId: metadata.tmdbPersonId
? Number(metadata.tmdbPersonId)
: null,
profilePath: metadata.tmdbThumb,
};
}
if (metadata && !this.isMetadataStale(metadata)) {
return this.createEmptyResponse();
}
return await this.fetchMapping(artistId, artistName);
} catch (error) {
logger.error('Failed to get person mapping', {
label: 'TmdbPersonMapper',
artistId,
error: error instanceof Error ? error.message : 'Unknown error',
});
return this.createEmptyResponse();
}
}
private async fetchMapping(
artistId: string,
artistName: string
): Promise<{ personId: number | null; profilePath: string | null }> {
try {
const existingMetadata = await getRepository(MetadataArtist).findOne({
where: { mbArtistId: artistId },
select: ['tmdbPersonId', 'tmdbThumb', 'tmdbUpdatedAt'],
});
if (existingMetadata?.tmdbPersonId) {
return {
personId: Number(existingMetadata.tmdbPersonId),
profilePath: existingMetadata.tmdbThumb,
};
}
const cleanArtistName = artistName
.split(/(?:(?:feat|ft)\.?\s+|&\s*|,\s+)/i)[0]
.trim()
.replace(/[’']/g, "'");
const searchResults = await this.get<TmdbSearchPersonResponse>(
'/search/person',
{
params: {
query: cleanArtistName,
page: '1',
include_adult: 'false',
language: 'en',
},
},
this.CACHE_TTL
);
const normalizeName = (name: string): string => {
return name
.toLowerCase()
.normalize('NFKD')
.replace(/[\u0300-\u036f]/g, '')
.replace(/[’']/g, "'")
.replace(/[^a-z0-9\s]/g, '')
.trim();
};
const exactMatches = searchResults.results.filter((person) => {
const normalizedPersonName = normalizeName(person.name);
const normalizedArtistName = normalizeName(cleanArtistName);
return normalizedPersonName === normalizedArtistName;
});
if (exactMatches.length > 0) {
const tmdbPersonIds = exactMatches.map((match) => match.id.toString());
const existingMappings = await getRepository(MetadataArtist).find({
where: { tmdbPersonId: In(tmdbPersonIds) },
select: ['mbArtistId', 'tmdbPersonId'],
});
const availableMatches = exactMatches.filter(
(match) =>
!existingMappings.some(
(mapping) =>
mapping.tmdbPersonId === match.id.toString() &&
mapping.mbArtistId !== artistId
)
);
const soundMatches = availableMatches.filter(
(person) => person.known_for_department === 'Sound'
);
const exactMatch =
soundMatches.length > 0
? soundMatches.reduce((prev, current) =>
current.popularity > prev.popularity ? current : prev
)
: availableMatches.length > 0
? availableMatches.reduce((prev, current) =>
current.popularity > prev.popularity ? current : prev
)
: null;
const mapping = {
personId: exactMatch?.id ?? null,
profilePath: exactMatch?.profile_path
? `https://image.tmdb.org/t/p/w500${exactMatch.profile_path}`
: null,
};
await getRepository(MetadataArtist)
.upsert(
{
mbArtistId: artistId,
tmdbPersonId: mapping.personId?.toString() ?? null,
tmdbThumb: mapping.profilePath,
tmdbUpdatedAt: new Date(),
},
{
conflictPaths: ['mbArtistId'],
}
)
.catch((e) => {
logger.error('Failed to save artist metadata', {
label: 'TmdbPersonMapper',
error: e instanceof Error ? e.message : 'Unknown error',
});
});
return mapping;
} else {
await getRepository(MetadataArtist).upsert(
{
mbArtistId: artistId,
tmdbPersonId: null,
tmdbThumb: null,
tmdbUpdatedAt: new Date(),
},
{
conflictPaths: ['mbArtistId'],
}
);
return this.createEmptyResponse();
}
} catch (error) {
await getRepository(MetadataArtist).upsert(
{
mbArtistId: artistId,
tmdbPersonId: null,
tmdbThumb: null,
tmdbUpdatedAt: new Date(),
},
{
conflictPaths: ['mbArtistId'],
}
);
return this.createEmptyResponse();
}
}
public async batchGetMappings(
artists: { artistId: string; artistName: string }[]
): Promise<
Record<string, { personId: number | null; profilePath: string | null }>
> {
if (!artists.length) return {};
const metadataRepository = getRepository(MetadataArtist);
const artistIds = artists.map((a) => a.artistId);
const existingMetadata = await metadataRepository.find({
where: { mbArtistId: In(artistIds) },
select: ['mbArtistId', 'tmdbPersonId', 'tmdbThumb', 'tmdbUpdatedAt'],
});
const results: Record<
string,
{ personId: number | null; profilePath: string | null }
> = {};
const artistsToFetch: { artistId: string; artistName: string }[] = [];
artists.forEach(({ artistId, artistName }) => {
const metadata = existingMetadata.find((m) => m.mbArtistId === artistId);
if (metadata?.tmdbPersonId || metadata?.tmdbThumb) {
results[artistId] = {
personId: metadata.tmdbPersonId
? Number(metadata.tmdbPersonId)
: null,
profilePath: metadata.tmdbThumb,
};
} else if (metadata && !this.isMetadataStale(metadata)) {
results[artistId] = this.createEmptyResponse();
} else {
artistsToFetch.push({ artistId, artistName });
}
});
if (artistsToFetch.length > 0) {
const batchSize = 5;
for (let i = 0; i < artistsToFetch.length; i += batchSize) {
const batch = artistsToFetch.slice(i, i + batchSize);
const batchPromises = batch.map(({ artistId, artistName }) =>
this.fetchMapping(artistId, artistName)
.then((mapping) => {
results[artistId] = mapping;
return true;
})
.catch(() => {
results[artistId] = this.createEmptyResponse();
return false;
})
);
await Promise.all(batchPromises);
}
}
return results;
}
public async searchPerson(
options: SearchPersonOptions
): Promise<TmdbSearchPersonResponse> {
try {
return await this.get<TmdbSearchPersonResponse>(
'/search/person',
{
params: {
query: options.query,
page: options.page?.toString() ?? '1',
include_adult: options.includeAdult ? 'true' : 'false',
language: options.language ?? 'en',
},
},
this.CACHE_TTL
);
} catch (e) {
return {
page: 1,
results: [],
total_pages: 1,
total_results: 0,
};
}
}
}
export default TmdbPersonMapper;
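A hedged sketch of how the mapper above might be used. The import path and wrapping function are assumptions; batchGetMappings and its return shape come from the code shown.

// Import path assumed for illustration.
import TmdbPersonMapper from '@server/api/themoviedb/personMapper';

// Hypothetical call site: resolve TMDB person ids and photos for MusicBrainz artists.
async function mapArtistsToTmdbPeople(
  artists: { artistId: string; artistName: string }[]
) {
  const mapper = new TmdbPersonMapper();
  const mappings = await mapper.batchGetMappings(artists);
  return artists.map(({ artistId, artistName }) => ({
    artistId,
    artistName,
    tmdbPersonId: mappings[artistId]?.personId ?? null,
    profilePath: mappings[artistId]?.profilePath ?? null,
  }));
}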

View File

@@ -22,6 +22,8 @@ export enum DiscoverSliderType {
TMDB_NETWORK,
TMDB_MOVIE_STREAMING_SERVICES,
TMDB_TV_STREAMING_SERVICES,
POPULAR_ALBUMS,
POPULAR_ARTISTS,
}
export const defaultSliders: Partial<DiscoverSlider>[] = [
@@ -97,4 +99,16 @@ export const defaultSliders: Partial<DiscoverSlider>[] = [
isBuiltIn: true,
order: 11,
},
{
type: DiscoverSliderType.POPULAR_ALBUMS,
enabled: true,
isBuiltIn: true,
order: 12,
},
{
type: DiscoverSliderType.POPULAR_ARTISTS,
enabled: true,
isBuiltIn: true,
order: 13,
},
];

View File

@@ -3,6 +3,7 @@ export enum IssueType {
AUDIO = 2,
SUBTITLES = 3,
OTHER = 4,
LYRICS = 5,
}
export enum IssueStatus {
@@ -15,4 +16,5 @@ export const IssueTypeName = {
[IssueType.VIDEO]: 'Video',
[IssueType.SUBTITLES]: 'Subtitle',
[IssueType.OTHER]: 'Other',
[IssueType.LYRICS]: 'Lyrics',
};

View File

@@ -9,6 +9,7 @@ export enum MediaRequestStatus {
export enum MediaType {
MOVIE = 'movie',
TV = 'tv',
MUSIC = 'music',
}
export enum MediaStatus {

View File

@@ -77,7 +77,7 @@ const postgresDevConfig: DataSourceOptions = {
database: process.env.DB_NAME ?? 'seerr',
ssl: buildSslConfig(),
synchronize: false,
migrationsRun: false,
migrationsRun: true,
logging: boolFromEnv('DB_LOG_QUERIES'),
entities: ['server/entity/**/*.ts'],
migrations: ['server/migration/postgres/**/*.ts'],

View File

@@ -18,7 +18,7 @@ import {
import type { ZodNumber, ZodOptional, ZodString } from 'zod';
@Entity()
@Unique(['tmdbId'])
@Unique(['tmdbId', 'mbId'])
export class Blacklist implements BlacklistItem {
@PrimaryGeneratedColumn()
public id: number;
@@ -29,9 +29,13 @@ export class Blacklist implements BlacklistItem {
@Column({ nullable: true, type: 'varchar' })
title?: string;
@Column()
@Column({ nullable: true })
@Index()
public tmdbId: number;
public tmdbId?: number;
@Column({ nullable: true })
@Index()
public mbId?: string;
@ManyToOne(() => User, (user) => user.id, {
eager: true,
@@ -61,7 +65,8 @@ export class Blacklist implements BlacklistItem {
blacklistRequest: {
mediaType: MediaType;
title?: ZodOptional<ZodString>['_output'];
tmdbId: ZodNumber['_output'];
tmdbId?: ZodNumber['_output'];
mbId?: ZodOptional<ZodString>['_output'];
blacklistedTags?: string;
};
},
@@ -74,9 +79,10 @@ export class Blacklist implements BlacklistItem {
const mediaRepository = em.getRepository(Media);
let media = await mediaRepository.findOne({
where: {
tmdbId: blacklistRequest.tmdbId,
},
where:
blacklistRequest.mediaType === 'music'
? { mbId: blacklistRequest.mbId }
: { tmdbId: blacklistRequest.tmdbId },
});
const blacklistRepository = em.getRepository(this);
@@ -86,6 +92,7 @@ export class Blacklist implements BlacklistItem {
if (!media) {
media = new Media({
tmdbId: blacklistRequest.tmdbId,
mbId: blacklistRequest.mbId,
status: MediaStatus.BLACKLISTED,
status4k: MediaStatus.BLACKLISTED,
mediaType: blacklistRequest.mediaType,

View File

@@ -1,3 +1,4 @@
import LidarrAPI from '@server/api/servarr/lidarr';
import RadarrAPI from '@server/api/servarr/radarr';
import SonarrAPI from '@server/api/servarr/sonarr';
import { MediaStatus, MediaType } from '@server/constants/media';
@@ -29,16 +30,16 @@ import Season from './Season';
class Media {
public static async getRelatedMedia(
user: User | undefined,
tmdbIds: number | number[]
ids: number | number[] | string | string[]
): Promise<Media[]> {
const mediaRepository = getRepository(Media);
try {
let finalIds: number[];
if (!Array.isArray(tmdbIds)) {
finalIds = [tmdbIds];
let finalIds: (number | string)[];
if (!Array.isArray(ids)) {
finalIds = [ids];
} else {
finalIds = tmdbIds;
finalIds = ids;
}
if (finalIds.length === 0) {
@@ -50,10 +51,15 @@ class Media {
.leftJoinAndSelect(
'media.watchlists',
'watchlist',
'media.id= watchlist.media and watchlist.requestedBy = :userId',
'media.id = watchlist.media and watchlist.requestedBy = :userId',
{ userId: user?.id }
) //,
.where(' media.tmdbId in (:...finalIds)', { finalIds })
)
.where(
typeof finalIds[0] === 'string'
? 'media.mbId IN (:...finalIds)'
: 'media.tmdbId IN (:...finalIds)',
{ finalIds }
)
.getMany();
return media;
@@ -64,14 +70,17 @@ class Media {
}
public static async getMedia(
id: number,
id: number | string,
mediaType: MediaType
): Promise<Media | undefined> {
const mediaRepository = getRepository(Media);
try {
const media = await mediaRepository.findOne({
where: { tmdbId: id, mediaType: mediaType },
where:
typeof id === 'string'
? { mbId: id, mediaType }
: { tmdbId: id, mediaType },
relations: { requests: true, issues: true },
});
@@ -88,7 +97,7 @@ class Media {
@Column({ type: 'varchar' })
public mediaType: MediaType;
@Column()
@Column({ nullable: true })
@Index()
public tmdbId: number;
@@ -100,6 +109,10 @@ class Media {
@Index()
public imdbId?: string;
@Column({ nullable: true })
@Index()
public mbId?: string;
@Column({ type: 'int', default: MediaStatus.UNKNOWN })
public status: MediaStatus;
@@ -155,7 +168,7 @@ class Media {
})
public mediaAddedAt: Date;
@Column({ nullable: true, type: 'int' })
@Column({ nullable: false, type: 'int', default: 0 })
public serviceId?: number | null;
@Column({ nullable: true, type: 'int' })
@@ -319,6 +332,21 @@ class Media {
}
}
}
if (this.mediaType === MediaType.MUSIC) {
if (this.serviceId !== null && this.externalServiceSlug !== null) {
const settings = getSettings();
const server = settings.lidarr.find(
(lidarr) => lidarr.id === this.serviceId
);
if (server) {
this.serviceUrl = server.externalUrl
? `${server.externalUrl}/album/${this.externalServiceSlug}`
: LidarrAPI.buildUrl(server, `/album/${this.externalServiceSlug}`);
}
}
}
}
@AfterLoad()
@@ -374,6 +402,20 @@ class Media {
);
}
}
if (this.mediaType === MediaType.MUSIC) {
if (
this.externalServiceId !== undefined &&
this.externalServiceId !== null &&
this.serviceId !== undefined &&
this.serviceId !== null
) {
this.downloadStatus = downloadTracker.getMusicProgress(
this.serviceId,
this.externalServiceId
);
}
}
}
}

View File

@@ -1,6 +1,14 @@
import CoverArtArchive from '@server/api/coverartarchive';
import ListenBrainzAPI from '@server/api/listenbrainz';
import type { LbAlbumDetails } from '@server/api/listenbrainz/interfaces';
import MusicBrainz from '@server/api/musicbrainz';
import TheMovieDb from '@server/api/themoviedb';
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
import type { TmdbKeyword } from '@server/api/themoviedb/interfaces';
import type {
TmdbKeyword,
TmdbMovieDetails,
TmdbTvDetails,
} from '@server/api/themoviedb/interfaces';
import {
MediaRequestStatus,
MediaStatus,
@@ -48,6 +56,7 @@ export class MediaRequest {
options: MediaRequestOptions = {}
): Promise<MediaRequest> {
const tmdb = new TheMovieDb();
const listenBrainz = new ListenBrainzAPI();
const mediaRepository = getRepository(Media);
const requestRepository = getRepository(MediaRequest);
const userRepository = getRepository(User);
@@ -115,25 +124,55 @@ export class MediaRequest {
throw new QuotaRestrictedError('Movie Quota exceeded.');
} else if (requestBody.mediaType === MediaType.TV && quotas.tv.restricted) {
throw new QuotaRestrictedError('Series Quota exceeded.');
} else if (
requestBody.mediaType === MediaType.MUSIC &&
quotas.music.restricted
) {
throw new QuotaRestrictedError('Music Quota exceeded.');
}
const tmdbMedia =
const requestedMedia =
requestBody.mediaType === MediaType.MOVIE
? await tmdb.getMovie({ movieId: requestBody.mediaId })
: await tmdb.getTvShow({ tvId: requestBody.mediaId });
: requestBody.mediaType === MediaType.TV
? await tmdb.getTvShow({ tvId: requestBody.mediaId })
: await listenBrainz.getAlbum(requestBody.mediaId.toString());
let media = await mediaRepository.findOne({
where: {
tmdbId: requestBody.mediaId,
mediaType: requestBody.mediaType,
},
where:
requestBody.mediaType === MediaType.MUSIC
? {
mbId: requestBody.mediaId.toString(),
mediaType: requestBody.mediaType,
}
: {
tmdbId: requestBody.mediaId,
mediaType: requestBody.mediaType,
},
relations: ['requests'],
});
const isTmdbMedia = (
media: TmdbMovieDetails | TmdbTvDetails | LbAlbumDetails
): media is TmdbMovieDetails | TmdbTvDetails => {
return 'id' in media;
};
const isLbAlbum = (
media: TmdbMovieDetails | TmdbTvDetails | LbAlbumDetails
): media is LbAlbumDetails => {
return 'release_group_mbid' in media;
};
if (!media) {
media = new Media({
tmdbId: tmdbMedia.id,
tvdbId: requestBody.tvdbId ?? tmdbMedia.external_ids.tvdb_id,
tmdbId: isTmdbMedia(requestedMedia) ? requestedMedia.id : undefined,
mbId: isLbAlbum(requestedMedia)
? requestedMedia.release_group_mbid
: undefined,
tvdbId: isTmdbMedia(requestedMedia)
? requestBody.tvdbId ?? requestedMedia.external_ids?.tvdb_id
: undefined,
status: !requestBody.is4k ? MediaStatus.PENDING : MediaStatus.UNKNOWN,
status4k: requestBody.is4k ? MediaStatus.PENDING : MediaStatus.UNKNOWN,
mediaType: requestBody.mediaType,
@@ -141,7 +180,9 @@ export class MediaRequest {
} else {
if (media.status === MediaStatus.BLACKLISTED) {
logger.warn('Request for media blocked due to being blacklisted', {
tmdbId: tmdbMedia.id,
id: isLbAlbum(requestedMedia)
? requestedMedia.release_group_mbid
: requestedMedia.id,
mediaType: requestBody.mediaType,
label: 'Media Request',
});
@@ -163,7 +204,21 @@ export class MediaRequest {
.leftJoin('request.media', 'media')
.leftJoinAndSelect('request.requestedBy', 'user')
.where('request.is4k = :is4k', { is4k: requestBody.is4k })
.andWhere('media.tmdbId = :tmdbId', { tmdbId: tmdbMedia.id })
.andWhere(
requestBody.mediaType === 'music'
? 'media.mbId = :mbId'
: 'media.tmdbId = :tmdbId',
requestBody.mediaType === 'music'
? {
mbId: (requestedMedia as { release_group_mbid: string })
.release_group_mbid,
}
: {
tmdbId: isTmdbMedia(requestedMedia)
? requestedMedia.id
: undefined,
}
)
.andWhere('media.mediaType = :mediaType', {
mediaType: requestBody.mediaType,
})
@@ -172,12 +227,16 @@ export class MediaRequest {
if (existing && existing.length > 0) {
// If there is an existing movie or music request that isn't declined, don't allow a new one.
if (
requestBody.mediaType === MediaType.MOVIE &&
(requestBody.mediaType === MediaType.MOVIE ||
requestBody.mediaType === MediaType.MUSIC) &&
existing[0].status !== MediaRequestStatus.DECLINED &&
existing[0].status !== MediaRequestStatus.COMPLETED
) {
logger.warn('Duplicate request for media blocked', {
tmdbId: tmdbMedia.id,
id:
requestBody.mediaType === MediaType.MUSIC
? media.mbId
: (requestedMedia as TmdbMovieDetails | TmdbTvDetails).id,
mediaType: requestBody.mediaType,
is4k: requestBody.is4k,
label: 'Media Request',
@@ -217,32 +276,78 @@ export class MediaRequest {
const defaultSonarrId = requestBody.is4k
? settings.sonarr.findIndex((s) => s.is4k && s.isDefault)
: settings.sonarr.findIndex((s) => !s.is4k && s.isDefault);
const defaultLidarrId = settings.lidarr.findIndex((l) => l.isDefault);
const overrideRuleRepository = getRepository(OverrideRule);
const overrideRules = await overrideRuleRepository.find({
where:
requestBody.mediaType === MediaType.MOVIE
? { radarrServiceId: defaultRadarrId }
: { sonarrServiceId: defaultSonarrId },
: requestBody.mediaType === MediaType.TV
? { sonarrServiceId: defaultSonarrId }
: { lidarrServiceId: defaultLidarrId },
});
const appliedOverrideRules = overrideRules.filter((rule) => {
const hasAnimeKeyword =
'results' in tmdbMedia.keywords &&
tmdbMedia.keywords.results.some(
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
);
// Only apply keyword/genre rules for TMDB media
if (isTmdbMedia(requestedMedia)) {
const hasAnimeKeyword =
'results' in requestedMedia.keywords &&
requestedMedia.keywords.results.some(
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
);
// Skip override rules if the media is an anime TV show as anime TV
// is handled by default and override rules do not explicitly include
// the anime keyword
if (
requestBody.mediaType === MediaType.TV &&
hasAnimeKeyword &&
(!rule.keywords ||
!rule.keywords.split(',').map(Number).includes(ANIME_KEYWORD_ID))
) {
return false;
if (
requestBody.mediaType === MediaType.TV &&
hasAnimeKeyword &&
(!rule.keywords ||
!rule.keywords.split(',').map(Number).includes(ANIME_KEYWORD_ID))
) {
return false;
}
if (
rule.genre &&
!rule.genre
.split(',')
.some((genreId) =>
requestedMedia.genres
.map((genre) => genre.id)
.includes(Number(genreId))
)
) {
return false;
}
if (
rule.language &&
!rule.language
.split('|')
.some(
(languageId) => languageId === requestedMedia.original_language
)
) {
return false;
}
if (
rule.keywords &&
!rule.keywords.split(',').some((keywordId) => {
let keywordList: TmdbKeyword[] = [];
if ('keywords' in requestedMedia.keywords) {
keywordList = requestedMedia.keywords.keywords;
} else if ('results' in requestedMedia.keywords) {
keywordList = requestedMedia.keywords.results;
}
return keywordList
.map((keyword: TmdbKeyword) => keyword.id)
.includes(Number(keywordId));
})
) {
return false;
}
}
if (
@@ -253,44 +358,7 @@ export class MediaRequest {
) {
return false;
}
if (
rule.genre &&
!rule.genre
.split(',')
.some((genreId) =>
tmdbMedia.genres
.map((genre) => genre.id)
.includes(Number(genreId))
)
) {
return false;
}
if (
rule.language &&
!rule.language
.split('|')
.some((languageId) => languageId === tmdbMedia.original_language)
) {
return false;
}
if (
rule.keywords &&
!rule.keywords.split(',').some((keywordId) => {
let keywordList: TmdbKeyword[] = [];
if ('keywords' in tmdbMedia.keywords) {
keywordList = tmdbMedia.keywords.keywords;
} else if ('results' in tmdbMedia.keywords) {
keywordList = tmdbMedia.keywords.results;
}
return keywordList
.map((keyword: TmdbKeyword) => keyword.id)
.includes(Number(keywordId));
})
) {
return false;
}
return true;
});
@@ -373,10 +441,47 @@ export class MediaRequest {
isAutoRequest: options.isAutoRequest ?? false,
});
await requestRepository.save(request);
return request;
} else if (requestBody.mediaType === MediaType.MUSIC) {
await mediaRepository.save(media);
const request = new MediaRequest({
type: MediaType.MUSIC,
media,
requestedBy: requestUser,
// If the user is an admin or has the music auto approve permission, automatically approve the request
status: user.hasPermission(
[
Permission.AUTO_APPROVE,
Permission.AUTO_APPROVE_MUSIC,
Permission.MANAGE_REQUESTS,
],
{ type: 'or' }
)
? MediaRequestStatus.APPROVED
: MediaRequestStatus.PENDING,
modifiedBy: user.hasPermission(
[
Permission.AUTO_APPROVE,
Permission.AUTO_APPROVE_MUSIC,
Permission.MANAGE_REQUESTS,
],
{ type: 'or' }
)
? user
: undefined,
serverId: requestBody.serverId,
profileId: profileId,
rootFolder: rootFolder,
tags: tags,
isAutoRequest: options.isAutoRequest ?? false,
});
await requestRepository.save(request);
return request;
} else {
const tmdbMediaShow = tmdbMedia as Awaited<
const tmdbMediaShow = requestedMedia as Awaited<
ReturnType<typeof tmdb.getTvShow>
>;
let requestedSeasons =
@@ -715,9 +820,17 @@ export class MediaRequest {
type: Notification
) {
const tmdb = new TheMovieDb();
const listenbrainz = new ListenBrainzAPI();
const coverArt = new CoverArtArchive();
const musicbrainz = new MusicBrainz();
try {
const mediaType = entity.type === MediaType.MOVIE ? 'Movie' : 'Series';
const mediaType =
entity.type === MediaType.MOVIE
? 'Movie'
: entity.type === MediaType.TV
? 'Series'
: 'Album';
let event: string | undefined;
let notifyAdmin = true;
let notifySystem = true;
@@ -797,6 +910,34 @@ export class MediaRequest {
},
],
});
} else if (entity.type === MediaType.MUSIC && media.mbId) {
const album = await listenbrainz.getAlbum(media.mbId);
const coverArtResponse = await coverArt.getCoverArt(media.mbId);
const coverArtUrl =
coverArtResponse.images[0]?.thumbnails?.['250'] ?? '';
const artistId =
album.release_group_metadata?.artist?.artists[0]?.artist_mbid;
const artistWiki = artistId
? await musicbrainz.getArtistWikipediaExtract({
artistMbid: artistId,
})
: null;
notificationManager.sendNotification(type, {
media,
request: entity,
notifyAdmin,
notifySystem,
notifyUser: notifyAdmin ? undefined : entity.requestedBy,
event,
subject: `${album.release_group_metadata.release_group.name} by ${album.release_group_metadata.artist.name}`,
message: truncate(artistWiki?.content ?? '', {
length: 500,
separator: /\s/,
omission: '…',
}),
image: coverArtUrl,
});
}
} catch (e) {
logger.error('Something went wrong sending media notification(s)', {

View File

@@ -0,0 +1,31 @@
import {
Column,
CreateDateColumn,
Entity,
PrimaryGeneratedColumn,
UpdateDateColumn,
} from 'typeorm';
@Entity()
class MetadataAlbum {
@PrimaryGeneratedColumn()
public id: number;
@Column({ unique: true })
public mbAlbumId: string;
@Column({ nullable: true, type: 'varchar' })
public caaUrl: string | null;
@CreateDateColumn()
public createdAt: Date;
@UpdateDateColumn()
public updatedAt: Date;
constructor(init?: Partial<MetadataAlbum>) {
Object.assign(this, init);
}
}
export default MetadataAlbum;

View File

@@ -0,0 +1,43 @@
import { DbAwareColumn } from '@server/utils/DbColumnHelper';
import {
Column,
CreateDateColumn,
Entity,
PrimaryGeneratedColumn,
} from 'typeorm';
@Entity()
class MetadataArtist {
@PrimaryGeneratedColumn()
public id: number;
@Column({ unique: true })
public mbArtistId: string;
@Column({ nullable: true, type: 'varchar' })
public tmdbPersonId: string | null;
@Column({ nullable: true, type: 'varchar' })
public tmdbThumb: string | null;
@DbAwareColumn({ nullable: true, type: 'datetime' })
public tmdbUpdatedAt: Date | null;
@Column({ nullable: true, type: 'varchar' })
public tadbThumb: string | null;
@Column({ nullable: true, type: 'varchar' })
public tadbCover: string | null;
@DbAwareColumn({ nullable: true, type: 'datetime' })
public tadbUpdatedAt: Date | null;
@CreateDateColumn()
public createdAt: Date;
constructor(init?: Partial<MetadataArtist>) {
Object.assign(this, init);
}
}
export default MetadataArtist;

View File

@@ -12,6 +12,9 @@ class OverrideRule {
@Column({ type: 'int', nullable: true })
public sonarrServiceId?: number;
@Column({ type: 'int', nullable: true })
public lidarrServiceId?: number;
@Column({ nullable: true })
public users?: string;

View File

@@ -124,6 +124,12 @@ export class User {
@Column({ nullable: true })
public tvQuotaDays?: number;
@Column({ nullable: true })
public musicQuotaLimit?: number;
@Column({ nullable: true })
public musicQuotaDays?: number;
@OneToOne(() => UserSettings, (settings) => settings.user, {
cascade: true,
eager: true,
@@ -334,6 +340,30 @@ export class User {
).reduce((sum: number, req: MediaRequest) => sum + req.seasonCount, 0)
: 0;
const musicQuotaLimit = !canBypass
? this.musicQuotaLimit ?? defaultQuotas.music.quotaLimit
: 0;
const musicQuotaDays = this.musicQuotaDays ?? defaultQuotas.music.quotaDays;
// Count music requests made during quota period
const musicDate = new Date();
if (musicQuotaDays) {
musicDate.setDate(musicDate.getDate() - musicQuotaDays);
}
const musicQuotaUsed = musicQuotaLimit
? await requestRepository.count({
where: {
requestedBy: {
id: this.id,
},
createdAt: AfterDate(musicDate),
type: MediaType.MUSIC,
status: Not(MediaRequestStatus.DECLINED),
},
})
: 0;
return {
movie: {
days: movieQuotaDays,
@@ -357,6 +387,18 @@ export class User {
restricted:
tvQuotaLimit && tvQuotaLimit - tvQuotaUsed <= 0 ? true : false,
},
music: {
days: musicQuotaDays,
limit: musicQuotaLimit,
used: musicQuotaUsed,
remaining: musicQuotaLimit
? Math.max(0, musicQuotaLimit - musicQuotaUsed)
: undefined,
restricted:
musicQuotaLimit && musicQuotaLimit - musicQuotaUsed <= 0
? true
: false,
},
};
}
}
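For reference, a minimal standalone sketch of how the music portion of the quota response above is derived (the helper name is hypothetical; the expressions mirror the entity code):

// Hypothetical helper mirroring the music quota calculation above.
const musicQuotaStatus = (limit: number, used: number, days: number) => ({
  days,
  limit,
  used,
  remaining: limit ? Math.max(0, limit - used) : undefined,
  restricted: limit && limit - used <= 0 ? true : false,
});
// e.g. a user limited to 5 album requests per 7 days who has used all 5:
// musicQuotaStatus(5, 5, 7) => { days: 7, limit: 5, used: 5, remaining: 0, restricted: true }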

View File

@@ -26,6 +26,7 @@ export class NotFoundError extends Error {
@Entity()
@Unique('UNIQUE_USER_DB', ['tmdbId', 'requestedBy'])
@Unique('UNIQUE_USER_FOREIGN', ['mbId', 'requestedBy'])
export class Watchlist implements WatchlistItem {
@PrimaryGeneratedColumn()
id: number;
@@ -39,9 +40,13 @@ export class Watchlist implements WatchlistItem {
@Column({ type: 'varchar' })
title = '';
@Column()
@Column({ nullable: true })
@Index()
public tmdbId: number;
public tmdbId?: number;
@Column({ nullable: true })
@Index()
public mbId?: string;
@ManyToOne(() => User, (user) => user.watchlists, {
eager: true,
@@ -52,6 +57,7 @@ export class Watchlist implements WatchlistItem {
@ManyToOne(() => Media, (media) => media.watchlists, {
eager: true,
onDelete: 'CASCADE',
nullable: false,
})
public media: Media;
@@ -77,7 +83,8 @@ export class Watchlist implements WatchlistItem {
mediaType: MediaType;
ratingKey?: ZodOptional<ZodString>['_output'];
title?: ZodOptional<ZodString>['_output'];
tmdbId: ZodNumber['_output'];
tmdbId?: ZodNumber['_output'];
mbId?: ZodOptional<ZodString>['_output'];
};
user: User;
}): Promise<Watchlist> {
@@ -85,46 +92,88 @@ export class Watchlist implements WatchlistItem {
const mediaRepository = getRepository(Media);
const tmdb = new TheMovieDb();
const tmdbMedia =
watchlistRequest.mediaType === MediaType.MOVIE
? await tmdb.getMovie({ movieId: watchlistRequest.tmdbId })
: await tmdb.getTvShow({ tvId: watchlistRequest.tmdbId });
let media: Media | null;
const existing = await watchlistRepository
.createQueryBuilder('watchlist')
.leftJoinAndSelect('watchlist.requestedBy', 'user')
.where('user.id = :userId', { userId: user.id })
.andWhere('watchlist.tmdbId = :tmdbId', {
tmdbId: watchlistRequest.tmdbId,
})
.andWhere('watchlist.mediaType = :mediaType', {
mediaType: watchlistRequest.mediaType,
})
.getMany();
if (watchlistRequest.mediaType === MediaType.MUSIC) {
if (!watchlistRequest.mbId) {
throw new Error('MusicBrainz ID is required for music media type');
}
if (existing && existing.length > 0) {
logger.warn('Duplicate request for watchlist blocked', {
tmdbId: watchlistRequest.tmdbId,
mediaType: watchlistRequest.mediaType,
label: 'Watchlist',
const existing = await watchlistRepository
.createQueryBuilder('watchlist')
.leftJoinAndSelect('watchlist.requestedBy', 'user')
.where('user.id = :userId', { userId: user.id })
.andWhere('watchlist.mbId = :mbId', { mbId: watchlistRequest.mbId })
.andWhere('watchlist.mediaType = :mediaType', {
mediaType: watchlistRequest.mediaType,
})
.getMany();
if (existing && existing.length > 0) {
logger.warn('Duplicate request for watchlist blocked', {
mbId: watchlistRequest.mbId,
mediaType: watchlistRequest.mediaType,
label: 'Watchlist',
});
throw new DuplicateWatchlistRequestError();
}
media = await mediaRepository.findOne({
where: { mbId: watchlistRequest.mbId, mediaType: MediaType.MUSIC },
});
throw new DuplicateWatchlistRequestError();
}
if (!media) {
media = new Media({
mbId: watchlistRequest.mbId,
mediaType: MediaType.MUSIC,
});
}
} else {
// For movies/TV, validate tmdbId exists
if (!watchlistRequest.tmdbId) {
throw new Error('TMDB ID is required for movie/TV media types');
}
let media = await mediaRepository.findOne({
where: {
tmdbId: watchlistRequest.tmdbId,
mediaType: watchlistRequest.mediaType,
},
});
const tmdbMedia =
watchlistRequest.mediaType === MediaType.MOVIE
? await tmdb.getMovie({ movieId: watchlistRequest.tmdbId })
: await tmdb.getTvShow({ tvId: watchlistRequest.tmdbId });
if (!media) {
media = new Media({
tmdbId: tmdbMedia.id,
tvdbId: tmdbMedia.external_ids.tvdb_id,
mediaType: watchlistRequest.mediaType,
const existing = await watchlistRepository
.createQueryBuilder('watchlist')
.leftJoinAndSelect('watchlist.requestedBy', 'user')
.where('user.id = :userId', { userId: user.id })
.andWhere('watchlist.tmdbId = :tmdbId', {
tmdbId: watchlistRequest.tmdbId,
})
.andWhere('watchlist.mediaType = :mediaType', {
mediaType: watchlistRequest.mediaType,
})
.getMany();
if (existing && existing.length > 0) {
logger.warn('Duplicate request for watchlist blocked', {
tmdbId: watchlistRequest.tmdbId,
mediaType: watchlistRequest.mediaType,
label: 'Watchlist',
});
throw new DuplicateWatchlistRequestError();
}
media = await mediaRepository.findOne({
where: {
tmdbId: watchlistRequest.tmdbId,
mediaType: watchlistRequest.mediaType,
},
});
if (!media) {
media = new Media({
tmdbId: tmdbMedia.id,
tvdbId: tmdbMedia.external_ids.tvdb_id,
mediaType: watchlistRequest.mediaType,
});
}
}
const watchlist = new this({
@@ -139,14 +188,19 @@ export class Watchlist implements WatchlistItem {
}
public static async deleteWatchlist(
tmdbId: Watchlist['tmdbId'],
id: Watchlist['tmdbId'] | Watchlist['mbId'],
user: User
): Promise<Watchlist | null> {
const watchlistRepository = getRepository(this);
const watchlist = await watchlistRepository.findOneBy({
tmdbId,
requestedBy: { id: user.id },
});
// Check if the ID is a number (TMDB) or string (MusicBrainz)
const whereClause =
typeof id === 'number'
? { tmdbId: id, requestedBy: { id: user.id } }
: { mbId: id, requestedBy: { id: user.id } };
const watchlist = await watchlistRepository.findOneBy(whereClause);
if (!watchlist) {
throw new NotFoundError('not Found');
}
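A brief usage sketch of the dual-identifier delete (identifier values illustrative, user assumed to be a loaded User entity):

// Movies and series are removed by their numeric TMDB ID...
await Watchlist.deleteWatchlist(550, user);
// ...while music entries are removed by their MusicBrainz release-group ID string.
await Watchlist.deleteWatchlist('some-release-group-mbid', user);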

View File

@@ -22,7 +22,9 @@ import logger from '@server/logger';
import clearCookies from '@server/middleware/clearcookies';
import routes from '@server/routes';
import avatarproxy from '@server/routes/avatarproxy';
import imageproxy from '@server/routes/imageproxy';
import caaproxy from '@server/routes/caaproxy';
import tadbproxy from '@server/routes/tadbproxy';
import tmdbproxy from '@server/routes/tmdbproxy';
import { appDataPermissions } from '@server/utils/appDataVolume';
import { getAppVersion } from '@server/utils/appVersion';
import createCustomProxyAgent from '@server/utils/customProxyAgent';
@@ -235,8 +237,10 @@ app
server.use('/api/v1', routes);
// Do not set cookies so CDNs can cache them
server.use('/imageproxy', clearCookies, imageproxy);
server.use('/tmdbproxy', clearCookies, tmdbproxy);
server.use('/avatarproxy', clearCookies, avatarproxy);
server.use('/caaproxy', clearCookies, caaproxy);
server.use('/tadbproxy', clearCookies, tadbproxy);
server.get('*', (req, res) => handle(req, res));
server.use(

View File

@@ -2,8 +2,9 @@ import type { User } from '@server/entity/User';
import type { PaginatedResponse } from '@server/interfaces/api/common';
export interface BlacklistItem {
tmdbId: number;
mediaType: 'movie' | 'tv';
tmdbId?: number;
mbId?: string;
mediaType: 'movie' | 'tv' | 'music';
title?: string;
createdAt?: Date;
user?: User;

View File

@@ -7,8 +7,9 @@ export interface GenreSliderItem {
export interface WatchlistItem {
id: number;
ratingKey: string;
tmdbId: number;
mediaType: 'movie' | 'tv';
tmdbId?: number;
mbId?: string;
mediaType: 'movie' | 'tv' | 'music';
title: string;
}

View File

@@ -4,7 +4,7 @@ import type { LanguageProfile } from '@server/api/servarr/sonarr';
export interface ServiceCommonServer {
id: number;
name: string;
is4k: boolean;
is4k?: boolean;
isDefault: boolean;
activeProfileId: number;
activeDirectory: string;

View File

@@ -64,7 +64,10 @@ export interface CacheItem {
export interface CacheResponse {
apiCaches: CacheItem[];
imageCache: Record<'tmdb' | 'avatar', { size: number; imageCount: number }>;
imageCache: Record<
'tmdb' | 'avatar' | 'caa' | 'tadb',
{ size: number; imageCount: number }
>;
dnsCache: {
stats: DnsStats | undefined;
entries: DnsEntries | undefined;

View File

@@ -22,6 +22,7 @@ export interface QuotaStatus {
export interface QuotaResponse {
movie: QuotaStatus;
tv: QuotaStatus;
music: QuotaStatus;
}
export interface UserWatchDataResponse {

View File

@@ -1,9 +1,15 @@
import { MediaType } from '@server/constants/media';
import { z } from 'zod';
export const watchlistCreate = z.object({
ratingKey: z.coerce.string().optional(),
tmdbId: z.coerce.number(),
mediaType: z.nativeEnum(MediaType),
title: z.coerce.string().optional(),
});
export const watchlistCreate = z
.object({
ratingKey: z.coerce.string().optional(),
mediaType: z.nativeEnum(MediaType),
title: z.coerce.string().optional(),
})
.and(
z.union([
z.object({ tmdbId: z.coerce.number() }),
z.object({ mbId: z.coerce.string() }),
])
);
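A quick sketch of what the combined schema accepts (values illustrative): the first object validates the shared fields, and the union then requires either a tmdbId or an mbId.

// Movie/TV payloads satisfy the tmdbId branch of the union:
watchlistCreate.parse({ mediaType: MediaType.MOVIE, tmdbId: 550 });
// Music payloads satisfy the mbId branch instead:
watchlistCreate.parse({ mediaType: MediaType.MUSIC, mbId: 'some-release-group-mbid' });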

View File

@@ -8,6 +8,7 @@ import {
jellyfinFullScanner,
jellyfinRecentScanner,
} from '@server/lib/scanners/jellyfin';
import { lidarrScanner } from '@server/lib/scanners/lidarr';
import { plexFullScanner, plexRecentScanner } from '@server/lib/scanners/plex';
import { radarrScanner } from '@server/lib/scanners/radarr';
import { sonarrScanner } from '@server/lib/scanners/sonarr';
@@ -172,6 +173,21 @@ export const startJobs = (): void => {
cancelFn: () => sonarrScanner.cancel(),
});
// Run full lidarr scan every 24 hours
scheduledJobs.push({
id: 'lidarr-scan',
name: 'Lidarr Scan',
type: 'process',
interval: 'hours',
cronSchedule: jobs['lidarr-scan'].schedule,
job: schedule.scheduleJob(jobs['lidarr-scan'].schedule, () => {
logger.info('Starting scheduled job: Lidarr Scan', { label: 'Jobs' });
lidarrScanner.run();
}),
running: () => lidarrScanner.status().running,
cancelFn: () => lidarrScanner.cancel(),
});
// Checks if media is still available in plex/sonarr/radarr libs
scheduledJobs.push({
id: 'availability-sync',

View File

@@ -2,6 +2,7 @@ import type { JellyfinLibraryItem } from '@server/api/jellyfin';
import JellyfinAPI from '@server/api/jellyfin';
import type { PlexMetadata } from '@server/api/plexapi';
import PlexAPI from '@server/api/plexapi';
import LidarrAPI, { type LidarrAlbum } from '@server/api/servarr/lidarr';
import RadarrAPI, { type RadarrMovie } from '@server/api/servarr/radarr';
import type { SonarrSeason, SonarrSeries } from '@server/api/servarr/sonarr';
import SonarrAPI from '@server/api/servarr/sonarr';
@@ -12,7 +13,11 @@ import Media from '@server/entity/Media';
import MediaRequest from '@server/entity/MediaRequest';
import type Season from '@server/entity/Season';
import { User } from '@server/entity/User';
import type { RadarrSettings, SonarrSettings } from '@server/lib/settings';
import type {
LidarrSettings,
RadarrSettings,
SonarrSettings,
} from '@server/lib/settings';
import { getSettings } from '@server/lib/settings';
import logger from '@server/logger';
import { getHostname } from '@server/utils/getHostname';
@@ -28,6 +33,7 @@ class AvailabilitySync {
private sonarrSeasonsCache: Record<string, SonarrSeason[]>;
private radarrServers: RadarrSettings[];
private sonarrServers: SonarrSettings[];
private lidarrServers: LidarrSettings[];
async run() {
const settings = getSettings();
@@ -38,6 +44,7 @@ class AvailabilitySync {
this.sonarrSeasonsCache = {};
this.radarrServers = settings.radarr.filter((server) => server.syncEnabled);
this.sonarrServers = settings.sonarr.filter((server) => server.syncEnabled);
this.lidarrServers = settings.lidarr.filter((server) => server.syncEnabled);
try {
logger.info(`Starting availability sync...`, {
@@ -451,6 +458,47 @@ class AvailabilitySync {
);
}
}
if (media.mediaType === 'music') {
let musicExists = false;
const existsInLidarr = await this.mediaExistsInLidarr(media);
// Check media server existence (Plex/Jellyfin/Emby)
if (mediaServerType === MediaServerType.PLEX) {
const { existsInPlex } = await this.mediaExistsInPlex(media, false);
if (existsInPlex || existsInLidarr) {
musicExists = true;
logger.info(
`The album [Foreign ID ${media.mbId}] still exists. Preventing removal.`,
{
label: 'AvailabilitySync',
}
);
}
} else if (
mediaServerType === MediaServerType.JELLYFIN ||
mediaServerType === MediaServerType.EMBY
) {
const { existsInJellyfin } = await this.mediaExistsInJellyfin(
media,
false
);
if (existsInJellyfin || existsInLidarr) {
musicExists = true;
logger.info(
`The album [Foreign ID ${media.mbId}] still exists. Preventing removal.`,
{
label: 'AvailabilitySync',
}
);
}
}
if (!musicExists && media.status === MediaStatus.AVAILABLE) {
await this.mediaUpdater(media, false, mediaServerType);
}
}
}
} catch (ex) {
logger.error('Failed to complete availability sync.', {
@@ -558,11 +606,23 @@ class AvailabilitySync {
? media[is4k ? 'jellyfinMediaId4k' : 'jellyfinMediaId']
: null;
}
// Update log message to include music media type
logger.info(
`The ${is4k ? '4K' : 'non-4K'} ${
media.mediaType === 'movie' ? 'movie' : 'show'
} [TMDB ID ${media.tmdbId}] was not found in any ${
media.mediaType === 'movie' ? 'Radarr' : 'Sonarr'
media.mediaType === 'movie'
? 'movie'
: media.mediaType === 'tv'
? 'show'
: 'album'
} [${media.mediaType === 'music' ? 'Foreign ID' : 'TMDB ID'} ${
media.mediaType === 'music' ? media.mbId : media.tmdbId
}] was not found in any ${
media.mediaType === 'movie'
? 'Radarr'
: media.mediaType === 'tv'
? 'Sonarr'
: 'Lidarr'
} and ${
mediaServerType === MediaServerType.PLEX
? 'plex'
@@ -577,8 +637,14 @@ class AvailabilitySync {
} catch (ex) {
logger.debug(
`Failure updating the ${is4k ? '4K' : 'non-4K'} ${
media.mediaType === 'tv' ? 'show' : 'movie'
} [TMDB ID ${media.tmdbId}].`,
media.mediaType === 'movie'
? 'movie'
: media.mediaType === 'tv'
? 'show'
: 'album'
} [${media.mediaType === 'music' ? 'Foreign ID' : 'TMDB ID'} ${
media.mediaType === 'music' ? media.mbId : media.tmdbId
}].`,
{
errorMessage: ex.message,
label: 'Availability Sync',
@@ -838,6 +904,51 @@ class AvailabilitySync {
return seasonExists;
}
private async mediaExistsInLidarr(media: Media): Promise<boolean> {
let existsInLidarr = false;
// Check for availability in all configured Lidarr servers
// If any find the media, we will assume the media exists
for (const server of this.lidarrServers) {
const lidarrAPI = new LidarrAPI({
apiKey: server.apiKey,
url: LidarrAPI.buildUrl(server, '/api/v1'),
});
try {
let lidarr: LidarrAlbum | undefined;
if (media.externalServiceId) {
lidarr = await lidarrAPI.getAlbum({
id: media.externalServiceId,
});
}
if (
lidarr?.statistics &&
lidarr.statistics.totalTrackCount > 0 &&
lidarr.statistics.trackFileCount === lidarr.statistics.totalTrackCount
) {
existsInLidarr = true;
break;
}
} catch (ex) {
if (!ex.message.includes('404')) {
existsInLidarr = true;
logger.debug(
`Failed to retrieve album [Foreign ID ${media.mbId}] from Lidarr.`,
{
errorMessage: ex.message,
label: 'AvailabilitySync',
}
);
}
}
}
return existsInLidarr;
}
// Plex
private async mediaExistsInPlex(
media: Media,
@@ -881,8 +992,14 @@ class AvailabilitySync {
preventSeasonSearch = true;
logger.debug(
`Failure retrieving the ${is4k ? '4K' : 'non-4K'} ${
media.mediaType === 'tv' ? 'show' : 'movie'
} [TMDB ID ${media.tmdbId}] from Plex.`,
media.mediaType === 'movie'
? 'movie'
: media.mediaType === 'tv'
? 'show'
: 'album'
} [${media.mediaType === 'music' ? 'Foreign ID' : 'TMDB ID'} ${
media.mediaType === 'music' ? media.mbId : media.tmdbId
}] from Plex.`,
{
errorMessage: ex.message,
label: 'Availability Sync',
@@ -993,13 +1110,19 @@ class AvailabilitySync {
existsInJellyfin = true;
}
} catch (ex) {
if (!ex.message.includes('404' || '500')) {
if (!ex.message.includes('404') && !ex.message.includes('500')) {
existsInJellyfin = false;
preventSeasonSearch = true;
logger.debug(
`Failure retrieving the ${is4k ? '4K' : 'non-4K'} ${
media.mediaType === 'tv' ? 'show' : 'movie'
} [TMDB ID ${media.tmdbId}] from Jellyfin.`,
media.mediaType === 'movie'
? 'movie'
: media.mediaType === 'tv'
? 'show'
: 'album'
} [${media.mediaType === 'music' ? 'Foreign ID' : 'TMDB ID'} ${
media.mediaType === 'music' ? media.mbId : media.tmdbId
}] from Jellyfin.`,
{
errorMessage: ex.message,
label: 'AvailabilitySync',

View File

@@ -2,8 +2,13 @@ import NodeCache from 'node-cache';
export type AvailableCacheIds =
| 'tmdb'
| 'musicbrainz'
| 'listenbrainz'
| 'covertartarchive'
| 'tadb'
| 'radarr'
| 'sonarr'
| 'lidarr'
| 'rt'
| 'imdb'
| 'github'
@@ -48,8 +53,25 @@ class CacheManager {
stdTtl: 21600,
checkPeriod: 60 * 30,
}),
musicbrainz: new Cache('musicbrainz', 'MusicBrainz API', {
stdTtl: 21600,
checkPeriod: 60 * 30,
}),
listenbrainz: new Cache('listenbrainz', 'ListenBrainz API', {
stdTtl: 21600,
checkPeriod: 60 * 30,
}),
covertartarchive: new Cache('covertartarchive', 'CoverArtArchive API', {
stdTtl: 21600,
checkPeriod: 60 * 30,
}),
tadb: new Cache('tadb', 'The Audio Database API', {
stdTtl: 21600,
checkPeriod: 60 * 30,
}),
radarr: new Cache('radarr', 'Radarr API'),
sonarr: new Cache('sonarr', 'Sonarr API'),
lidarr: new Cache('lidarr', 'Lidarr API'),
rt: new Cache('rt', 'Rotten Tomatoes API', {
stdTtl: 43200,
checkPeriod: 60 * 30,

View File

@@ -1,3 +1,4 @@
import LidarrAPI from '@server/api/servarr/lidarr';
import RadarrAPI from '@server/api/servarr/radarr';
import SonarrAPI from '@server/api/servarr/sonarr';
import { MediaType } from '@server/constants/media';
@@ -27,6 +28,7 @@ export interface DownloadingItem {
class DownloadTracker {
private radarrServers: Record<number, DownloadingItem[]> = {};
private sonarrServers: Record<number, DownloadingItem[]> = {};
private lidarrServers: Record<number, DownloadingItem[]> = {};
public getMovieProgress(
serverId: number,
@@ -54,13 +56,28 @@ class DownloadTracker {
);
}
public getMusicProgress(
serverId: number,
externalServiceId: number
): DownloadingItem[] {
if (!this.lidarrServers[serverId]) {
return [];
}
return this.lidarrServers[serverId].filter(
(item) => item.externalId === externalServiceId
);
}
public async resetDownloadTracker() {
this.radarrServers = {};
this.sonarrServers = {};
this.lidarrServers = {};
}
public updateDownloads() {
this.updateRadarrDownloads();
this.updateSonarrDownloads();
this.updateLidarrDownloads();
}
private async updateRadarrDownloads() {
@@ -219,6 +236,84 @@ class DownloadTracker {
})
);
}
private async updateLidarrDownloads() {
const settings = getSettings();
// Remove duplicate servers
const filteredServers = uniqWith(settings.lidarr, (lidarrA, lidarrB) => {
return (
lidarrA.hostname === lidarrB.hostname &&
lidarrA.port === lidarrB.port &&
lidarrA.baseUrl === lidarrB.baseUrl
);
});
// Load downloads from Lidarr servers
Promise.all(
filteredServers.map(async (server) => {
if (server.syncEnabled) {
const lidarr = new LidarrAPI({
apiKey: server.apiKey,
url: LidarrAPI.buildUrl(server, '/api/v1'),
});
try {
await lidarr.refreshMonitoredDownloads();
const queueItems = await lidarr.getQueue();
this.lidarrServers[server.id] = queueItems.map((item) => ({
externalId: item.albumId,
estimatedCompletionTime: new Date(item.estimatedCompletionTime),
mediaType: MediaType.MUSIC,
size: item.size,
sizeLeft: item.sizeleft,
status: item.status,
timeLeft: item.timeleft,
title: item.title,
downloadId: item.downloadId,
}));
if (queueItems.length > 0) {
logger.debug(
`Found ${queueItems.length} item(s) in progress on Lidarr server: ${server.name}`,
{ label: 'Download Tracker' }
);
}
} catch {
logger.error(
`Unable to get queue from Lidarr server: ${server.name}`,
{
label: 'Download Tracker',
}
);
}
// Duplicate this data to matching servers
const matchingServers = settings.lidarr.filter(
(ls) =>
ls.hostname === server.hostname &&
ls.port === server.port &&
ls.baseUrl === server.baseUrl &&
ls.id !== server.id
);
if (matchingServers.length > 0) {
logger.debug(
`Matching download data to ${matchingServers.length} other Lidarr server(s)`,
{ label: 'Download Tracker' }
);
}
matchingServers.forEach((ms) => {
if (ms.syncEnabled) {
this.lidarrServers[ms.id] = this.lidarrServers[server.id];
}
});
}
})
);
}
}
const downloadTracker = new DownloadTracker();

View File

@@ -7,8 +7,8 @@ import type { NotificationAgentEmail } from '@server/lib/settings';
import { getSettings, NotificationAgentKey } from '@server/lib/settings';
import logger from '@server/logger';
import type { EmailOptions } from 'email-templates';
import * as EmailValidator from 'email-validator';
import path from 'path';
import validator from 'validator';
import { Notification, shouldSendAdminNotification } from '..';
import type { NotificationAgent, NotificationPayload } from './agent';
import { BaseAgent } from './agent';
@@ -71,7 +71,9 @@ class EmailAgent
const mediaType = payload.media
? payload.media.mediaType === MediaType.MOVIE
? 'movie'
: 'series'
: payload.media.mediaType === MediaType.TV
? 'series'
: 'album'
: undefined;
const is4k = payload.request?.is4k;
@@ -113,7 +115,11 @@ class EmailAgent
body = `A request for the following ${mediaType} ${
is4k ? 'in 4K ' : ''
}failed to be added to ${
payload.media?.mediaType === MediaType.MOVIE ? 'Radarr' : 'Sonarr'
payload.media?.mediaType === MediaType.MOVIE
? 'Radarr'
: payload.media?.mediaType === MediaType.TV
? 'Sonarr'
: 'Lidarr'
}:`;
break;
}
@@ -135,7 +141,11 @@ class EmailAgent
timestamp: new Date().toTimeString(),
requestedBy: payload.request.requestedBy.displayName,
actionUrl: applicationUrl
? `${applicationUrl}/${payload.media?.mediaType}/${payload.media?.tmdbId}`
? `${applicationUrl}/${payload.media?.mediaType}/${
payload.media?.mediaType === MediaType.MUSIC
? payload.media?.mbId
: payload.media?.tmdbId
}`
: undefined,
applicationUrl,
applicationTitle,
@@ -221,7 +231,9 @@ class EmailAgent
this.getSettings(),
payload.notifyUser.settings?.pgpKey
);
if (EmailValidator.validate(payload.notifyUser.email)) {
if (
validator.isEmail(payload.notifyUser.email, { require_tld: false })
) {
await email.send(
this.buildMessage(
type,
@@ -283,7 +295,7 @@ class EmailAgent
this.getSettings(),
user.settings?.pgpKey
);
if (EmailValidator.validate(user.email)) {
if (validator.isEmail(user.email, { require_tld: false })) {
await email.send(
this.buildMessage(type, payload, user.email, user.displayName)
);
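Passing require_tld: false to validator.isEmail accepts addresses without a top-level domain, which the default options reject; a quick sketch of the difference:

import validator from 'validator';
validator.isEmail('user@example.com');                        // true
validator.isEmail('user@localhost');                          // false - a TLD is required by default
validator.isEmail('user@localhost', { require_tld: false });  // true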

View File

@@ -47,7 +47,9 @@ class WebPushAgent
const mediaType = payload.media
? payload.media.mediaType === MediaType.MOVIE
? 'movie'
: 'series'
: payload.media.mediaType === MediaType.TV
? 'series'
: 'album'
: undefined;
const is4k = payload.request?.is4k;
@@ -119,7 +121,9 @@ class WebPushAgent
const actionUrl = payload.issue
? `/issues/${payload.issue.id}`
: payload.media
? `/${payload.media.mediaType}/${payload.media.tmdbId}`
? payload.media.mediaType === MediaType.MUSIC
? `/music/${payload.media.mbId}`
: `/${payload.media.mediaType}/${payload.media.tmdbId}`
: undefined;
const actionUrlTitle = actionUrl

View File

@@ -97,6 +97,13 @@ const checkOverseerrMerge = async (): Promise<boolean> => {
media.status = 7;
await mediaRepository.save(media);
}
const media4kToUpdate = await mediaRepository.find({
where: { status4k: 6 },
});
for (const media of media4kToUpdate) {
media.status4k = 7;
await mediaRepository.save(media);
}
} catch (error) {
logger.error('Failed to update Media status from Blacklisted to Deleted', {
label: 'Seerr Migration',

View File

@@ -29,6 +29,9 @@ export enum Permission {
WATCHLIST_VIEW = 134217728,
MANAGE_BLACKLIST = 268435456,
VIEW_BLACKLIST = 1073741824,
AUTO_APPROVE_MUSIC = 2147483648,
REQUEST_MUSIC = 4294967296,
AUTO_REQUEST_MUSIC = 8589934592,
}
export interface PermissionCheckOptions {
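For reference, the three new music permissions continue the existing power-of-two bit-flag sequence:

// AUTO_APPROVE_MUSIC = 2147483648 = 2 ** 31
// REQUEST_MUSIC      = 4294967296 = 2 ** 32
// AUTO_REQUEST_MUSIC = 8589934592 = 2 ** 33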

View File

@@ -28,6 +28,7 @@ export interface MediaIds {
imdbId?: string;
tvdbId?: number;
isHama?: boolean;
mbId?: string;
}
interface ProcessOptions {
@@ -79,11 +80,24 @@ class BaseScanner<T> {
this.updateRate = updateRate ?? UPDATE_RATE;
}
private async getExisting(tmdbId: number, mediaType: MediaType) {
private async getExisting(
id: number | string,
mediaType: MediaType
): Promise<Media | null> {
const mediaRepository = getRepository(Media);
const query: Record<string, any> = {
mediaType,
};
if (mediaType === MediaType.MUSIC) {
query.mbId = id.toString();
} else {
query.tmdbId = Number(id);
}
const existing = await mediaRepository.findOne({
where: { tmdbId: tmdbId, mediaType },
where: query,
});
return existing;
@@ -526,6 +540,93 @@ class BaseScanner<T> {
});
}
protected async processMusic(
mbId: string,
{
serviceId,
externalServiceId,
externalServiceSlug,
mediaAddedAt,
ratingKey,
processing = false,
title = 'Unknown Title',
}: ProcessOptions = {}
): Promise<void> {
const mediaRepository = getRepository(Media);
await this.asyncLock.dispatch(mbId, async () => {
const existing = await mediaRepository.findOne({
where: { mbId, mediaType: MediaType.MUSIC },
});
if (!existing) {
const newMedia = new Media();
newMedia.mbId = mbId;
newMedia.status = processing
? MediaStatus.PROCESSING
: MediaStatus.AVAILABLE;
newMedia.mediaType = MediaType.MUSIC;
newMedia.mediaAddedAt = mediaAddedAt ?? newMedia.mediaAddedAt;
newMedia.ratingKey = ratingKey ?? newMedia.ratingKey;
newMedia.serviceId = serviceId ?? newMedia.serviceId;
newMedia.externalServiceId =
externalServiceId ?? newMedia.externalServiceId;
newMedia.externalServiceSlug =
externalServiceSlug ?? newMedia.externalServiceSlug;
try {
await mediaRepository.save(newMedia);
this.log(`Saved new media: ${title}`);
} catch (err) {
this.log('Failed to save new media', 'error', {
title,
error: err.message,
});
}
} else {
let hasChanges = false;
if (existing.status !== MediaStatus.AVAILABLE && !processing) {
existing.status = MediaStatus.AVAILABLE;
hasChanges = true;
}
if (serviceId && !existing.serviceId) {
existing.serviceId = serviceId;
hasChanges = true;
}
if (externalServiceId && !existing.externalServiceId) {
existing.externalServiceId = externalServiceId;
hasChanges = true;
}
if (externalServiceSlug && !existing.externalServiceSlug) {
existing.externalServiceSlug = externalServiceSlug;
hasChanges = true;
}
if (mediaAddedAt && !existing.mediaAddedAt) {
existing.mediaAddedAt = mediaAddedAt;
hasChanges = true;
}
if (ratingKey && !existing.ratingKey) {
existing.ratingKey = ratingKey;
hasChanges = true;
}
if (hasChanges) {
try {
await mediaRepository.save(existing);
this.log(`Updated existing media: ${title}`);
} catch (err) {
this.log('Failed to update existing media', 'error', {
title,
error: err.message,
});
}
}
}
});
}
/**
* Call startRun from child class whenever a run is starting to
* ensure required values are set

View File

@@ -2,6 +2,7 @@ import animeList from '@server/api/animelist';
import type { JellyfinLibraryItem } from '@server/api/jellyfin';
import JellyfinAPI from '@server/api/jellyfin';
import { getMetadataProvider } from '@server/api/metadata';
import MusicBrainz from '@server/api/musicbrainz';
import TheMovieDb from '@server/api/themoviedb';
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
import type {
@@ -374,9 +375,10 @@ class JellyfinScanner {
) ?? []
).length;
const jellyfinSeasons = await this.jfClient.getSeasons(Id);
for (const season of seasons) {
const JellyfinSeasons = await this.jfClient.getSeasons(Id);
const matchedJellyfinSeason = JellyfinSeasons.find((md) => {
const matchedJellyfinSeason = jellyfinSeasons.find((md) => {
if (tvdbSeasonFromAnidb) {
// In AniDB we don't have the concept of seasons,
// we have multiple shows with only Season 1 (and sometimes a season with index 0 for specials).
@@ -397,38 +399,52 @@ class JellyfinScanner {
// Check if we found the matching season and it has all the available episodes
if (matchedJellyfinSeason) {
// If we have a matched Jellyfin season, get its children metadata so we can check details
const episodes = await this.jfClient.getEpisodes(
Id,
matchedJellyfinSeason.Id
);
//Get count of episodes that are HD and 4K
let totalStandard = 0;
let total4k = 0;
//use for loop to make sure this loop _completes_ in full
//before the next section
for (const episode of episodes) {
let episodeCount = 1;
if (!this.enable4kShow) {
const episodes = await this.jfClient.getEpisodes(
Id,
matchedJellyfinSeason.Id
);
// count number of combined episodes
if (
episode.IndexNumber !== undefined &&
episode.IndexNumberEnd !== undefined
) {
episodeCount =
episode.IndexNumberEnd - episode.IndexNumber + 1;
}
for (const episode of episodes) {
let episodeCount = 1;
// count number of combined episodes
if (
episode.IndexNumber !== undefined &&
episode.IndexNumberEnd !== undefined
) {
episodeCount =
episode.IndexNumberEnd - episode.IndexNumber + 1;
}
if (!this.enable4kShow) {
totalStandard += episodeCount;
} else {
const ExtendedEpisodeData = await this.jfClient.getItemData(
episode.Id
);
}
} else {
// 4K detection enabled - request media info to check resolution
const episodes = await this.jfClient.getEpisodes(
Id,
matchedJellyfinSeason.Id,
{ includeMediaInfo: true }
);
ExtendedEpisodeData?.MediaSources?.some((MediaSource) => {
for (const episode of episodes) {
let episodeCount = 1;
// count number of combined episodes
if (
episode.IndexNumber !== undefined &&
episode.IndexNumberEnd !== undefined
) {
episodeCount =
episode.IndexNumberEnd - episode.IndexNumber + 1;
}
// MediaSources field is included in response when includeMediaInfo is true
// We iterate all MediaSources to detect if episode has both standard AND 4K versions
episode.MediaSources?.some((MediaSource) => {
return MediaSource.MediaStreams.some((MediaStream) => {
if (MediaStream.Type === 'Video') {
if ((MediaStream.Width ?? 0) >= 2000) {
@@ -676,6 +692,106 @@ class JellyfinScanner {
}
}
private async processMusic(jellyfinitem: JellyfinLibraryItem) {
const mediaRepository = getRepository(Media);
const musicBrainz = new MusicBrainz();
try {
const metadata = await this.jfClient.getItemData(jellyfinitem.Id);
const newMedia = new Media();
if (!metadata?.Id) {
logger.debug('No Id metadata for this title. Skipping', {
label: 'Jellyfin Sync',
ratingKey: jellyfinitem.Id,
});
return;
}
newMedia.mbId = metadata.ProviderIds?.MusicBrainzReleaseGroup;
if (!newMedia.mbId && metadata.ProviderIds?.MusicBrainzAlbum) {
try {
const releaseGroupId = await musicBrainz.getReleaseGroup({
releaseId: metadata.ProviderIds.MusicBrainzAlbum,
});
if (releaseGroupId) {
newMedia.mbId = releaseGroupId;
}
} catch (e) {
this.log('Failed to get release group ID', 'error', {
title: metadata.Name,
releaseId: metadata.ProviderIds.MusicBrainzAlbum,
error: e.message,
});
}
}
if (!newMedia.mbId) {
this.log(
'No MusicBrainz Album ID found for this title. Skipping.',
'debug',
{
title: metadata.Name,
}
);
return;
}
await this.asyncLock.dispatch(metadata.Id, async () => {
const existing = await mediaRepository.findOne({
where: { mbId: newMedia.mbId, mediaType: MediaType.MUSIC },
});
if (existing) {
let changedExisting = false;
if (existing.status !== MediaStatus.AVAILABLE) {
existing.status = MediaStatus.AVAILABLE;
existing.mediaAddedAt = new Date(metadata.DateCreated ?? '');
changedExisting = true;
}
if (!existing.mediaAddedAt && !changedExisting) {
existing.mediaAddedAt = new Date(metadata.DateCreated ?? '');
changedExisting = true;
}
if (existing.jellyfinMediaId !== metadata.Id) {
existing.jellyfinMediaId = metadata.Id;
changedExisting = true;
}
if (changedExisting) {
await mediaRepository.save(existing);
this.log(
`Request for ${metadata.Name} exists. New media set to AVAILABLE`,
'info'
);
} else {
this.log(`Album already exists: ${metadata.Name}`);
}
} else {
newMedia.status = MediaStatus.AVAILABLE;
newMedia.mediaType = MediaType.MUSIC;
newMedia.mediaAddedAt = new Date(metadata.DateCreated ?? '');
newMedia.jellyfinMediaId = metadata.Id;
await mediaRepository.save(newMedia);
this.log(`Saved new album: ${metadata.Name}`);
}
});
} catch (e) {
this.log(
`Failed to process Jellyfin item, id: ${jellyfinitem.Id}`,
'error',
{
errorMessage: e.message,
jellyfinitem,
}
);
}
}
private async processItems(slicedItems: JellyfinLibraryItem[]) {
this.processedAnidbSeason = new Map();
await Promise.all(
@@ -684,6 +800,8 @@ class JellyfinScanner {
await this.processMovie(item);
} else if (item.Type === 'Series') {
await this.processShow(item);
} else if (item.Type === 'MusicAlbum') {
await this.processMusic(item);
}
})
);

View File

@@ -0,0 +1,118 @@
import type { LidarrAlbum } from '@server/api/servarr/lidarr';
import LidarrAPI from '@server/api/servarr/lidarr';
import type {
RunnableScanner,
StatusBase,
} from '@server/lib/scanners/baseScanner';
import BaseScanner from '@server/lib/scanners/baseScanner';
import type { LidarrSettings } from '@server/lib/settings';
import { getSettings } from '@server/lib/settings';
import { uniqWith } from 'lodash';
type SyncStatus = StatusBase & {
currentServer: LidarrSettings;
servers: LidarrSettings[];
};
class LidarrScanner
extends BaseScanner<LidarrAlbum>
implements RunnableScanner<SyncStatus>
{
private servers: LidarrSettings[];
private currentServer: LidarrSettings;
private lidarrApi: LidarrAPI;
constructor() {
super('Lidarr Scan', { bundleSize: 50 });
}
public status(): SyncStatus {
return {
running: this.running,
progress: this.progress,
total: this.items.length,
currentServer: this.currentServer,
servers: this.servers,
};
}
public async run(): Promise<void> {
const settings = getSettings();
const sessionId = this.startRun();
try {
this.servers = uniqWith(settings.lidarr, (lidarrA, lidarrB) => {
return (
lidarrA.hostname === lidarrB.hostname &&
lidarrA.port === lidarrB.port &&
lidarrA.baseUrl === lidarrB.baseUrl
);
});
for (const server of this.servers) {
this.currentServer = server;
if (server.syncEnabled) {
this.log(
`Beginning to process Lidarr server: ${server.name}`,
'info'
);
this.lidarrApi = new LidarrAPI({
apiKey: server.apiKey,
url: LidarrAPI.buildUrl(server, '/api/v1'),
});
this.items = await this.lidarrApi.getAlbums();
await this.loop(this.processLidarrAlbum.bind(this), { sessionId });
} else {
this.log(`Sync not enabled. Skipping Lidarr server: ${server.name}`);
}
}
this.log('Lidarr scan complete', 'info');
} catch (e) {
this.log('Scan interrupted', 'error', { errorMessage: e.message });
} finally {
this.endRun(sessionId);
}
}
private async processLidarrAlbum(lidarrAlbum: LidarrAlbum): Promise<void> {
try {
if (!lidarrAlbum.monitored) {
return;
}
const mbId = lidarrAlbum.foreignAlbumId;
if (!mbId) {
this.log(
'No MusicBrainz ID found for this title. Skipping item.',
'debug',
{
title: lidarrAlbum.title,
}
);
return;
}
await this.processMusic(mbId, {
serviceId: this.currentServer.id,
externalServiceId: lidarrAlbum.id,
externalServiceSlug: mbId,
title: lidarrAlbum.title,
processing:
lidarrAlbum.monitored &&
(!lidarrAlbum.statistics ||
lidarrAlbum.statistics.trackFileCount <
lidarrAlbum.statistics.totalTrackCount),
});
} catch (e) {
this.log('Failed to process Lidarr media', 'error', {
errorMessage: e.message,
title: lidarrAlbum.title,
});
}
}
}
export const lidarrScanner = new LidarrScanner();

View File

@@ -1,5 +1,6 @@
import animeList from '@server/api/animelist';
import { getMetadataProvider } from '@server/api/metadata';
import MusicBrainz from '@server/api/musicbrainz';
import type { PlexLibraryItem, PlexMetadata } from '@server/api/plexapi';
import PlexAPI from '@server/api/plexapi';
import TheMovieDb from '@server/api/themoviedb';
@@ -26,6 +27,7 @@ const imdbRegex = new RegExp(/imdb:\/\/(tt[0-9]+)/);
const tmdbRegex = new RegExp(/tmdb:\/\/([0-9]+)/);
const tvdbRegex = new RegExp(/tvdb:\/\/([0-9]+)/);
const tmdbShowRegex = new RegExp(/themoviedb:\/\/([0-9]+)/);
const mbRegex = new RegExp(/mbid:\/\/([0-9a-f-]+)/);
const plexRegex = new RegExp(/plex:\/\//);
// Hama agent uses ASS naming, see details here:
// https://github.com/ZeroQI/Absolute-Series-Scanner/blob/master/README.md#forcing-the-movieseries-id
@@ -95,6 +97,7 @@ class PlexScanner
'info',
{ lastScan: library.lastScan }
);
const mappedType = library.type === 'music' ? 'album' : library.type;
const libraryItems = await this.plexClient.getRecentlyAdded(
library.id,
library.lastScan
@@ -103,7 +106,7 @@ class PlexScanner
addedAt: library.lastScan - 1000 * 60 * 10,
}
: undefined,
library.type
mappedType
);
// Bundle items up by rating keys
@@ -215,6 +218,12 @@ class PlexScanner
plexitem.type === 'season'
) {
await this.processPlexShow(plexitem);
} else if (
plexitem.type === 'artist' ||
plexitem.type === 'album' ||
plexitem.type === 'track'
) {
await this.processPlexMusic(plexitem);
}
} catch (e) {
this.log('Failed to process Plex media', 'error', {
@@ -381,6 +390,60 @@ class PlexScanner
}
}
private async processPlexMusic(plexitem: PlexLibraryItem) {
const ratingKey =
plexitem.grandparentRatingKey ??
plexitem.parentRatingKey ??
plexitem.ratingKey;
let metadata;
try {
metadata = await this.plexClient.getMetadata(ratingKey, {
includeChildren: true,
});
if (metadata.Children?.Metadata) {
const musicBrainz = new MusicBrainz();
for (const album of metadata.Children.Metadata) {
const albumMetadata = await this.plexClient.getMetadata(
album.ratingKey
);
const mbReleaseId = albumMetadata.Guid?.find((g) => {
const id = g.id.toLowerCase();
return id.startsWith('mbid://');
})?.id.replace('mbid://', '');
if (!mbReleaseId) {
this.log('No MusicBrainz ID found for album', 'debug', {
title: album.title,
artist: metadata.title,
});
continue;
}
const releaseGroupId = await musicBrainz.getReleaseGroup({
releaseId: mbReleaseId,
});
if (releaseGroupId) {
await this.processMusic(releaseGroupId, {
mediaAddedAt: new Date(album.addedAt * 1000),
ratingKey: album.ratingKey,
title: album.title,
});
}
}
}
} catch (e) {
this.log('Failed to process music media', 'error', {
errorMessage: e.message,
title: metadata?.title,
});
}
}
private async getMediaIds(plexitem: PlexLibraryItem): Promise<MediaIds> {
let mediaIds: Partial<MediaIds> = {};
// Check if item is using new plex movie/tv agent
@@ -419,6 +482,8 @@ class PlexScanner
} else if (ref.id.match(tvdbRegex)) {
const tvdbMatch = ref.id.match(tvdbRegex)?.[1];
mediaIds.tvdbId = Number(tvdbMatch);
} else if (ref.id.match(mbRegex)) {
mediaIds.mbId = ref.id.match(mbRegex)?.[1] ?? undefined;
}
});
@@ -534,6 +599,12 @@ class PlexScanner
}
}
}
// Check for MusicBrainz
} else if (plexitem.guid.match(mbRegex)) {
const mbMatch = plexitem.guid.match(mbRegex);
if (mbMatch) {
mediaIds.mbId = mbMatch[1];
}
}
if (!mediaIds.tmdbId) {

View File

@@ -1,11 +1,16 @@
import MusicBrainz from '@server/api/musicbrainz';
import type {
MbAlbumResult,
MbArtistResult,
} from '@server/api/musicbrainz/interfaces';
import TheMovieDb from '@server/api/themoviedb';
import type {
TmdbCollectionResult,
TmdbMovieDetails,
TmdbMovieResult,
TmdbPersonDetails,
TmdbPersonResult,
TmdbSearchMovieResponse,
TmdbSearchMultiResponse,
TmdbSearchTvResponse,
TmdbTvDetails,
TmdbTvResult,
@@ -21,6 +26,19 @@ import {
isTvDetails,
} from '@server/utils/typeHelpers';
export type CombinedSearchResponse = {
page: number;
total_pages: number;
total_results: number;
results: (
| MbArtistResult
| MbAlbumResult
| TmdbMovieResult
| TmdbTvResult
| TmdbPersonResult
| TmdbCollectionResult
)[];
};
interface SearchProvider {
pattern: RegExp;
search: ({
@@ -31,7 +49,7 @@ interface SearchProvider {
id: string;
language?: string;
query?: string;
}) => Promise<TmdbSearchMultiResponse>;
}) => Promise<CombinedSearchResponse>;
}
const searchProviders: SearchProvider[] = [];
@@ -214,3 +232,39 @@ searchProviders.push({
};
},
});
searchProviders.push({
pattern: new RegExp(/(?<=musicbrainz:)/),
search: async ({ query }) => {
const musicbrainz = new MusicBrainz();
try {
const albumResults = await musicbrainz.searchAlbum({
query: query || '',
limit: 20,
});
const results: CombinedSearchResponse['results'] = albumResults.map(
(album) =>
({
...album,
media_type: 'album',
} as MbAlbumResult)
);
return {
page: 1,
total_pages: 1,
total_results: results.length,
results,
};
} catch (e) {
return {
page: 1,
total_pages: 1,
total_results: 0,
results: [],
};
}
},
});
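A small sketch of when the provider above is selected: the lookbehind pattern matches only when the query carries the musicbrainz: prefix (stripping the prefix before calling the provider is assumed to happen in the shared dispatch code, which is not shown here):

const musicPattern = new RegExp(/(?<=musicbrainz:)/);
musicPattern.test('musicbrainz:ok computer'); // true  - routed to the MusicBrainz album search
musicPattern.test('ok computer');             // false - presumably falls through to the default TMDB search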

Some files were not shown because too many files have changed in this diff.