mirror of https://github.com/fallenbagel/jellyseerr.git
synced 2025-12-24 02:39:18 -05:00

Compare commits (3 commits): preview-de ... preview-av

| SHA1 |
|---|
| 8ac21bb814 |
| 3f8ebc75d5 |
| 065d3002e0 |
@@ -642,24 +642,6 @@
"contributions": [
"code"
]
},
{
"login": "sudo-kraken",
"name": "Joe Harrison",
"avatar_url": "https://avatars.githubusercontent.com/u/53116754?v=4",
"profile": "https://sudo-kraken.github.io/docs/",
"contributions": [
"infra"
]
},
{
"login": "ale183",
"name": "ale183",
"avatar_url": "https://avatars.githubusercontent.com/u/8809439?v=4",
"profile": "https://github.com/ale183",
"contributions": [
"code"
]
}
]
}
1  .github/PULL_REQUEST_TEMPLATE.md  (vendored)

@@ -4,7 +4,6 @@

#### To-Dos

- [ ] Disclosed any use of AI (see our [policy](https://github.com/fallenbagel/jellyseerr/blob/develop/CONTRIBUTING.md#ai-assistance-notice))
- [ ] Successful build `pnpm build`
- [ ] Translation keys `pnpm i18n:extract`
- [ ] Database migration (if required)
160  .github/workflows/ci.yml  (vendored)

@@ -7,14 +7,6 @@ on:
push:
branches:
- develop
workflow_dispatch:

permissions:
contents: read

concurrency:
group: ci-${{ github.ref }}
cancel-in-progress: true

jobs:
test:
@@ -25,17 +17,14 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Pnpm Setup
uses: pnpm/action-setup@v4
with:
version: 9
- name: Get pnpm store directory
shell: sh
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV

- name: Setup pnpm cache
uses: actions/cache@v4
with:
@@ -43,144 +32,137 @@ jobs:
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-

- name: Install dependencies
env:
HUSKY: 0
run: pnpm install

- name: Lint
run: pnpm lint

- name: Formatting
run: pnpm format:check

- name: Build
run: pnpm build

build:
name: Build (per-arch, native runners)
name: Build & Publish Docker Images
if: github.ref == 'refs/heads/develop' && !contains(github.event.head_commit.message, '[skip ci]')
strategy:
matrix:
include:
- runner: ubuntu-24.04
platform: linux/amd64
arch: amd64
- runner: ubuntu-24.04-arm
platform: linux/arm64
arch: arm64
runs-on: ${{ matrix.runner }}
outputs:
digest-amd64: ${{ steps.set_outputs.outputs.digest-amd64 }}
digest-arm64: ${{ steps.set_outputs.outputs.digest-arm64 }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Warm cache (no push) — ${{ matrix.platform }}
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: ${{ matrix.platform }}
push: false
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=develop
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
cache-from: type=gha,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
provenance: false

publish:
name: Publish multi-arch image
needs: build
runs-on: ubuntu-24.04
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}

- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata
- name: Set lower case owner name
run: |
echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
env:
OWNER: ${{ github.repository_owner }}
- name: Docker metadata
id: meta
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: |
${{ github.repository }}
ghcr.io/${{ github.repository }}
fallenbagel/jellyseerr
ghcr.io/${{ env.OWNER_LC }}/jellyseerr
tags: |
type=raw,value=develop
type=sha
labels: |
org.opencontainers.image.created=${{ steps.ts.outputs.TIMESTAMP }}

- name: Build & Push (multi-arch, single tag)
uses: docker/build-push-action@v6
type=ref,event=branch
type=sha,prefix=,suffix=,format=short
- name: Build and push by digest
id: build
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
platforms: ${{ matrix.platform }}
push: true
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=develop
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
labels: ${{ steps.meta.outputs.labels }}
tags: ${{ steps.meta.outputs.tags }}
cache-from: |
type=gha,scope=linux/amd64
type=gha,scope=linux/arm64
cache-to: type=gha,mode=max
BUILD_DATE=${{ github.event.repository.updated_at }}
outputs: |
type=image,push-by-digest=true,name=fallenbagel/jellyseerr,push=true
type=image,push-by-digest=true,name=ghcr.io/${{ env.OWNER_LC }}/jellyseerr,push=true
cache-from: type=gha,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
provenance: false
- name: Set outputs
id: set_outputs
run: |
platform="${{ matrix.platform == 'linux/amd64' && 'amd64' || 'arm64' }}"
echo "digest-${platform}=${{ steps.build.outputs.digest }}" >> $GITHUB_OUTPUT

merge_and_push:
name: Create and Push Multi-arch Manifest
needs: build
runs-on: ubuntu-24.04
steps:
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set lower case owner name
run: |
echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
env:
OWNER: ${{ github.repository_owner }}
- name: Create and push manifest
run: |
docker manifest create fallenbagel/jellyseerr:develop \
--amend fallenbagel/jellyseerr@${{ needs.build.outputs.digest-amd64 }} \
--amend fallenbagel/jellyseerr@${{ needs.build.outputs.digest-arm64 }}
docker manifest push fallenbagel/jellyseerr:develop

# GHCR manifest
docker manifest create ghcr.io/${{ env.OWNER_LC }}/jellyseerr:develop \
--amend ghcr.io/${{ env.OWNER_LC }}/jellyseerr@${{ needs.build.outputs.digest-amd64 }} \
--amend ghcr.io/${{ env.OWNER_LC }}/jellyseerr@${{ needs.build.outputs.digest-arm64 }}
docker manifest push ghcr.io/${{ env.OWNER_LC }}/jellyseerr:develop

discord:
name: Send Discord Notification
needs: publish
needs: merge_and_push
if: always() && github.event_name != 'pull_request' && !contains(github.event.head_commit.message, '[skip ci]')
runs-on: ubuntu-24.04
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v3
- name: Combine Job Status
id: status
run: |
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ ${{ needs.publish.result }} ]]; then
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo "status=failure" >> $GITHUB_OUTPUT
else
echo "status=${{ needs.publish.result }}" >> $GITHUB_OUTPUT
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi

- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
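The two publishing strategies in this diff differ mainly in how the multi-arch image is assembled: one pushes per-arch images by digest and stitches them with `docker manifest`, the other builds both platforms in one `buildx` push. A minimal shell sketch of the digest-based approach used by the `merge_and_push` job, assuming placeholder digests and that Docker is already logged in to both registries:

```bash
#!/usr/bin/env bash
set -euo pipefail

# GHCR requires a lowercase owner; ${OWNER,,} lowercases the variable in bash.
OWNER="Fallenbagel"
OWNER_LC="${OWNER,,}"   # -> fallenbagel

# Placeholder digests; in the workflow these come from the per-arch build jobs' outputs.
DIGEST_AMD64="sha256:aaaaaaaa"
DIGEST_ARM64="sha256:bbbbbbbb"

# Stitch the per-arch images into one multi-arch tag and push it.
docker manifest create "ghcr.io/${OWNER_LC}/jellyseerr:develop" \
  --amend "ghcr.io/${OWNER_LC}/jellyseerr@${DIGEST_AMD64}" \
  --amend "ghcr.io/${OWNER_LC}/jellyseerr@${DIGEST_ARM64}"
docker manifest push "ghcr.io/${OWNER_LC}/jellyseerr:develop"
```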
29  .github/workflows/codeql.yml  (vendored)

@@ -3,52 +3,39 @@ name: 'CodeQL'
on:
push:
branches: ['develop']
paths-ignore:
- '**/*.md'
- 'docs/**'
pull_request:
branches: ['develop']
paths-ignore:
- '**/*.md'
- 'docs/**'
schedule:
- cron: '50 7 * * 5'

permissions:
contents: read

concurrency:
group: codeql-${{ github.ref }}
cancel-in-progress: true

jobs:
analyze:
name: Analyze
runs-on: ubuntu-24.04
timeout-minutes: 10
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write

strategy:
fail-fast: false
matrix:
language: [actions, javascript]
language: [javascript]

steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
queries: +security-and-quality

- name: Autobuild
uses: github/codeql-action/autobuild@v3
uses: github/codeql-action/autobuild@v2

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v2
with:
category: '/language:${{ matrix.language }}'
20  .github/workflows/conflict_labeler.yml  (vendored)

@@ -2,24 +2,18 @@ name: Merge Conflict Labeler

on:
push:
branches: [develop]
branches:
- develop
pull_request_target:
branches: [develop]
types: [opened, synchronize, reopened]

permissions:
contents: read

concurrency:
group: merge-conflict-${{ github.ref }}
cancel-in-progress: true
branches:
- develop
types: [synchronize]

jobs:
label:
name: Labeling
runs-on: ubuntu-24.04
timeout-minutes: 10
runs-on: ubuntu-latest
if: ${{ github.repository == 'Fallenbagel/jellyseerr' }}
permissions:
contents: read
pull-requests: write
38  .github/workflows/cypress.yml  (vendored)

@@ -2,49 +2,26 @@ name: Cypress Tests

on:
pull_request:
branches: ['*']
paths-ignore:
- '**/*.md'
- 'docs/**'
branches:
- '*'
push:
branches: [develop]
paths-ignore:
- '**/*.md'
- 'docs/**'

permissions:
contents: read

concurrency:
group: cypress-${{ github.ref }}
cancel-in-progress: true
branches:
- develop

jobs:
cypress-run:
name: Cypress Run
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: package.json
node-version: 22
- name: Pnpm Setup
uses: pnpm/action-setup@v4

- name: Setup cypress cache
uses: actions/cache@v4
with:
path: ~/.cache/Cypress
key: ${{ runner.os }}-cypress-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-cypress-store-

version: 9
- name: Cypress run
uses: cypress-io/github-action@v6
with:
@@ -59,7 +36,6 @@ jobs:
# Fix test titles in cypress dashboard
COMMIT_INFO_MESSAGE: ${{github.event.pull_request.title}}
COMMIT_INFO_SHA: ${{github.event.pull_request.head.sha}}

- name: Upload video files
if: always()
uses: actions/upload-artifact@v4
40  .github/workflows/docs-deploy.yml  (vendored)

@@ -8,30 +8,24 @@ on:
- 'docs/**'
- 'gen-docs/**'

permissions:
contents: read

concurrency:
group: pages
cancel-in-progress: true

jobs:
build:
name: Build Docusaurus
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: package.json
node-version: 20

- name: Pnpm Setup
uses: pnpm/action-setup@v4
with:
version: 9

- name: Get pnpm store directory
shell: sh
@@ -52,26 +46,38 @@ jobs:
pnpm install --frozen-lockfile

- name: Build website
working-directory: gen-docs
run: pnpm build
run: |
cd gen-docs
pnpm build

- name: Upload Build Artifact
uses: actions/upload-pages-artifact@v4
uses: actions/upload-pages-artifact@v3
with:
path: gen-docs/build

deploy:
name: Deploy to GitHub Pages
needs: build
runs-on: ubuntu-24.04
concurrency: build-deploy-pages

# Grant GITHUB_TOKEN the permissions required to make a Pages deployment
permissions:
contents: read
pages: write
id-token: write
pages: write # to deploy to Pages
id-token: write # to verify the deployment originates from an appropriate source

# Deploy to the github-pages environment
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}

runs-on: ubuntu-latest
steps:
# - name: Download Build Artifact
# uses: actions/download-artifact@v4
# with:
# name: docusaurus-build
# path: gen-docs/build

- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
29  .github/workflows/helm.yml  (vendored)

@@ -4,21 +4,11 @@ on:
push:
branches:
- develop
paths:
- 'charts/**'
- '.github/workflows/release-charts.yml'

permissions:
contents: read

concurrency:
group: helm-charts
cancel-in-progress: true

jobs:
package-helm-chart:
name: Package helm chart
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
permissions:
contents: read
packages: read
@@ -29,7 +19,6 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- name: Install helm
uses: azure/setup-helm@v4
@@ -53,11 +42,16 @@ jobs:
# get current version
current_version=$(grep '^version:' "$chart_path/Chart.yaml" | awk '{print $2}')
# try to get current release version
if oras manifest fetch "ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}:${current_version}" >/dev/null 2>&1; then
echo "No version change for $chart_name. Skipping."
else
set +e
oras discover ghcr.io/${GITHUB_REPOSITORY@L}/${chart_name}:${current_version}
oras_exit_code=$?
set -e

if [ $oras_exit_code -ne 0 ]; then
helm dependency build "$chart_path"
helm package "$chart_path" --destination ./.cr-release-packages
else
echo "No version change for $chart_name. Skipping."
fi
else
echo "Skipping $chart_name: Not a valid Helm chart"
@@ -67,7 +61,7 @@ jobs:
- name: Check if artifacts exist
id: check-artifacts
run: |
if ls .cr-release-packages/*.tgz >/dev/null 2>&1; then
if ls .cr-release-packages/* >/dev/null 2>&1; then
echo "has_artifacts=true" >> $GITHUB_OUTPUT
else
echo "has_artifacts=false" >> $GITHUB_OUTPUT
@@ -83,7 +77,7 @@ jobs:

publish:
name: Publish to ghcr.io
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
permissions:
packages: write # needed for pushing to github registry
id-token: write # needed for signing the images with GitHub OIDC Token
@@ -94,7 +88,6 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- name: Install helm
uses: azure/setup-helm@v4
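The packaging step's version check relies on `oras manifest fetch` (new variant) or `oras discover` (old variant) returning non-zero when a chart version has not been published to GHCR yet. A standalone sketch of the newer check, with a hypothetical chart path:

```bash
#!/usr/bin/env bash
set -euo pipefail

chart_path="charts/jellyseerr"   # hypothetical example path
chart_name="$(basename "$chart_path")"
current_version="$(grep '^version:' "$chart_path/Chart.yaml" | awk '{print $2}')"

# ${GITHUB_REPOSITORY@L} lowercases the repo slug (bash 5.1+), matching GHCR naming rules.
repo_lc="${GITHUB_REPOSITORY@L}"

if oras manifest fetch "ghcr.io/${repo_lc}/${chart_name}:${current_version}" >/dev/null 2>&1; then
  # The tag already exists in the registry, so the chart version has not been bumped.
  echo "No version change for $chart_name. Skipping."
else
  # New version: resolve dependencies and package the chart for release.
  helm dependency build "$chart_path"
  helm package "$chart_path" --destination ./.cr-release-packages
fi
```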
29  .github/workflows/lint-helm-charts.yml  (vendored)

@@ -7,48 +7,27 @@ on:
paths:
- '.github/workflows/lint-helm-charts.yml'
- 'charts/**'
push:
branches: [develop]
paths:
- 'charts/**'

permissions:
contents: read

concurrency:
group: charts-lint-${{ github.ref }}
cancel-in-progress: true

jobs:
lint-test:
runs-on: ubuntu-24.04
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- name: Set up Helm
uses: azure/setup-helm@v4

- name: Set up chart-testing
uses: helm/chart-testing-action@v2

uses: azure/setup-helm@v4.2.0
- name: Ensure documentation is updated
uses: docker://jnorwood/helm-docs:v1.14.2

- name: Set up chart-testing
uses: helm/chart-testing-action@v2.6.1
- name: Run chart-testing (list-changed)
id: list-changed
run: |
changed=$(ct list-changed --target-branch ${{ github.event.repository.default_branch }})
if [[ -n "$changed" ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
echo "$changed"
fi

- name: Run chart-testing
if: steps.list-changed.outputs.changed == 'true'
run: ct lint --target-branch ${{ github.event.repository.default_branch }} --validate-maintainers=false
130  .github/workflows/preview.yml  (vendored)

@@ -4,125 +4,28 @@ on:
push:
tags:
- 'preview-*'
workflow_dispatch:

permissions:
contents: read

concurrency:
group: preview-${{ github.ref }}
cancel-in-progress: true

jobs:
build:
name: Build (per-arch, native runners)
strategy:
matrix:
include:
- runner: ubuntu-24.04
platform: linux/amd64
arch: amd64
- runner: ubuntu-24.04-arm
platform: linux/arm64
arch: arm64
runs-on: ${{ matrix.runner }}
build_and_push:
name: Build & Publish Docker Preview Images
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"

- name: Get the version
id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Derive preview version from tag
id: ver
shell: bash
run: |
TAG="${GITHUB_REF_NAME}"
VER="${TAG#preview-}"
VER="${VER#v}"
echo "version=${VER}" >> "$GITHUB_OUTPUT"
echo "Building preview version: ${VER}"

- name: Warm cache (no push) — ${{ matrix.platform }}
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: ${{ matrix.platform }}
push: false
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ steps.ver.outputs.version }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
cache-from: type=gha,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
provenance: false

publish:
name: Publish multi-arch image
needs: build
runs-on: ubuntu-24.04
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}

- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Derive preview version from tag
id: ver
shell: bash
run: |
TAG="${GITHUB_REF_NAME}"
VER="${TAG#preview-}"
VER="${VER#v}"
echo "version=${VER}" >> "$GITHUB_OUTPUT"
echo "Publishing preview version: ${VER}"

- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ github.repository }}
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=preview-${{ steps.ver.outputs.version }}
labels: |
org.opencontainers.image.version=preview-${{ steps.ver.outputs.version }}
org.opencontainers.image.created=${{ steps.ts.outputs.TIMESTAMP }}

- name: Build & Push (multi-arch, single tag)
uses: docker/build-push-action@v6
- name: Build and push
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
@@ -130,12 +33,7 @@ jobs:
push: true
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ steps.ver.outputs.version }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
labels: ${{ steps.meta.outputs.labels }}
tags: ${{ steps.meta.outputs.tags }}
cache-from: |
type=gha,scope=linux/amd64
type=gha,scope=linux/arm64
cache-to: type=gha,mode=max
provenance: false
BUILD_VERSION=${{ steps.get_version.outputs.VERSION }}
BUILD_DATE=${{ github.event.repository.updated_at }}
tags: |
fallenbagel/jellyseerr:${{ steps.get_version.outputs.VERSION }}
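Both variants derive the build version from the pushed tag; the newer job does it with plain bash prefix stripping rather than slicing `GITHUB_REF`. A self-contained sketch, assuming an example tag name:

```bash
#!/usr/bin/env bash
set -euo pipefail

GITHUB_REF_NAME="preview-v2.7.3"   # example tag; supplied by Actions at runtime

TAG="${GITHUB_REF_NAME}"
VER="${TAG#preview-}"   # strip the "preview-" prefix -> v2.7.3
VER="${VER#v}"          # strip a leading "v" if present -> 2.7.3
echo "Building preview version: ${VER}"
```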
230  .github/workflows/release.yml  (vendored)

@@ -1,14 +1,6 @@
name: Jellyseerr Release
name: Jellyseer Release

on:
workflow_dispatch:

permissions:
contents: read

concurrency:
group: release-${{ github.ref }}
cancel-in-progress: true
on: workflow_dispatch

jobs:
semantic-release:
@@ -16,29 +8,38 @@ jobs:
runs-on: ubuntu-22.04
env:
HUSKY: 0
outputs:
new_release_published: ${{ steps.release.outputs.new_release_published }}
new_release_version: ${{ steps.release.outputs.new_release_version }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: package.json
node-version: 22
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GH_TOKEN }}
- name: Pnpm Setup
uses: pnpm/action-setup@v4
with:
version: 9
- name: Get pnpm store directory
shell: sh
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV

- name: Setup pnpm cache
uses: actions/cache@v4
with:
@@ -46,151 +47,77 @@ jobs:
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-

- name: Install dependencies
run: pnpm install

- name: Release
id: release
uses: cycjimmy/semantic-release-action@v4
with:
extra_plugins: |
@semantic-release/git@10
@semantic-release/changelog@6
@codedependant/semantic-release-docker@5
env:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: npx semantic-release

build:
name: Build (per-arch, native runners)
needs: semantic-release
if: needs.semantic-release.outputs.new_release_published == 'true'
strategy:
matrix:
include:
- runner: ubuntu-24.04
platform: linux/amd64
arch: amd64
- runner: ubuntu-24.04-arm
platform: linux/arm64
arch: arm64
runs-on: ${{ matrix.runner }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Warm cache (no push) — ${{ matrix.platform }}
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: ${{ matrix.platform }}
push: false
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ needs.semantic-release.outputs.new_release_version }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
cache-from: type=gha,scope=${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=${{ matrix.platform }}
provenance: false

publish:
name: Publish multi-arch image
needs: [semantic-release, build]
if: needs.semantic-release.outputs.new_release_published == 'true'
runs-on: ubuntu-24.04
permissions:
contents: read
id-token: write
packages: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
persist-credentials: false

- name: Commit timestamp
id: ts
run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> "$GITHUB_OUTPUT"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}

- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ github.repository }}
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=${{ needs.semantic-release.outputs.new_release_version }}
labels: |
org.opencontainers.image.created=${{ steps.ts.outputs.TIMESTAMP }}

- name: Build & Push (multi-arch, single tag)
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
build-args: |
COMMIT_TAG=${{ github.sha }}
BUILD_VERSION=${{ needs.semantic-release.outputs.new_release_version }}
SOURCE_DATE_EPOCH=${{ steps.ts.outputs.TIMESTAMP }}
labels: ${{ steps.meta.outputs.labels }}
tags: ${{ steps.meta.outputs.tags }}
cache-from: |
type=gha,scope=linux/amd64
type=gha,scope=linux/arm64
cache-to: type=gha,mode=max
provenance: false

- name: Also tag :latest (non-pre-release only)
shell: bash
run: |
VER="${{ needs.semantic-release.outputs.new_release_version }}"
if [[ "$VER" != *"-"* ]]; then
docker buildx imagetools create \
-t ${{ github.repository }}:latest \
${{ github.repository }}:${VER}
docker buildx imagetools create \
-t ghcr.io/${{ github.repository }}:latest \
ghcr.io/${{ github.repository }}:${VER}
fi
# build-snap:
#   name: Build Snap Package (${{ matrix.architecture }})
#   needs: semantic-release
#   runs-on: ubuntu-22.04
#   strategy:
#     fail-fast: false
#     matrix:
#       architecture:
#         - amd64
#         - arm64
#   steps:
#     - name: Checkout Code
#       uses: actions/checkout@v4
#       with:
#         fetch-depth: 0
#     - name: Switch to main branch
#       run: git checkout main
#     - name: Pull latest changes
#       run: git pull
#     - name: Prepare
#       id: prepare
#       run: |
#         git fetch --prune --tags
#         if [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
#           echo "RELEASE=stable" >> $GITHUB_OUTPUT
#         else
#           echo "RELEASE=edge" >> $GITHUB_OUTPUT
#         fi
#     - name: Set Up QEMU
#       uses: docker/setup-qemu-action@v3
#       with:
#         image: tonistiigi/binfmt@sha256:df15403e06a03c2f461c1f7938b171fda34a5849eb63a70e2a2109ed5a778bde
#     - name: Build Snap Package
#       uses: diddlesnaps/snapcraft-multiarch-action@v1
#       id: build
#       with:
#         architecture: ${{ matrix.architecture }}
#     - name: Upload Snap Package
#       uses: actions/upload-artifact@v4
#       with:
#         name: jellyseerr-snap-package-${{ matrix.architecture }}
#         path: ${{ steps.build.outputs.snap }}
#     - name: Review Snap Package
#       uses: diddlesnaps/snapcraft-review-tools-action@v1
#       with:
#         snap: ${{ steps.build.outputs.snap }}
#     - name: Publish Snap Package
#       uses: snapcore/action-publish@v1
#       env:
#         SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAP_LOGIN }}
#       with:
#         snap: ${{ steps.build.outputs.snap }}
#         release: ${{ steps.prepare.outputs.RELEASE }}

discord:
name: Send Discord Notification
needs: publish
needs: semantic-release
if: always()
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v3

- name: Combine Job Status
id: status
run: |
@@ -200,7 +127,6 @@ jobs:
else
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi

- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
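The release workflow only promotes `:latest` when the semantic-release version has no pre-release suffix, i.e. no `-` in the version string. A sketch of that check with a placeholder version and image name (in the workflow these come from the `semantic-release` job and `github.repository`):

```bash
#!/usr/bin/env bash
set -euo pipefail

VER="2.7.3"                      # placeholder; comes from semantic-release outputs
IMAGE="fallenbagel/jellyseerr"   # image name as used elsewhere in these workflows

if [[ "$VER" != *"-"* ]]; then
  # A stable version (e.g. 2.7.3): copy the existing multi-arch manifest to :latest
  # without rebuilding anything.
  docker buildx imagetools create -t "${IMAGE}:latest" "${IMAGE}:${VER}"
else
  echo "Pre-release ${VER}; not updating :latest"
fi
```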
94  .github/workflows/snap.yaml.disabled  (vendored, new file)

@@ -0,0 +1,94 @@
name: Publish Snap

# turn off edge snap builds temporarily and make it manual

# on:
#   push:
#     branches:
#       - develop

on: workflow_dispatch

jobs:
jobs:
name: Job Check
runs-on: ubuntu-22.04
if: "!contains(github.event.head_commit.message, '[skip ci]')"
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.12.1
with:
access_token: ${{ secrets.GITHUB_TOKEN }}

build-snap:
name: Build Snap Package (${{ matrix.architecture }})
needs: jobs
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
architecture:
- amd64
- arm64
steps:
- name: Checkout Code
uses: actions/checkout@v4
- name: Prepare
id: prepare
run: |
git fetch --prune --unshallow --tags
if [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
echo "RELEASE=stable" >> $GITHUB_OUTPUT
else
echo "RELEASE=edge" >> $GITHUB_OUTPUT
fi
- name: Set Up QEMU
uses: docker/setup-qemu-action@v3
- name: Configure Git
run: git config --add safe.directory /data/parts/jellyseerr/src
- name: Build Snap Package
uses: diddlesnaps/snapcraft-multiarch-action@v1
id: build
with:
architecture: ${{ matrix.architecture }}
- name: Upload Snap Package
uses: actions/upload-artifact@v4
with:
name: jellyseerr-snap-package-${{ matrix.architecture }}
path: ${{ steps.build.outputs.snap }}
- name: Review Snap Package
uses: diddlesnaps/snapcraft-review-tools-action@v1
with:
snap: ${{ steps.build.outputs.snap }}
- name: Publish Snap Package
uses: snapcore/action-publish@v1
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAP_LOGIN }}
with:
snap: ${{ steps.build.outputs.snap }}
release: ${{ steps.prepare.outputs.RELEASE }}

discord:
name: Send Discord Notification
needs: build-snap
if: always() && !contains(github.event.head_commit.message, '[skip ci]')
runs-on: ubuntu-22.04
steps:
- name: Get Build Job Status
uses: technote-space/workflow-conclusion-action@v3
- name: Combine Job Status
id: status
run: |
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo "status=failure" >> $GITHUB_OUTPUT
else
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi
- name: Post Status to Discord
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ steps.status.outputs.status }}
title: ${{ github.workflow }}
nofail: true
51  .github/workflows/support.yml  (vendored)

@@ -4,53 +4,22 @@ on:
issues:
types: [labeled, unlabeled, reopened]

permissions:
issues: read

concurrency:
group: support-${{ github.event.issue.number }}
cancel-in-progress: true

jobs:
support:
if: github.event.label.name == 'support' || github.event.action == 'reopened'
runs-on: ubuntu-24.04
permissions:
issues: write
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
NUMBER: ${{ github.event.issue.number }}
ISSUE_AUTHOR: ${{ github.event.issue.user.login }}
runs-on: ubuntu-latest
steps:
- name: Label added, comment and close issue
if: github.event.action == 'labeled' && github.event.label.name == 'support'
shell: bash
env:
BODY: >
:wave: @${{ env.ISSUE_AUTHOR }}, we use the issue tracker exclusively
- uses: dessant/support-requests@v4
with:
github-token: ${{ github.token }}
support-label: 'support'
issue-comment: >
:wave: @{issue-author}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be a support request. Please use our support channels
to get help with Jellyseerr.

- [Discord](https://discord.gg/ckbvBtDJgC)
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue comment "$NUMBER" -R "$GH_REPO" -b "$BODY" || true
retry gh issue close "$NUMBER" -R "$GH_REPO" || true
gh issue lock "$NUMBER" -R "$GH_REPO" -r "off_topic" || true

- name: Reopened or label removed, unlock issue
if: github.event.action == 'unlabeled' && github.event.label.name == 'support'
shell: bash
run: |
retry() { n=0; until "$@"; do n=$((n+1)); [ $n -ge 3 ] && break; echo "retry $n: $*" >&2; sleep 2; done; }
retry gh issue reopen "$NUMBER" -R "$GH_REPO" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true

- name: Remove support label on manual reopen
if: github.event.action == 'reopened'
shell: bash
run: |
gh issue edit "$NUMBER" -R "$GH_REPO" --remove-label "support" || true
gh issue unlock "$NUMBER" -R "$GH_REPO" || true
close-issue: true
lock-issue: true
issue-lock-reason: 'off-topic'
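The replacement support job wraps the GitHub CLI in a small `retry` helper so that transient API failures don't fail the step outright. The same helper, expanded and commented; a sketch assuming `gh` is authenticated and `NUMBER`, `GH_REPO`, and `BODY` are set as in the job's `env`:

```bash
#!/usr/bin/env bash

# Retry a command up to 3 times with a 2-second pause between attempts.
retry() {
  n=0
  until "$@"; do
    n=$((n + 1))
    [ "$n" -ge 3 ] && break
    echo "retry $n: $*" >&2
    sleep 2
  done
}

# Comment on, close, and lock the issue; "|| true" keeps the step from hard-failing.
retry gh issue comment "$NUMBER" -R "$GH_REPO" -b "$BODY" || true
retry gh issue close "$NUMBER" -R "$GH_REPO" || true
gh issue lock "$NUMBER" -R "$GH_REPO" -r "off_topic" || true
```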
18  .github/workflows/test-docs-deploy.yml  (vendored)

@@ -8,32 +8,24 @@ on:
- 'docs/**'
- 'gen-docs/**'

permissions:
contents: read

concurrency:
group: docs-pr-${{ github.ref }}
cancel-in-progress: true

jobs:
test-deploy:
name: Test deployment
runs-on: ubuntu-24.04
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false

- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: package.json
node-version: 20

- name: Pnpm Setup
uses: pnpm/action-setup@v4
with:
version: 9

- name: Get pnpm store directory
shell: sh
@@ -50,7 +42,7 @@ jobs:

- name: Install dependencies
run: |
cd gen-docs
cd gen-docs
pnpm install --frozen-lockfile

- name: Build website
@@ -4,16 +4,11 @@ dist/
config/
CHANGELOG.md
pnpm-lock.yaml
cypress/config/settings.cypress.json

# assets
src/assets/
public/
!public/sw.js
docs/
!/public/
/public/*
!/public/sw.js

# helm charts
**/charts
@@ -21,11 +21,5 @@ module.exports = {
rangeEnd: 0, // default: Infinity
},
},
{
files: 'cypress/config/settings.cypress.json',
options: {
rangeEnd: 0,
},
},
],
};
5  .vscode/settings.json  (vendored)

@@ -20,8 +20,5 @@
"files.associations": {
"globals.css": "tailwindcss"
},
"i18n-ally.localesPaths": [
"src/i18n/locale"
],
"yaml.format.singleQuote": true
"i18n-ally.localesPaths": ["src/i18n/locale"]
}
@@ -2,45 +2,6 @@

All help is welcome and greatly appreciated! If you would like to contribute to the project, the following instructions should get you started...

## AI Assistance Notice

> [!IMPORTANT]
>
> If you are using **any kind of AI assistance** to contribute to Jellyseerr,
> it must be disclosed in the pull request.

If you are using any kind of AI assistance while contributing to Jellyseerr,
**this must be disclosed in the pull request**, along with the extent to
which AI assistance was used (e.g. docs only vs. code generation).
If PR responses are being generated by an AI, disclose that as well.
As a small exception, trivial tab-completion doesn't need to be disclosed,
so long as it is limited to single keywords or short phrases.

An example disclosure:

> This PR was written primarily by Claude Code.

Or a more detailed disclosure:

> I consulted ChatGPT to understand the codebase but the solution
> was fully authored manually by myself.

Failure to disclose this is first and foremost rude to the human operators
on the other end of the pull request, but it also makes it difficult to
determine how much scrutiny to apply to the contribution.

In a perfect world, AI assistance would produce equal or higher quality
work than any human. That isn't the world we live in today, and in most cases
it's generating slop. I say this despite being a fan of and using them
successfully myself (with heavy supervision)!

When using AI assistance, we expect contributors to understand the code
that is produced and be able to answer critical questions about it. It
isn't a maintainer's job to review a PR so broken that it requires
significant rework to be acceptable.

Please be respectful to maintainers and disclose AI assistance.

## Development

### Tools Required
@@ -197,4 +158,4 @@ DB_TYPE="postgres" DB_USER=postgres DB_PASS=postgres pnpm migration:generate ser

## Attribution

This contribution guide was inspired by the [Next.js](https://github.com/vercel/next.js), [Radarr](https://github.com/Radarr/Radarr), [Overseerr](https://github.com/sct/Overseerr) and [Ghostty](https://github.com/ghostty-org/ghostty) contribution guides.
This contribution guide was inspired by the [Next.js](https://github.com/vercel/next.js), [Radarr](https://github.com/Radarr/Radarr), and [Overseerr](https://github.com/sct/Overseerr) contribution guides.
35  Dockerfile

@@ -2,11 +2,8 @@ FROM node:22-alpine AS BUILD_IMAGE

WORKDIR /app

ARG SOURCE_DATE_EPOCH
ARG TARGETPLATFORM
ARG COMMIT_TAG
ENV TARGETPLATFORM=${TARGETPLATFORM:-linux/amd64}
ENV COMMIT_TAG=${COMMIT_TAG}

RUN \
case "${TARGETPLATFORM}" in \
@@ -17,27 +14,47 @@ RUN \
;; \
esac

RUN npm install --global pnpm@10
RUN npm install --global pnpm@9

COPY package.json pnpm-lock.yaml postinstall-win.js ./
RUN CYPRESS_INSTALL_BINARY=0 pnpm install --frozen-lockfile

COPY . ./

ARG COMMIT_TAG
ENV COMMIT_TAG=${COMMIT_TAG}

RUN pnpm build

# remove development dependencies
RUN pnpm prune --prod --ignore-scripts && \
rm -rf src server .next/cache charts gen-docs docs && \
touch config/DOCKER && \
echo "{\"commitTag\": \"${COMMIT_TAG}\"}" > committag.json
RUN pnpm prune --prod --ignore-scripts

RUN rm -rf src server .next/cache charts gen-docs docs

RUN touch config/DOCKER

RUN echo "{\"commitTag\": \"${COMMIT_TAG}\"}" > committag.json


FROM node:22-alpine

# OCI Meta information
ARG BUILD_DATE
ARG BUILD_VERSION
LABEL \
org.opencontainers.image.authors="Fallenbagel" \
org.opencontainers.image.source="https://github.com/fallenbagel/jellyseerr" \
org.opencontainers.image.created=${BUILD_DATE} \
org.opencontainers.image.version=${BUILD_VERSION} \
org.opencontainers.image.title="Jellyseerr" \
org.opencontainers.image.description="Open-source media request and discovery manager for Jellyfin, Plex, and Emby." \
org.opencontainers.image.licenses="MIT"

WORKDIR /app

RUN apk add --no-cache tzdata tini && rm -rf /tmp/*

RUN npm install -g pnpm@10
RUN npm install -g pnpm@9

# copy from build image
COPY --from=BUILD_IMAGE /app ./
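The build arguments referenced by this Dockerfile (`COMMIT_TAG`, `BUILD_VERSION`, `BUILD_DATE`, `SOURCE_DATE_EPOCH`, `TARGETPLATFORM`) are normally supplied by the workflows above. A hedged sketch of an equivalent local invocation, with the values derived from git and a locally chosen image tag:

```bash
#!/usr/bin/env bash
set -euo pipefail

# In CI these come from github.sha, semantic-release, and `git log -1 --pretty=%ct`.
COMMIT_TAG="$(git rev-parse HEAD)"
BUILD_VERSION="develop"
SOURCE_DATE_EPOCH="$(git log -1 --pretty=%ct)"

# "jellyseerr:local" is an illustrative tag, not one used by the workflows.
docker build \
  --build-arg COMMIT_TAG="${COMMIT_TAG}" \
  --build-arg BUILD_VERSION="${BUILD_VERSION}" \
  --build-arg SOURCE_DATE_EPOCH="${SOURCE_DATE_EPOCH}" \
  -t jellyseerr:local \
  -f Dockerfile .
```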
@@ -3,7 +3,7 @@ FROM node:22-alpine
COPY . /app
WORKDIR /app

RUN npm install --global pnpm@10
RUN npm install --global pnpm@9

RUN pnpm install
@@ -11,7 +11,7 @@
<a href="http://translate.jellyseerr.dev/engage/jellyseerr/"><img src="http://translate.jellyseerr.dev/widget/jellyseerr/jellyseerr-frontend/svg-badge.svg" alt="Translation status" /></a>
<a href="https://github.com/fallenbagel/jellyseerr/blob/develop/LICENSE"><img alt="GitHub" src="https://img.shields.io/github/license/fallenbagel/jellyseerr"></a>
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
<a href="#contributors-"><img alt="All Contributors" src="https://img.shields.io/badge/all_contributors-71-orange.svg"/></a>
<a href="#contributors-"><img alt="All Contributors" src="https://img.shields.io/badge/all_contributors-69-orange.svg"/></a>
<!-- ALL-CONTRIBUTORS-BADGE:END -->

**Jellyseerr** is a free and open source software application for managing requests for your media library. It integrates with the media server of your choice: [Jellyfin](https://jellyfin.org), [Plex](https://plex.tv), and [Emby](https://emby.media/). In addition, it integrates with your existing services, such as **[Sonarr](https://sonarr.tv/)**, **[Radarr](https://radarr.video/)**.
@@ -173,10 +173,6 @@ Thanks goes to these wonderful people from Overseerr ([emoji key](https://allcon
<td align="center" valign="top" width="14.28%"><a href="https://github.com/JamsRepos"><img src="https://avatars.githubusercontent.com/u/1347620?v=4?s=100" width="100px;" alt="Jam"/><br /><sub><b>Jam</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=JamsRepos" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="http://www.joelowrance.com"><img src="https://avatars.githubusercontent.com/u/63176?v=4?s=100" width="100px;" alt="Joe Lowrance"/><br /><sub><b>Joe Lowrance</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=joelowrance" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/0xSysR3ll"><img src="https://avatars.githubusercontent.com/u/31414959?v=4?s=100" width="100px;" alt="0xsysr3ll"/><br /><sub><b>0xsysr3ll</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=0xSysR3ll" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://sudo-kraken.github.io/docs/"><img src="https://avatars.githubusercontent.com/u/53116754?v=4?s=100" width="100px;" alt="Joe Harrison"/><br /><sub><b>Joe Harrison</b></sub></a><br /><a href="#infra-sudo-kraken" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
</tr>
<tr>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/ale183"><img src="https://avatars.githubusercontent.com/u/8809439?v=4?s=100" width="100px;" alt="ale183"/><br /><sub><b>ale183</b></sub></a><br /><a href="https://github.com/fallenbagel/jellyseerr/commits?author=ale183" title="Code">💻</a></td>
</tr>
</tbody>
</table>
@@ -3,8 +3,8 @@ kubeVersion: ">=1.23.0-0"
name: jellyseerr-chart
description: Jellyseerr helm chart for Kubernetes
type: application
version: 2.7.0
appVersion: "2.7.3"
version: 2.6.0
appVersion: "2.7.0"
maintainers:
- name: Jellyseerr
url: https://github.com/Fallenbagel/jellyseerr
@@ -1,6 +1,6 @@
# jellyseerr-chart

  
  

Jellyseerr helm chart for Kubernetes

@@ -20,17 +20,6 @@ Jellyseerr helm chart for Kubernetes

Kubernetes: `>=1.23.0-0`

## Update Notes

### Updating to 2.7.0

Jellyseerr is a stateful application and it is not designed to have multiple replicas. In version 2.7.0 we address this by:

- replacing `Deployment` with `StatefulSet`
- removing `replicaCount` value

If `replicaCount` value was used - remove it. Helm update should work fine after that.

## Values

| Key | Type | Default | Description |
@@ -66,6 +55,7 @@ If `replicaCount` value was used - remove it. Helm update should work fine after
| probes.livenessProbe | object | `{}` | Configure liveness probe |
| probes.readinessProbe | object | `{}` | Configure readiness probe |
| probes.startupProbe | string | `nil` | Configure startup probe |
| replicaCount | int | `1` | |
| resources | object | `{}` | |
| securityContext | object | `{}` | |
| service.port | int | `80` | |
@@ -74,6 +64,7 @@ If `replicaCount` value was used - remove it. Helm update should work fine after
| serviceAccount.automount | bool | `true` | Automatically mount a ServiceAccount's API credentials? |
| serviceAccount.create | bool | `true` | Specifies whether a service account should be created |
| serviceAccount.name | string | `""` | If not set and create is true, a name is generated using the fullname template |
| strategy | object | `{"type":"Recreate"}` | Deployment strategy |
| tolerations | list | `[]` | |
| volumeMounts | list | `[]` | Additional volumeMounts on the output StatefulSet definition. |
| volumes | list | `[]` | Additional volumes on the output StatefulSet definition. |
| volumeMounts | list | `[]` | Additional volumeMounts on the output Deployment definition. |
| volumes | list | `[]` | Additional volumes on the output Deployment definition. |
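For the 2.7.0 chart update described above, the migration amounts to dropping `replicaCount` from your values file and running a normal upgrade. A sketch with a hypothetical release name, chart reference, and values file:

```bash
#!/usr/bin/env bash
set -euo pipefail

# "jellyseerr", "./jellyseerr-chart", and my-values.yaml are placeholders for your
# own release name, chart location, and values file. Remove any "replicaCount:"
# entry from the values file first, since 2.7.0 replaces the Deployment with a
# StatefulSet and drops that value.
helm upgrade jellyseerr ./jellyseerr-chart -f my-values.yaml
```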
@@ -14,15 +14,4 @@

{{ template "chart.requirementsSection" . }}

## Update Notes

### Updating to 2.7.0

Jellyseerr is a stateful application and it is not designed to have multiple replicas. In version 2.7.0 we address this by:

- replacing `Deployment` with `StatefulSet`
- removing `replicaCount` value

If `replicaCount` value was used - remove it. Helm update should work fine after that.

{{ template "chart.valuesSection" . }}
@@ -1,11 +1,13 @@
apiVersion: apps/v1
kind: StatefulSet
kind: Deployment
metadata:
name: {{ include "jellyseerr.fullname" . }}
labels:
{{- include "jellyseerr.labels" . | nindent 4 }}
spec:
serviceName: {{ include "jellyseerr.fullname" . }}
replicas: {{ .Values.replicaCount }}
strategy:
type: {{ .Values.strategy.type }}
selector:
matchLabels:
{{- include "jellyseerr.selectorLabels" . | nindent 6 }}
@@ -1,3 +1,5 @@
replicaCount: 1

image:
registry: ghcr.io
repository: fallenbagel/jellyseerr
@@ -10,6 +12,10 @@ imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""

# -- Deployment strategy
strategy:
type: Recreate

# Liveness / Readiness / Startup Probes
probes:
# -- Configure liveness probe
@@ -109,14 +115,14 @@ resources: {}
# cpu: 100m
# memory: 128Mi

# -- Additional volumes on the output StatefulSet definition.
# -- Additional volumes on the output Deployment definition.
volumes: []
# - name: foo
# secret:
# secretName: mysecret
# optional: false

# -- Additional volumeMounts on the output StatefulSet definition.
# -- Additional volumeMounts on the output Deployment definition.
volumeMounts: []
# - name: foo
# mountPath: "/etc/foo"
@@ -6,6 +6,7 @@
"apiKey": "testkey",
"applicationTitle": "Jellyseerr",
"applicationUrl": "",
"csrfProtection": false,
"cacheImages": false,
"defaultPermissions": 32,
"defaultQuotas": {
@@ -82,6 +83,13 @@
"enableMentions": true
}
},
"lunasea": {
"enabled": false,
"types": 0,
"options": {
"webhookUrl": ""
}
},
"slack": {
"enabled": false,
"types": 0,
@@ -179,26 +187,5 @@
"image-cache-cleanup": {
"schedule": "0 0 5 * * *"
}
},
"network": {
"csrfProtection": false,
"trustProxy": false,
"forceIpv4First": false,
"dnsServers": "",
"proxy": {
"enabled": false,
"hostname": "",
"port": 8080,
"useSsl": false,
"user": "",
"password": "",
"bypassFilter": "",
"bypassLocalAddresses": true
},
"dnsCache": {
"enabled": false,
"forceMinTtl": 0,
"forceMaxTtl": -1
}
}
}
@@ -1,148 +0,0 @@
|
||||
describe('TVDB Integration', () => {
|
||||
// Constants for routes and selectors
|
||||
const ROUTES = {
|
||||
home: '/',
|
||||
metadataSettings: '/settings/metadata',
|
||||
tomorrowIsOursTvShow: '/tv/72879',
|
||||
monsterTvShow: '/tv/225634',
|
||||
dragonnBallZKaiAnime: '/tv/61709',
|
||||
};
|
||||
|
||||
const SELECTORS = {
|
||||
sidebarToggle: '[data-testid=sidebar-toggle]',
|
||||
sidebarSettingsMobile: '[data-testid=sidebar-menu-settings-mobile]',
|
||||
settingsNavDesktop: 'nav[data-testid="settings-nav-desktop"]',
|
||||
metadataTestButton: 'button[type="button"]:contains("Test")',
|
||||
metadataSaveButton: '[data-testid="metadata-save-button"]',
|
||||
tmdbStatus: '[data-testid="tmdb-status"]',
|
||||
tvdbStatus: '[data-testid="tvdb-status"]',
|
||||
tvMetadataProviderSelector: '[data-testid="tv-metadata-provider-selector"]',
|
||||
animeMetadataProviderSelector:
|
||||
'[data-testid="anime-metadata-provider-selector"]',
|
||||
seasonSelector: '[data-testid="season-selector"]',
|
||||
season1: 'Season 1',
|
||||
season2: 'Season 2',
|
||||
season3: 'Season 3',
|
||||
episodeList: '[data-testid="episode-list"]',
|
||||
episode9: '9 - Hang Men',
|
||||
};
|
||||
|
||||
// Reusable commands
|
||||
const navigateToMetadataSettings = () => {
|
||||
cy.visit(ROUTES.home);
|
||||
cy.get(SELECTORS.sidebarToggle).click();
|
||||
cy.get(SELECTORS.sidebarSettingsMobile).click();
|
||||
cy.get(
|
||||
`${SELECTORS.settingsNavDesktop} a[href="${ROUTES.metadataSettings}"]`
|
||||
).click();
|
||||
};
|
||||
|
||||
const testAndVerifyMetadataConnection = () => {
|
||||
cy.intercept('POST', '/api/v1/settings/metadatas/test').as(
|
||||
'testConnection'
|
||||
);
|
||||
cy.get(SELECTORS.metadataTestButton).click();
|
||||
return cy.wait('@testConnection');
|
||||
};
|
||||
|
||||
const saveMetadataSettings = (customBody = null) => {
|
||||
if (customBody) {
|
||||
cy.intercept('PUT', '/api/v1/settings/metadatas', (req) => {
|
||||
req.body = customBody;
|
||||
}).as('saveMetadata');
|
||||
} else {
|
||||
// Else just intercept without modifying body
|
||||
cy.intercept('PUT', '/api/v1/settings/metadatas').as('saveMetadata');
|
||||
}
|
||||
|
||||
cy.get(SELECTORS.metadataSaveButton).click();
|
||||
return cy.wait('@saveMetadata');
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
// Perform login
|
||||
cy.login(Cypress.env('ADMIN_EMAIL'), Cypress.env('ADMIN_PASSWORD'));
|
||||
|
||||
// Navigate to Metadata settings
|
||||
navigateToMetadataSettings();
|
||||
|
||||
// Verify we're on the correct settings page
|
||||
cy.contains('h3', 'Metadata Providers').should('be.visible');
|
||||
|
||||
// Configure TVDB as TV provider and test connection
|
||||
cy.get(SELECTORS.tvMetadataProviderSelector).click();
|
||||
|
||||
// Choose the TheTVDB option from the react-select dropdown
|
||||
cy.get('[class*="react-select__option"]').contains('TheTVDB').click();
|
||||
|
||||
// Test the connection
|
||||
testAndVerifyMetadataConnection().then(({ response }) => {
|
||||
expect(response.statusCode).to.equal(200);
|
||||
// Check TVDB connection status
|
||||
cy.get(SELECTORS.tvdbStatus).should('contain', 'Operational');
|
||||
});
|
||||
|
||||
// Save settings
|
||||
saveMetadataSettings({
|
||||
anime: 'tvdb',
|
||||
tv: 'tvdb',
|
||||
}).then(({ response }) => {
|
||||
expect(response.statusCode).to.equal(200);
|
||||
expect(response.body.tv).to.equal('tvdb');
|
||||
});
|
||||
});
|
||||
|
||||
it('should display "Tomorrow is Ours" show information with multiple seasons from TVDB', () => {
|
||||
// Navigate to the TV show
|
||||
cy.visit(ROUTES.tomorrowIsOursTvShow);
|
||||
|
||||
// Verify that multiple seasons are displayed (TMDB has only 1 season, TVDB has multiple)
|
||||
// cy.get(SELECTORS.seasonSelector).should('exist');
|
||||
cy.intercept('/api/v1/tv/225634/season/1').as('season1');
|
||||
// Select Season 2 and verify it loads
|
||||
cy.contains(SELECTORS.season2)
|
||||
.should('be.visible')
|
||||
.scrollIntoView()
|
||||
.click();
|
||||
|
||||
// Verify that episodes are displayed for Season 2
|
||||
cy.contains('260 - Episode 506').should('be.visible');
|
||||
});
|
||||
|
||||
it('Should display "Monster" show information correctly when not existing on TVDB', () => {
|
||||
// Navigate to the TV show
|
||||
cy.visit(ROUTES.monsterTvShow);
|
||||
|
||||
// Intercept season 1 request
|
||||
cy.intercept('/api/v1/tv/225634/season/1').as('season1');
|
||||
|
||||
// Select Season 1
|
||||
cy.contains(SELECTORS.season1)
|
||||
.should('be.visible')
|
||||
.scrollIntoView()
|
||||
.click();
|
||||
|
||||
// Wait for the season data to load
|
||||
cy.wait('@season1');
|
||||
|
||||
// Verify specific episode exists
|
||||
cy.contains(SELECTORS.episode9).should('be.visible');
|
||||
});
|
||||
|
||||
it('should display "Dragon Ball Z Kai" show information with multiple only 2 seasons from TVDB', () => {
|
||||
// Navigate to the TV show
|
||||
cy.visit(ROUTES.dragonnBallZKaiAnime);
|
||||
|
||||
// Intercept season 1 request
|
||||
cy.intercept('/api/v1/tv/61709/season/1').as('season1');
|
||||
|
||||
// Select Season 2 and verify it is visible
|
||||
cy.contains(SELECTORS.season2)
|
||||
.should('be.visible')
|
||||
.scrollIntoView()
|
||||
.click();
|
||||
|
||||
// Select Season 3 and verify it does not exist
|
||||
cy.contains(SELECTORS.season3).should('not.exist');
|
||||
});
|
||||
});
|
||||
@@ -12,7 +12,7 @@ Jellyseerr supports SQLite and PostgreSQL. The database connection can be config
|
||||
If you want to use SQLite, you can simply set the `DB_TYPE` environment variable to `sqlite`. This is the default configuration so even if you don't set any other options, SQLite will be used.
|
||||
|
||||
```dotenv
|
||||
DB_TYPE=sqlite # Which DB engine to use, either sqlite or postgres. The default is sqlite.
|
||||
DB_TYPE="sqlite" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite".
|
||||
CONFIG_DIRECTORY="config" # (optional) The path to the config directory where the db file is stored. The default is "config".
|
||||
DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false".
|
||||
```
|
||||
@@ -24,7 +24,7 @@ DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging.
|
||||
If your PostgreSQL server is configured to accept TCP connections, you can specify the host and port using the `DB_HOST` and `DB_PORT` environment variables. This is useful for remote connections where the server uses a network host and port.
|
||||
|
||||
```dotenv
|
||||
DB_TYPE=postgres # Which DB engine to use, either sqlite or postgres. The default is sqlite.
|
||||
DB_TYPE="postgres" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite".
|
||||
DB_HOST="localhost" # (optional) The host (URL) of the database. The default is "localhost".
|
||||
DB_PORT="5432" # (optional) The port to connect to. The default is "5432".
|
||||
DB_USER= # (required) Username used to connect to the database.
|
||||
@@ -38,7 +38,7 @@ DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging.
|
||||
If your PostgreSQL server is configured to accept Unix socket connections, you can specify the path to the socket directory using the `DB_SOCKET_PATH` environment variable. This is useful for local connections where the server uses a Unix socket.
|
||||
|
||||
```dotenv
|
||||
DB_TYPE=postgres # Which DB engine to use, either sqlite or postgres. The default is sqlite.
|
||||
DB_TYPE="postgres" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite".
|
||||
DB_SOCKET_PATH="/var/run/postgresql" # (required) The path to the PostgreSQL Unix socket directory.
|
||||
DB_USER= # (required) Username used to connect to the database.
|
||||
DB_PASS= # (optional) Password of the user used to connect to the database, depending on the server's authentication configuration.
|
||||
@@ -46,27 +46,6 @@ DB_NAME="jellyseerr" # (optional) The name of the database to connect to. The de
|
||||
DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false".
|
||||
```
|
||||
|
||||
:::info
|
||||
**Finding Your PostgreSQL Socket Path**
|
||||
|
||||
The PostgreSQL socket path varies by operating system and installation method:
|
||||
|
||||
- **Ubuntu/Debian**: `/var/run/postgresql`
|
||||
- **CentOS/RHEL/Fedora**: `/var/run/postgresql`
|
||||
- **macOS (Homebrew)**: `/tmp` or `/opt/homebrew/var/postgresql`
|
||||
- **macOS (Postgres.app)**: `/tmp`
|
||||
- **Windows**: Not applicable (uses TCP connections)
|
||||
|
||||
You can find your socket path by running:
|
||||
```bash
|
||||
# Find PostgreSQL socket directory
|
||||
find /tmp /var/run /run -name ".s.PGSQL.*" 2>/dev/null | head -1 | xargs dirname
|
||||
|
||||
# Or check PostgreSQL configuration
|
||||
sudo -u postgres psql -c "SHOW unix_socket_directories;"
|
||||
```
|
||||
:::
|
||||
|
||||
### SSL configuration
|
||||
|
||||
The following options can be used to further configure SSL. Certificates can be provided as a string or a file path, with the string taking precedence.
|
||||
@@ -77,11 +56,10 @@ DB_SSL_REJECT_UNAUTHORIZED="true" # (optional) Whether to reject ssl connections
|
||||
DB_SSL_CA= # (optional) The CA certificate to verify the connection, provided as a string. The default is "".
|
||||
DB_SSL_CA_FILE= # (optional) The path to a CA certificate to verify the connection. The default is "".
|
||||
DB_SSL_KEY= # (optional) The private key for the connection in PEM format, provided as a string. The default is "".
|
||||
DB_SSL_KEY_FILE= # (optional) Path to the private key for the connection in PEM format. The default is "".
|
||||
DB_SSL_KEY_FILE= # (optional) Path to the private key for the connection in PEM format. The default is "".
|
||||
DB_SSL_CERT= # (optional) Certificate chain in pem format for the private key, provided as a string. The default is "".
|
||||
DB_SSL_CERT_FILE= # (optional) Path to certificate chain in pem format for the private key. The default is "".
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Migrating from SQLite to PostgreSQL
|
||||
@@ -90,76 +68,15 @@ DB_SSL_CERT_FILE= # (optional) Path to certificate chain in pem format for the p
|
||||
2. Run Jellyseerr to create the tables in the PostgreSQL database
|
||||
3. Stop Jellyseerr
|
||||
4. Run the following command to export the data from the SQLite database and import it into the PostgreSQL database:
|
||||
|
||||
:::info
|
||||
Edit the postgres connection string (without the \{\{ and \}\} brackets) to match your setup.
|
||||
Edit the postgres connection string to match your setup.
|
||||
|
||||
If you don't have or don't want to use docker, you can build the working pgloader version [in this PR](https://github.com/dimitri/pgloader/pull/1531) from source and use the same options as below.
|
||||
:::
|
||||
|
||||
:::caution
|
||||
The most recent release of pgloader has an issue quoting the table columns. Use the version in the docker container to avoid this issue.
|
||||
:::
|
||||
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="docker" label="Using pgloader Container (Recommended)" default>
|
||||
|
||||
**Recommended method**: Use the pgloader container even for standalone Jellyseerr installations. This avoids building from source and ensures compatibility.
|
||||
|
||||
```bash
|
||||
# For standalone installations (no Docker network needed)
|
||||
docker run --rm \
|
||||
-v /path/to/your/config/db.sqlite3:/db.sqlite3:ro \
|
||||
ghcr.io/ralgar/pgloader:pr-1531 \
|
||||
pgloader --with "quote identifiers" --with "data only" \
|
||||
/db.sqlite3 postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
|
||||
```
|
||||
|
||||
**For Docker Compose setups**: Add the network parameter if your PostgreSQL is also in a container:
|
||||
```bash
|
||||
docker run --rm \
|
||||
--network your-jellyseerr-network \
|
||||
-v /path/to/your/config/db.sqlite3:/db.sqlite3:ro \
|
||||
ghcr.io/ralgar/pgloader:pr-1531 \
|
||||
pgloader --with "quote identifiers" --with "data only" \
|
||||
/db.sqlite3 postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="standalone" label="Building pgloader from Source">
|
||||
|
||||
For users who prefer not to use Docker or need a custom build:
|
||||
|
||||
```bash
|
||||
# Clone the repository and checkout the working version
|
||||
git clone https://github.com/dimitri/pgloader.git
|
||||
cd pgloader
|
||||
git fetch origin pull/1531/head:pr-1531
|
||||
git checkout pr-1531
|
||||
|
||||
# Follow the official installation instructions
|
||||
# See: https://github.com/dimitri/pgloader/blob/master/INSTALL.md
|
||||
```
|
||||
|
||||
:::info
|
||||
**Building pgloader from source requires following the complete installation process outlined in the [official pgloader INSTALL.md](https://github.com/dimitri/pgloader/blob/master/INSTALL.md).**
|
||||
|
||||
Please refer to the official documentation for detailed, up-to-date installation instructions.
|
||||
:::
|
||||
|
||||
Once pgloader is built, run the migration:
|
||||
|
||||
```bash
|
||||
# Run migration (adjust path to your config directory)
|
||||
./pgloader --with "quote identifiers" --with "data only" \
|
||||
/path/to/your/config/db.sqlite3 \
|
||||
postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
docker run --rm -v config/db.sqlite3:/db.sqlite3:ro ghcr.io/ralgar/pgloader:pr-1531 pgloader --with "quote identifiers" --with "data only" /db.sqlite3 postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
|
||||
```
|
||||
5. Start Jellyseerr
|
||||
|
||||
@@ -207,62 +207,3 @@ labels:
|
||||
```
|
||||
|
||||
For more information, please refer to the [Traefik documentation](https://doc.traefik.io/traefik/user-guides/docker-compose/basic-example/).
|
||||
|
||||
## Apache2 HTTP Server
|
||||
|
||||
<Tabs groupId="apache2-reverse-proxy" queryString>
|
||||
<TabItem value="subdomain" label="Subdomain">
|
||||
|
||||
Add the following directives to your existing VirtualHost configuration.
|
||||
|
||||
```apache
|
||||
# Jellyseerr
|
||||
ProxyPreserveHost On
|
||||
ProxyPass / http://localhost:5055 retry=0 connectiontimeout=5 timeout=30 keepalive=on
|
||||
ProxyPassReverse / http://localhost:5055
|
||||
RequestHeader set Connection ""
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
|
||||
<TabItem value="subfolder" label="Subfolder">
|
||||
|
||||
:::warning
|
||||
This Apache2 subfolder reverse proxy is an unsupported workaround, and only provided as an example. The filters may stop working when Jellyseerr is updated.
|
||||
|
||||
If you encounter any issues with Jellyseerr while using this workaround, we may ask you to try to reproduce the problem without the Apache2 proxy.
|
||||
:::
|
||||
|
||||
Add the following Location block to your existing Server configuration.
|
||||
|
||||
```apache
|
||||
# Jellyseerr
|
||||
# We will use "/jellyseerr" as subfolder
|
||||
# You can replace it with any that you like
|
||||
<Location /jellyseerr>
|
||||
ProxyPreserveHost On
|
||||
ProxyPass http://localhost:5055 retry=0 connectiontimeout=5 timeout=30 keepalive=on
|
||||
ProxyPassReverse http://localhost:5055
|
||||
RequestHeader set Connection ""
|
||||
|
||||
# Header update, to support subfolder
|
||||
# Please Replace "FQDN" with your domain
|
||||
Header edit location ^/login https://FQDN/jellyseerr/login
|
||||
Header edit location ^/setup https://FQDN/jellyseerr/setup
|
||||
|
||||
AddOutputFilterByType INFLATE;SUBSTITUTE text/html application/javascript application/json
|
||||
SubstituteMaxLineLength 2000K
|
||||
# This is HTML and JS update
|
||||
# Please update "/jellyseerr" if needed
|
||||
Substitute "s|href=\"|href=\"/jellyseerr|inq"
|
||||
Substitute "s|src=\"|src=\"/jellyseerr|inq"
|
||||
Substitute "s|/api/|/jellyseerr/api/|inq"
|
||||
Substitute "s|\"/_next/|\"/jellyseerr/_next/|inq"
|
||||
# This is JSON update
|
||||
Substitute "s|\"/avatarproxy/|\"/jellyseerr/avatarproxy/|inq"
|
||||
</Location>
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
|
||||
</Tabs>
|
||||
|
||||
@@ -15,7 +15,7 @@ import TabItem from '@theme/TabItem';
|
||||
|
||||
### Prerequisites
|
||||
- [Node.js 22.x](https://nodejs.org/en/download/)
|
||||
- [Pnpm 10.x](https://pnpm.io/installation)
|
||||
- [Pnpm 9.x](https://pnpm.io/installation)
|
||||
- [Git](https://git-scm.com/downloads)
|
||||
|
||||
## Unix (Linux, macOS)
|
||||
|
||||
@@ -33,31 +33,20 @@ docker run -d \
|
||||
--name jellyseerr \
|
||||
-e LOG_LEVEL=debug \
|
||||
-e TZ=Asia/Tashkent \
|
||||
-e PORT=5055 \
|
||||
-e PORT=5055 `#optional` \
|
||||
-p 5055:5055 \
|
||||
-v /path/to/appdata/config:/app/config \
|
||||
--restart unless-stopped \
|
||||
fallenbagel/jellyseerr
|
||||
```
|
||||
|
||||
The argument `-e PORT=5055` is optional.
|
||||
|
||||
If you want to add a healthcheck to the above command, you can add the following flags:
|
||||
```
|
||||
--health-cmd "wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1" \
|
||||
--health-start-period 20s \
|
||||
--health-timeout 3s \
|
||||
--health-interval 15s \
|
||||
--health-retries 3 \
|
||||
```
|
||||
|
||||
To run the container as a specific user/group, you may optionally add `--user=[ user | user:group | uid | uid:gid | user:gid | uid:group ]` to the above command.
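For example, a minimal sketch of the same command running as UID/GID `1000:1000` (placeholder IDs; substitute the user and group that own your config directory):

```bash
docker run -d \
  --name jellyseerr \
  --user 1000:1000 \
  -e LOG_LEVEL=debug \
  -e TZ=Asia/Tashkent \
  -p 5055:5055 \
  -v /path/to/appdata/config:/app/config \
  --restart unless-stopped \
  fallenbagel/jellyseerr
```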
|
||||
|
||||
#### Updating:
|
||||
|
||||
Stop and remove the existing container:
|
||||
```bash
|
||||
docker stop jellyseerr && docker rm jellyseerr
|
||||
docker stop jellyseerr && docker rm jellyseerr
|
||||
```
|
||||
Pull the latest image:
|
||||
```bash
|
||||
@@ -94,12 +83,6 @@ services:
|
||||
- 5055:5055
|
||||
volumes:
|
||||
- /path/to/appdata/config:/app/config
|
||||
healthcheck:
|
||||
test: wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1
|
||||
start_period: 20s
|
||||
timeout: 3s
|
||||
interval: 15s
|
||||
retries: 3
|
||||
restart: unless-stopped
|
||||
```
|
||||
|
||||
@@ -154,26 +137,7 @@ Then, create and start the Jellyseerr container:
|
||||
<Tabs groupId="docker-methods" queryString>
|
||||
<TabItem value="docker-cli" label="Docker CLI">
|
||||
```bash
|
||||
docker run -d \
|
||||
--name jellyseerr \
|
||||
-e LOG_LEVEL=debug \
|
||||
-e TZ=Asia/Tashkent \
|
||||
-e PORT=5055 \
|
||||
-p 5055:5055 \
|
||||
-v jellyseerr-data:/app/config \
|
||||
--restart unless-stopped \
|
||||
fallenbagel/jellyseerr
|
||||
```
|
||||
|
||||
The argument `-e PORT=5055` is optional.
|
||||
|
||||
If you want to add a healthcheck to the above command, you can add the following flags:
|
||||
```
|
||||
--health-cmd "wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1" \
|
||||
--health-start-period 20s \
|
||||
--health-timeout 3s \
|
||||
--health-interval 15s \
|
||||
--health-retries 3 \
|
||||
docker run -d --name jellyseerr -e LOG_LEVEL=debug -e TZ=Asia/Tashkent -p 5055:5055 -v "jellyseerr-data:/app/config" --restart unless-stopped fallenbagel/jellyseerr:latest
|
||||
```
|
||||
|
||||
#### Updating:
|
||||
@@ -201,12 +165,6 @@ services:
|
||||
- 5055:5055
|
||||
volumes:
|
||||
- jellyseerr-data:/app/config
|
||||
healthcheck:
|
||||
test: wget --no-verbose --tries=1 --spider http://localhost:5055/api/v1/status || exit 1
|
||||
start_period: 20s
|
||||
timeout: 3s
|
||||
interval: 15s
|
||||
retries: 3
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
title: Kubernetes (Advanced)
|
||||
title: Kubernetes
|
||||
description: Install Jellyseerr in Kubernetes
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
---
|
||||
title: Gotify
|
||||
description: Configure Gotify notifications.
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
# Gotify
|
||||
|
||||
## Configuration
|
||||
|
||||
### Server URL
|
||||
|
||||
Set this to the URL of your Gotify server.
|
||||
|
||||
### Application Token
|
||||
|
||||
Add an application to your Gotify server, and set this field to the generated application token.
|
||||
|
||||
:::info
|
||||
Please refer to the [Gotify API documentation](https://gotify.net/docs) for more details on configuring these notifications.
|
||||
:::
|
||||
@@ -1,29 +0,0 @@
|
||||
---
|
||||
title: ntfy.sh
|
||||
description: Configure ntfy.sh notifications.
|
||||
sidebar_position: 6
|
||||
---
|
||||
|
||||
# ntfy.sh
|
||||
|
||||
## Configuration
|
||||
|
||||
### Server Root URL
|
||||
|
||||
Set this to the URL of your ntfy.sh server.
|
||||
|
||||
### Topic
|
||||
|
||||
Set this to the topic you want to send notifications to.
|
||||
|
||||
### Username + Password authentication (optional)
|
||||
|
||||
Set these to the username and password for your ntfy.sh server.
|
||||
|
||||
### Token authentication (optional)
|
||||
|
||||
Set this to the token for your ntfy.sh server.
|
||||
|
||||
:::info
|
||||
Please refer to the [ntfy.sh API documentation](https://docs.ntfy.sh/) for more details on configuring these notifications.
|
||||
:::
|
||||
@@ -1,23 +0,0 @@
|
||||
---
|
||||
title: Pushbullet
|
||||
description: Configure Pushbullet notifications.
|
||||
sidebar_position: 7
|
||||
---
|
||||
|
||||
# Pushbullet
|
||||
|
||||
:::info
|
||||
Users can optionally configure personal notifications in their user settings.
|
||||
|
||||
User notifications are separate from system notifications, and the available notification types are dependent on user permissions.
|
||||
:::
|
||||
|
||||
## Configuration
|
||||
|
||||
### Access Token
|
||||
|
||||
[Create an access token](https://www.pushbullet.com/#settings) and set it here to grant Jellyseerr access to the Pushbullet API.
|
||||
|
||||
### Channel Tag (optional)
|
||||
|
||||
Optionally, [create a channel](https://www.pushbullet.com/my-channel) to allow other users to follow the notification feed using the specified channel tag.
|
||||
@@ -1,27 +0,0 @@
|
||||
---
|
||||
title: Pushover
|
||||
description: Configure Pushover notifications.
|
||||
sidebar_position: 8
|
||||
---
|
||||
|
||||
# Pushover
|
||||
|
||||
:::info
|
||||
Users can optionally configure personal notifications in their user settings.
|
||||
|
||||
User notifications are separate from system notifications, and the available notification types are dependent on user permissions.
|
||||
:::
|
||||
|
||||
## Configuration
|
||||
|
||||
### Application/API Token
|
||||
|
||||
[Register an application](https://pushover.net/apps/build) and enter the API token in this field. (You can use one of the [official icons in our GitHub repository](https://github.com/fallenbagel/jellyseerr/tree/develop/public) when configuring the application.)
|
||||
|
||||
For more details on registering applications or the API token, please see the [Pushover API documentation](https://pushover.net/api#registration).
|
||||
|
||||
### User Key
|
||||
|
||||
Set this to the user key for your Pushover account. Alternatively, you can set this to a group key to deliver notifications to multiple users.
|
||||
|
||||
For more details, please see the [Pushover API documentation](https://pushover.net/api#identifiers).
|
||||
@@ -1,17 +0,0 @@
|
||||
---
|
||||
title: Slack
|
||||
description: Configure Slack notifications.
|
||||
sidebar_position: 9
|
||||
---
|
||||
|
||||
# Slack
|
||||
|
||||
## Configuration
|
||||
|
||||
### Webhook URL
|
||||
|
||||
Simply [create a webhook](https://my.slack.com/services/new/incoming-webhook/) and enter the URL in this field.
|
||||
|
||||
:::info
|
||||
Please refer to the [Slack API documentation](https://api.slack.com/messaging/webhooks) for more details on configuring these notifications.
|
||||
:::
|
||||
@@ -1,39 +0,0 @@
|
||||
---
|
||||
title: Telegram
|
||||
description: Configure Telegram notifications.
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
# Telegram
|
||||
|
||||
:::info
|
||||
Users can optionally configure personal notifications in their user settings.
|
||||
|
||||
User notifications are separate from system notifications, and the available notification types are dependent on user permissions.
|
||||
:::
|
||||
|
||||
## Configuration
|
||||
|
||||
:::info
|
||||
In order to configure Telegram notifications, you first need to [create a bot](https://telegram.me/BotFather).
|
||||
|
||||
Bots **cannot** initiate conversations with users, so users must have your bot added to a conversation in order to receive notifications.
|
||||
:::
|
||||
|
||||
### Bot Username (optional)
|
||||
|
||||
If this value is configured, users will be able to click a link to start a chat with your bot and configure their own personal notifications.
|
||||
|
||||
The bot username should end with `_bot`, and the `@` prefix should be omitted.
|
||||
|
||||
### Bot Authentication Token
|
||||
|
||||
At the end of the bot creation process, [@BotFather](https://telegram.me/botfather) will provide an authentication token.
|
||||
|
||||
### Chat ID
|
||||
|
||||
To obtain your chat ID, simply create a new group chat, add [@get_id_bot](https://telegram.me/get_id_bot), and issue the `/my_id` command.
|
||||
|
||||
### Send Silently (optional)
|
||||
|
||||
Optionally, notifications can be sent silently. Silent notifications send messages without notification sounds.
|
||||
@@ -1,138 +0,0 @@
|
||||
---
|
||||
title: Webhook
|
||||
description: Configure webhook notifications.
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Webhook
|
||||
|
||||
The webhook notification agent enables you to send a custom JSON payload to any endpoint for specific notification events.
|
||||
|
||||
## Configuration
|
||||
|
||||
### Webhook URL
|
||||
|
||||
The URL you would like to post notifications to. Your JSON will be sent as the body of the request.
|
||||
|
||||
### Authorization Header (optional)
|
||||
|
||||
:::info
|
||||
This is typically not needed. Please refer to your webhook provider's documentation for details.
|
||||
:::
|
||||
|
||||
This value will be sent as an `Authorization` HTTP header.
|
||||
|
||||
### JSON Payload
|
||||
|
||||
Customize the JSON payload to suit your needs. Jellyseerr provides several [template variables](#template-variables) for use in the payload, which will be replaced with the relevant data when the notifications are triggered.
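For illustration, here is a minimal payload sketch using a few of the variables documented below (this is not the application's default template; the keys are arbitrary and can be renamed to whatever your endpoint expects):

```json
{
  "type": "{{notification_type}}",
  "event": "{{event}}",
  "subject": "{{subject}}",
  "message": "{{message}}",
  "image": "{{image}}",
  "{{media}}": [],
  "{{extra}}": []
}
```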
|
||||
|
||||
## Template Variables
|
||||
|
||||
### General
|
||||
|
||||
| Variable | Value |
|
||||
| ----------------------- | ----------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `{{notification_type}}` | The type of notification (e.g. `MEDIA_PENDING` or `ISSUE_COMMENT`) |
|
||||
| `{{event}}` | A friendly description of the notification event |
|
||||
| `{{subject}}` | The notification subject (typically the media title) |
|
||||
| `{{message}}`           | The notification message body (the media overview/synopsis for request notifications; the issue description for issue notifications) |
|
||||
| `{{image}}` | The notification image (typically the media poster) |
|
||||
|
||||
### Notify User
|
||||
|
||||
These variables are for the target recipient of the notification.
|
||||
|
||||
| Variable | Value |
|
||||
| ---------------------------------------- | ------------------------------------------------------------- |
|
||||
| `{{notifyuser_username}}` | The target notification recipient's username |
|
||||
| `{{notifyuser_email}}` | The target notification recipient's email address |
|
||||
| `{{notifyuser_avatar}}` | The target notification recipient's avatar URL |
|
||||
| `{{notifyuser_settings_discordId}}` | The target notification recipient's Discord ID (if set) |
|
||||
| `{{notifyuser_settings_telegramChatId}}` | The target notification recipient's Telegram Chat ID (if set) |
|
||||
|
||||
:::info
|
||||
The `notifyuser` variables are not defined for the following request notification types, as they are intended for application administrators rather than end users:
|
||||
|
||||
- Request Pending Approval
|
||||
- Request Automatically Approved
|
||||
- Request Processing Failed
|
||||
|
||||
On the other hand, the `notifyuser` variables _will_ be replaced with the requesting user's information for the below notification types:
|
||||
|
||||
- Request Approved
|
||||
- Request Declined
|
||||
- Request Available
|
||||
|
||||
If you would like to use the requesting user's information in your webhook, please instead include the relevant variables from the [Request](#request) section below.
|
||||
:::
|
||||
|
||||
### Special
|
||||
|
||||
The following variables must be used as a key in the JSON payload (e.g., `"{{extra}}": []`).
|
||||
|
||||
| Variable | Value |
|
||||
| ------------- | ------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `{{media}}` | The relevant media object |
|
||||
| `{{request}}` | The relevant request object |
|
||||
| `{{issue}}` | The relevant issue object |
|
||||
| `{{comment}}` | The relevant issue comment object |
|
||||
| `{{extra}}` | The "extra" array of additional data for certain notifications (e.g., season/episode numbers for series-related notifications) |
|
||||
|
||||
#### Media
|
||||
|
||||
The `{{media}}` will be `null` if there is no relevant media object for the notification.
|
||||
|
||||
The following special variables are only included in media-related notifications, such as requests.
|
||||
|
||||
| Variable | Value |
|
||||
| -------------------- | -------------------------------------------------------------------------------------------------------------- |
|
||||
| `{{media_type}}` | The media type (`movie` or `tv`) |
|
||||
| `{{media_tmdbid}}` | The media's TMDB ID |
|
||||
| `{{media_tvdbid}}` | The media's TheTVDB ID |
|
||||
| `{{media_status}}` | The media's availability status (`UNKNOWN`, `PENDING`, `PROCESSING`, `PARTIALLY_AVAILABLE`, or `AVAILABLE`) |
|
||||
| `{{media_status4k}}` | The media's 4K availability status (`UNKNOWN`, `PENDING`, `PROCESSING`, `PARTIALLY_AVAILABLE`, or `AVAILABLE`) |
|
||||
|
||||
#### Request
|
||||
|
||||
The `{{request}}` will be `null` if there is no relevant request object for the notification.
|
||||
|
||||
The following special variables are only included in request-related notifications.
|
||||
|
||||
| Variable | Value |
|
||||
| ----------------------------------------- | ----------------------------------------------- |
|
||||
| `{{request_id}}` | The request ID |
|
||||
| `{{requestedBy_username}}` | The requesting user's username |
|
||||
| `{{requestedBy_email}}` | The requesting user's email address |
|
||||
| `{{requestedBy_avatar}}` | The requesting user's avatar URL |
|
||||
| `{{requestedBy_settings_discordId}}` | The requesting user's Discord ID (if set) |
|
||||
| `{{requestedBy_settings_telegramChatId}}` | The requesting user's Telegram Chat ID (if set) |
|
||||
|
||||
#### Issue
|
||||
|
||||
The `{{issue}}` will be `null` if there is no relevant issue object for the notification.
|
||||
|
||||
The following special variables are only included in issue-related notifications.
|
||||
|
||||
| Variable | Value |
|
||||
| ---------------------------------------- | ----------------------------------------------- |
|
||||
| `{{issue_id}}` | The issue ID |
|
||||
| `{{reportedBy_username}}`                | The reporting user's username                   |
|
||||
| `{{reportedBy_email}}`                   | The reporting user's email address              |
|
||||
| `{{reportedBy_avatar}}`                  | The reporting user's avatar URL                 |
|
||||
| `{{reportedBy_settings_discordId}}`      | The reporting user's Discord ID (if set)        |
|
||||
| `{{reportedBy_settings_telegramChatId}}` | The reporting user's Telegram Chat ID (if set)  |
|
||||
|
||||
#### Comment
|
||||
|
||||
The `{{comment}}` will be `null` if there is no relevant comment object for the notification.
|
||||
|
||||
The following special variables are only included in issue comment-related notifications.
|
||||
|
||||
| Variable | Value |
|
||||
| ----------------------------------------- | ----------------------------------------------- |
|
||||
| `{{comment_message}}` | The comment message |
|
||||
| `{{commentedBy_username}}` | The commenting user's username |
|
||||
| `{{commentedBy_email}}` | The commenting user's email address |
|
||||
| `{{commentedBy_avatar}}` | The commenting user's avatar URL |
|
||||
| `{{commentedBy_settings_discordId}}` | The commenting user's Discord ID (if set) |
|
||||
| `{{commentedBy_settings_telegramChatId}}` | The commenting user's Telegram Chat ID (if set) |
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"label": "Plex Integration",
|
||||
"position": 3,
|
||||
"link": {
|
||||
"type": "generated-index",
|
||||
"title": "Plex Integration",
|
||||
"description": "Learn about Jellyseerr's Plex integration features"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
---
|
||||
title: Overview
|
||||
description: Learn about Jellyseerr's Plex integration features
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Plex Features Overview
|
||||
|
||||
Jellyseerr provides integration features that connect with your Plex media server to automate media management tasks.
|
||||
|
||||
## Available Features
|
||||
|
||||
- [Watchlist Auto Request](./plex/watchlist-auto-request) - Automatically request media from your Plex Watchlist
|
||||
- More features coming soon!
|
||||
|
||||
## Prerequisites
|
||||
|
||||
:::info Authentication Required
|
||||
To use any Plex integration features, you must have logged into Jellyseerr at least once with your Plex account.
|
||||
:::
|
||||
|
||||
**Requirements:**
|
||||
- Plex account with access to the configured Plex server
|
||||
- Jellyseerr configured with Plex as the media server
|
||||
- User authentication via Plex login
|
||||
- Appropriate user permissions for specific features
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. Authenticate at least once using your Plex credentials
|
||||
2. Verify you have the necessary permissions for desired features
|
||||
3. Follow individual feature guides for setup instructions
|
||||
|
||||
:::note Server Configuration
|
||||
Plex server configuration is handled by your administrator. If you cannot log in with your Plex account, contact your administrator to verify the server setup.
|
||||
:::
|
||||
@@ -1,95 +0,0 @@
|
||||
---
|
||||
title: Watchlist Auto Request
|
||||
description: Learn how to use the Plex Watchlist Auto Request feature
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Watchlist Auto Request
|
||||
|
||||
The Plex Watchlist Auto Request feature allows Jellyseerr to automatically create requests for media items you add to your Plex Watchlist. Simply add content to your Plex Watchlist, and Jellyseerr will automatically request it for you.
|
||||
|
||||
:::info
|
||||
This feature is only available for Plex users. Local users cannot use the Watchlist Auto Request feature.
|
||||
:::
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- You must have logged into Jellyseerr at least once with your Plex account
|
||||
- Your administrator must have granted you the necessary permissions
|
||||
- Your Plex account must have access to the Plex server configured in Jellyseerr
|
||||
|
||||
## Permission System
|
||||
|
||||
The Watchlist Auto Request feature uses a two-tier permission system:
|
||||
|
||||
### Administrator Permissions (Required)
|
||||
Your administrator must grant you these permissions in your user profile:
|
||||
- **Auto-Request** (master permission)
|
||||
- **Auto-Request Movies** (for movie auto-requests)
|
||||
- **Auto-Request Series** (for TV series auto-requests)
|
||||
|
||||
### User Activation (Required)
|
||||
You must enable the feature in your own profile settings:
|
||||
- **Auto-Request Movies** toggle
|
||||
- **Auto-Request Series** toggle
|
||||
|
||||
:::warning Two-Step Process
|
||||
Both administrator permissions AND user activation are required. Having permissions doesn't automatically enable the feature - you must also activate it in your profile.
|
||||
:::
|
||||
|
||||
## How to Enable
|
||||
|
||||
### Step 1: Check Your Permissions
|
||||
Contact your administrator to verify you have been granted:
|
||||
- `Auto-Request` permission
|
||||
- `Auto-Request Movies` and/or `Auto-Request Series` permissions
|
||||
|
||||
### Step 2: Activate the Feature
|
||||
1. Go to your user profile settings
|
||||
2. Navigate to the "General" section
|
||||
3. Find the "Auto-Request" options
|
||||
4. Enable the toggles for:
|
||||
- **Auto-Request Movies** - to automatically request movies from your watchlist
|
||||
- **Auto-Request Series** - to automatically request TV series from your watchlist
|
||||
|
||||
### Step 3: Start Using
|
||||
- Add movies and TV shows to your Plex Watchlist
|
||||
- Jellyseerr will automatically create requests for new items
|
||||
- You'll receive notifications when items are auto-requested
|
||||
|
||||
## How It Works
|
||||
|
||||
Once properly configured, Jellyseerr will:
|
||||
|
||||
1. Periodically check your Plex Watchlist for new items
|
||||
2. Verify if the content already exists in your media libraries
|
||||
3. Automatically submit requests for new items that aren't already available
|
||||
4. Only request content types you have permission for
|
||||
5. Notify you when auto-requests are created
|
||||
|
||||
:::info Content Limitations
|
||||
Auto-request only works for standard quality content. 4K content must be requested manually if you have 4K permissions.
|
||||
:::
|
||||
|
||||
## For Administrators
|
||||
|
||||
### Granting Permissions
|
||||
1. Navigate to **Users** > **[Select User]** > **Permissions**
|
||||
2. Enable the required permissions:
|
||||
- **Auto-Request** (master toggle)
|
||||
- **Auto-Request Movies** (for movie auto-requests)
|
||||
- **Auto-Request Series** (for TV series auto-requests)
|
||||
3. Optionally enable **Auto-Approve** permissions for automatic approval
|
||||
|
||||
### Default Permissions
|
||||
- Go to **Settings** > **Users** > **Default Permissions**
|
||||
- Configure auto-request permissions for new users
|
||||
- This sets the default permissions but users still need to activate the feature individually
|
||||
|
||||
## Limitations
|
||||
|
||||
- Local users cannot use this feature
|
||||
- 4K content requires manual requests
|
||||
- Users must have logged into Jellyseerr with their Plex account
|
||||
- Respects user request limits and quotas
|
||||
- Won't request content already in your libraries
|
||||
@@ -1,16 +0,0 @@
|
||||
---
|
||||
title: DNS Caching
|
||||
description: Configure DNS caching settings.
|
||||
sidebar_position: 7
|
||||
---
|
||||
|
||||
# DNS Caching
|
||||
|
||||
Jellyseerr uses DNS caching to improve performance and reduce the number of DNS lookups required for external API calls. This can speed up response times and reduce the load on your DNS server, especially when something like a Pi-hole is used as the resolver.
|
||||
|
||||
## Configuration
|
||||
|
||||
You can enable DNS caching in the Network tab of the Jellyseerr settings. The default values follow standard DNS caching behavior.
|
||||
|
||||
- **Force Minimum TTL**: Set a minimum time-to-live (TTL) in seconds for DNS cache entries. This ensures that frequently accessed DNS records are cached for a longer period, reducing the need for repeated lookups. Default is 0.
|
||||
- **Force Maximum TTL**: Set a maximum time-to-live (TTL) in seconds for DNS cache entries. This prevents infrequently accessed DNS records from being cached indefinitely, allowing for more up-to-date information to be retrieved. Default is -1 (unlimited).
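For reference, a sketch of how these values map onto the `dnsCache` block in `settings.json`, using the field names that appear elsewhere in this changeset (the TTL values below are illustrative; these settings are normally edited through the UI rather than by hand):

```json
{
  "dnsCache": {
    "enabled": true,
    "forceMinTtl": 60,
    "forceMaxTtl": 3600
  }
}
```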
|
||||
@@ -1,7 +1,6 @@
|
||||
---
|
||||
title: Jobs & Cache
|
||||
description: Configure jobs and cache settings.
|
||||
sidebar_position: 6
|
||||
---
|
||||
|
||||
# Jobs & Cache
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
---
|
||||
title: Welcome to the Jellyseerr Blog
|
||||
description: The official Jellyseerr blog for release notes, technical updates, and community news.
|
||||
slug: welcome
|
||||
authors: [fallenbagel, gauthier-th]
|
||||
tags: [announcement, jellyseerr, blog]
|
||||
image: https://raw.githubusercontent.com/fallenbagel/jellyseerr/refs/heads/develop/gen-docs/static/img/logo.svg
|
||||
hide_table_of_contents: false
|
||||
---
|
||||
|
||||
We are pleased to introduce the official Jellyseerr blog.
|
||||
|
||||
This space will serve as the central place for:
|
||||
|
||||
- Release announcements
|
||||
- Updates on new features and improvements
|
||||
- Technical articles, such as details on our [**DNS caching package**](https://github.com/jellyseerr/dns-caching) and other enhancements
|
||||
- Community-related news
|
||||
|
||||
<!--truncate-->
|
||||
|
||||
Our goal is to keep the community informed and provide deeper insights into the ongoing development of Jellyseerr.
|
||||
|
||||
Thank you for being part of the Jellyseerr project. More updates will follow soon.
|
||||
@@ -1,21 +0,0 @@
|
||||
fallenbagel:
|
||||
name: Fallenbagel
|
||||
page: true
|
||||
title: Developer & Maintainer of Jellyseerr
|
||||
description: Core Maintainer & Developer of Jellyseerr | Full-Stack Software Engineer | MSc Software Engineering Candidate.
|
||||
url: https://github.com/fallenbagel
|
||||
image_url: https://github.com/fallenbagel.png
|
||||
email: hello@fallenbagel.com
|
||||
socials:
|
||||
github: fallenbagel
|
||||
|
||||
gauthier-th:
|
||||
name: Gauthier
|
||||
page: true
|
||||
title: Co-Developer & Co-Maintainer of Jellyseerr
|
||||
description: Co-Maintainer & Developer of Jellyseerr | PhD Student in AI at ICB, Dijon
|
||||
url: https://gauthierth.fr
|
||||
image_url: https://github.com/gauthier-th.png
|
||||
email: mail@gauthierth.fr
|
||||
socials:
|
||||
github: gauthier-th
|
||||
@@ -34,6 +34,7 @@ const config: Config = {
|
||||
editUrl:
|
||||
'https://github.com/fallenbagel/jellyseerr/edit/develop/docs/',
|
||||
},
|
||||
blog: false,
|
||||
pages: false,
|
||||
theme: {
|
||||
customCss: './src/css/custom.css',
|
||||
@@ -68,11 +69,6 @@ const config: Config = {
|
||||
src: 'img/logo.svg',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
to: 'blog',
|
||||
label: 'Blog',
|
||||
position: 'right',
|
||||
},
|
||||
{
|
||||
href: 'https://github.com/fallenbagel/jellyseerr',
|
||||
label: 'GitHub',
|
||||
@@ -92,19 +88,6 @@ const config: Config = {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Project',
|
||||
items: [
|
||||
{
|
||||
label: 'Blog',
|
||||
to: '/blog',
|
||||
},
|
||||
{
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/fallenbagel/jellyseerr',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Community',
|
||||
items: [
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
"name": "gen-docs",
|
||||
"version": "0.0.0",
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.17.1",
|
||||
"scripts": {
|
||||
"docusaurus": "docusaurus",
|
||||
"start": "docusaurus start",
|
||||
@@ -16,9 +15,9 @@
|
||||
"typecheck": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.9.1",
|
||||
"@docusaurus/preset-classic": "3.9.1",
|
||||
"@easyops-cn/docusaurus-search-local": "^0.52.1",
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/preset-classic": "3.4.0",
|
||||
"@easyops-cn/docusaurus-search-local": "^0.44.2",
|
||||
"@mdx-js/react": "^3.0.0",
|
||||
"clsx": "^2.0.0",
|
||||
"prism-react-renderer": "^2.3.0",
|
||||
@@ -27,11 +26,14 @@
|
||||
"tailwindcss": "^3.4.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "3.9.1",
|
||||
"@docusaurus/tsconfig": "3.9.1",
|
||||
"@docusaurus/types": "3.9.1",
|
||||
"@docusaurus/module-type-aliases": "3.4.0",
|
||||
"@docusaurus/tsconfig": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"typescript": "~5.2.2"
|
||||
},
|
||||
"resolutions": {
|
||||
"prismjs": "PrismJS/prism"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
">0.5%",
|
||||
|
||||
gen-docs/pnpm-lock.yaml (generated, 8738 lines changed): diff suppressed because it is too large
@@ -141,83 +141,14 @@ components:
|
||||
UserSettings:
|
||||
type: object
|
||||
properties:
|
||||
username:
|
||||
type: string
|
||||
nullable: true
|
||||
example: 'Mr User'
|
||||
email:
|
||||
type: string
|
||||
example: 'user@example.com'
|
||||
discordId:
|
||||
type: string
|
||||
nullable: true
|
||||
example: '123456789'
|
||||
locale:
|
||||
type: string
|
||||
nullable: true
|
||||
example: 'en'
|
||||
discoverRegion:
|
||||
type: string
|
||||
nullable: true
|
||||
example: 'US'
|
||||
streamingRegion:
|
||||
type: string
|
||||
nullable: true
|
||||
example: 'US'
|
||||
originalLanguage:
|
||||
type: string
|
||||
nullable: true
|
||||
example: 'en'
|
||||
movieQuotaLimit:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Maximum number of movie requests allowed'
|
||||
example: 10
|
||||
movieQuotaDays:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Time period in days for movie quota'
|
||||
example: 30
|
||||
tvQuotaLimit:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Maximum number of TV requests allowed'
|
||||
example: 5
|
||||
tvQuotaDays:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Time period in days for TV quota'
|
||||
example: 14
|
||||
globalMovieQuotaDays:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Global movie quota days setting'
|
||||
example: 30
|
||||
globalMovieQuotaLimit:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Global movie quota limit setting'
|
||||
example: 10
|
||||
globalTvQuotaLimit:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Global TV quota limit setting'
|
||||
example: 5
|
||||
globalTvQuotaDays:
|
||||
type: number
|
||||
nullable: true
|
||||
description: 'Global TV quota days setting'
|
||||
example: 14
|
||||
watchlistSyncMovies:
|
||||
type: boolean
|
||||
nullable: true
|
||||
description: 'Enable watchlist sync for movies'
|
||||
example: true
|
||||
watchlistSyncTv:
|
||||
type: boolean
|
||||
nullable: true
|
||||
description: 'Enable watchlist sync for TV'
|
||||
example: false
|
||||
streamingRegion:
|
||||
type: string
|
||||
MainSettings:
|
||||
type: object
|
||||
properties:
|
||||
@@ -260,51 +191,9 @@ components:
|
||||
csrfProtection:
|
||||
type: boolean
|
||||
example: false
|
||||
forceIpv4First:
|
||||
type: boolean
|
||||
example: false
|
||||
trustProxy:
|
||||
type: boolean
|
||||
example: false
|
||||
proxy:
|
||||
type: object
|
||||
properties:
|
||||
enabled:
|
||||
type: boolean
|
||||
example: false
|
||||
hostname:
|
||||
type: string
|
||||
example: ''
|
||||
port:
|
||||
type: number
|
||||
example: 8080
|
||||
useSsl:
|
||||
type: boolean
|
||||
example: false
|
||||
user:
|
||||
type: string
|
||||
example: ''
|
||||
password:
|
||||
type: string
|
||||
example: ''
|
||||
bypassFilter:
|
||||
type: string
|
||||
example: ''
|
||||
bypassLocalAddresses:
|
||||
type: boolean
|
||||
example: true
|
||||
dnsCache:
|
||||
type: object
|
||||
properties:
|
||||
enabled:
|
||||
type: boolean
|
||||
example: false
|
||||
forceMinTtl:
|
||||
type: number
|
||||
example: 0
|
||||
forceMaxTtl:
|
||||
type: number
|
||||
example: -1
|
||||
example: true
|
||||
PlexLibrary:
|
||||
type: object
|
||||
properties:
|
||||
@@ -519,20 +408,6 @@ components:
|
||||
serverID:
|
||||
type: string
|
||||
readOnly: true
|
||||
MetadataSettings:
|
||||
type: object
|
||||
properties:
|
||||
settings:
|
||||
type: object
|
||||
properties:
|
||||
tv:
|
||||
type: string
|
||||
enum: [tvdb, tmdb]
|
||||
example: 'tvdb'
|
||||
anime:
|
||||
type: string
|
||||
enum: [tvdb, tmdb]
|
||||
example: 'tvdb'
|
||||
TautulliSettings:
|
||||
type: object
|
||||
properties:
|
||||
@@ -1451,9 +1326,6 @@ components:
|
||||
type: string
|
||||
jsonPayload:
|
||||
type: string
|
||||
supportVariables:
|
||||
type: boolean
|
||||
example: false
|
||||
TelegramSettings:
|
||||
type: object
|
||||
properties:
|
||||
@@ -1553,6 +1425,22 @@ components:
|
||||
type: boolean
|
||||
token:
|
||||
type: string
|
||||
LunaSeaSettings:
|
||||
type: object
|
||||
properties:
|
||||
enabled:
|
||||
type: boolean
|
||||
example: false
|
||||
types:
|
||||
type: number
|
||||
example: 2
|
||||
options:
|
||||
type: object
|
||||
properties:
|
||||
webhookUrl:
|
||||
type: string
|
||||
profileName:
|
||||
type: string
|
||||
NotificationEmailSettings:
|
||||
type: object
|
||||
properties:
|
||||
@@ -2585,67 +2473,6 @@ paths:
|
||||
type: string
|
||||
thumb:
|
||||
type: string
|
||||
/settings/metadatas:
|
||||
get:
|
||||
summary: Get Metadata settings
|
||||
description: Retrieves current Metadata settings.
|
||||
tags:
|
||||
- settings
|
||||
responses:
|
||||
'200':
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/MetadataSettings'
|
||||
put:
|
||||
summary: Update Metadata settings
|
||||
description: Updates Metadata settings with the provided values.
|
||||
tags:
|
||||
- settings
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/MetadataSettings'
|
||||
responses:
|
||||
'200':
|
||||
description: 'Values were successfully updated'
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/MetadataSettings'
|
||||
/settings/metadatas/test:
|
||||
post:
|
||||
summary: Test Provider configuration
|
||||
description: Tests if the TVDB configuration is valid. Returns a list of available languages on success.
|
||||
tags:
|
||||
- settings
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
tmdb:
|
||||
type: boolean
|
||||
example: true
|
||||
tvdb:
|
||||
type: boolean
|
||||
example: true
|
||||
responses:
|
||||
'200':
|
||||
description: Successfully connected to TVDB
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
message:
|
||||
type: string
|
||||
example: 'Successfully connected to TVDB'
|
||||
/settings/tautulli:
|
||||
get:
|
||||
summary: Get Tautulli settings
|
||||
@@ -3087,68 +2914,6 @@ paths:
|
||||
imageCount:
|
||||
type: number
|
||||
example: 123
|
||||
dnsCache:
|
||||
type: object
|
||||
properties:
|
||||
stats:
|
||||
type: object
|
||||
properties:
|
||||
size:
|
||||
type: number
|
||||
example: 1
|
||||
maxSize:
|
||||
type: number
|
||||
example: 500
|
||||
hits:
|
||||
type: number
|
||||
example: 19
|
||||
misses:
|
||||
type: number
|
||||
example: 1
|
||||
failures:
|
||||
type: number
|
||||
example: 0
|
||||
ipv4Fallbacks:
|
||||
type: number
|
||||
example: 0
|
||||
hitRate:
|
||||
type: number
|
||||
example: 0.95
|
||||
entries:
|
||||
type: array
|
||||
additionalProperties:
|
||||
type: object
|
||||
properties:
|
||||
addresses:
|
||||
type: object
|
||||
properties:
|
||||
ipv4:
|
||||
type: number
|
||||
example: 1
|
||||
ipv6:
|
||||
type: number
|
||||
example: 1
|
||||
activeAddress:
|
||||
type: string
|
||||
example: 127.0.0.1
|
||||
family:
|
||||
type: number
|
||||
example: 4
|
||||
age:
|
||||
type: number
|
||||
example: 10
|
||||
ttl:
|
||||
type: number
|
||||
example: 10
|
||||
networkErrors:
|
||||
type: number
|
||||
example: 0
|
||||
hits:
|
||||
type: number
|
||||
example: 1
|
||||
misses:
|
||||
type: number
|
||||
example: 1
|
||||
apiCaches:
|
||||
type: array
|
||||
items:
|
||||
@@ -3188,21 +2953,6 @@ paths:
|
||||
responses:
|
||||
'204':
|
||||
description: 'Flushed cache'
|
||||
/settings/cache/dns/{dnsEntry}/flush:
|
||||
post:
|
||||
summary: Flush a specific DNS cache entry
|
||||
description: Flushes a specific DNS cache entry
|
||||
tags:
|
||||
- settings
|
||||
parameters:
|
||||
- in: path
|
||||
name: dnsEntry
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'204':
|
||||
description: 'Flushed dns cache'
|
||||
/settings/logs:
|
||||
get:
|
||||
summary: Returns logs
|
||||
@@ -3349,6 +3099,52 @@ paths:
|
||||
responses:
|
||||
'204':
|
||||
description: Test notification attempted
|
||||
/settings/notifications/lunasea:
|
||||
get:
|
||||
summary: Get LunaSea notification settings
|
||||
description: Returns current LunaSea notification settings in a JSON object.
|
||||
tags:
|
||||
- settings
|
||||
responses:
|
||||
'200':
|
||||
description: Returned LunaSea settings
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/LunaSeaSettings'
|
||||
post:
|
||||
summary: Update LunaSea notification settings
|
||||
description: Updates LunaSea notification settings with the provided values.
|
||||
tags:
|
||||
- settings
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/LunaSeaSettings'
|
||||
responses:
|
||||
'200':
|
||||
description: 'Values were successfully updated'
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/LunaSeaSettings'
|
||||
/settings/notifications/lunasea/test:
|
||||
post:
|
||||
summary: Test LunaSea settings
|
||||
description: Sends a test notification to the LunaSea agent.
|
||||
tags:
|
||||
- settings
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/LunaSeaSettings'
|
||||
responses:
|
||||
'204':
|
||||
description: Test notification attempted
|
||||
/settings/notifications/pushbullet:
|
||||
get:
|
||||
summary: Get Pushbullet notification settings
|
||||
@@ -4735,7 +4531,11 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/UserSettings'
|
||||
type: object
|
||||
properties:
|
||||
username:
|
||||
type: string
|
||||
example: 'Mr User'
|
||||
post:
|
||||
summary: Update general settings for a user
|
||||
description: Updates and returns general settings for a specific user. Requires `MANAGE_USERS` permission if editing other users.
|
||||
@@ -4752,14 +4552,22 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/UserSettings'
|
||||
type: object
|
||||
properties:
|
||||
username:
|
||||
type: string
|
||||
nullable: true
|
||||
responses:
|
||||
'200':
|
||||
description: Updated user general settings returned
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/UserSettings'
|
||||
type: object
|
||||
properties:
|
||||
username:
|
||||
type: string
|
||||
example: 'Mr User'
|
||||
/user/{userId}/settings/password:
|
||||
get:
|
||||
summary: Get password page information
|
||||
@@ -5198,12 +5006,6 @@ paths:
|
||||
schema:
|
||||
type: string
|
||||
example: 1,2
|
||||
- in: query
|
||||
name: excludeKeywords
|
||||
schema:
|
||||
type: string
|
||||
example: 3,4
|
||||
description: Comma-separated list of keyword IDs to exclude from results
|
||||
- in: query
|
||||
name: sortBy
|
||||
schema:
|
||||
@@ -5524,12 +5326,6 @@ paths:
|
||||
schema:
|
||||
type: string
|
||||
example: 1,2
|
||||
- in: query
|
||||
name: excludeKeywords
|
||||
schema:
|
||||
type: string
|
||||
example: 3,4
|
||||
description: Comma-separated list of keyword IDs to exclude from results
|
||||
- in: query
|
||||
name: sortBy
|
||||
schema:
|
||||
@@ -6153,7 +5949,7 @@ paths:
|
||||
get:
|
||||
summary: Gets request counts
|
||||
description: |
|
||||
Returns the number of requests by status including pending, approved, available, and completed requests.
|
||||
Returns the number of pending and approved requests.
|
||||
tags:
|
||||
- request
|
||||
responses:
|
||||
@@ -6180,8 +5976,6 @@ paths:
|
||||
type: number
|
||||
available:
|
||||
type: number
|
||||
completed:
|
||||
type: number
|
||||
/request/{requestId}:
|
||||
get:
|
||||
summary: Get MediaRequest
|
||||
@@ -6564,7 +6358,7 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/TvDetails'
|
||||
/tv/{tvId}/season/{seasonNumber}:
|
||||
/tv/{tvId}/season/{seasonId}:
|
||||
get:
|
||||
summary: Get season details and episode list
|
||||
description: Returns season details with a list of episodes in a JSON object.
|
||||
@@ -6578,11 +6372,11 @@ paths:
|
||||
type: number
|
||||
example: 76479
|
||||
- in: path
|
||||
name: seasonNumber
|
||||
name: seasonId
|
||||
required: true
|
||||
schema:
|
||||
type: number
|
||||
example: 123456
|
||||
example: 1
|
||||
- in: query
|
||||
name: language
|
||||
schema:
|
||||
@@ -6867,16 +6661,9 @@ paths:
|
||||
example: '1'
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: is4k
|
||||
description: Whether to remove from 4K service instance (true) or regular service instance (false)
|
||||
required: false
|
||||
example: false
|
||||
schema:
|
||||
type: boolean
|
||||
responses:
|
||||
'204':
|
||||
description: Successfully removed media item
|
||||
description: Successfully removed media item
|
||||
/media/{mediaId}/{status}:
|
||||
post:
|
||||
summary: Update media status
|
||||
@@ -7543,22 +7330,11 @@ paths:
|
||||
example: 1
|
||||
responses:
|
||||
'200':
|
||||
description: Keyword returned (null if not found)
|
||||
description: Keyword returned
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
nullable: true
|
||||
$ref: '#/components/schemas/Keyword'
|
||||
'500':
|
||||
description: Internal server error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
message:
|
||||
type: string
|
||||
example: 'Unable to retrieve keyword data.'
|
||||
/watchproviders/regions:
|
||||
get:
|
||||
summary: Get watch provider regions
|
||||
|
||||
package.json (83 lines changed)
@@ -2,7 +2,6 @@
|
||||
"name": "jellyseerr",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.17.1",
|
||||
"scripts": {
|
||||
"preinstall": "npx only-allow pnpm",
|
||||
"postinstall": "node postinstall-win.js",
|
||||
@@ -47,7 +46,7 @@
|
||||
"@types/ua-parser-js": "^0.7.36",
|
||||
"@types/wink-jaro-distance": "^2.0.2",
|
||||
"ace-builds": "1.15.2",
|
||||
"axios": "1.10.0",
|
||||
"axios": "1.3.4",
|
||||
"axios-rate-limit": "1.3.0",
|
||||
"bcrypt": "5.1.0",
|
||||
"bowser": "2.11.0",
|
||||
@@ -58,7 +57,6 @@
|
||||
"cronstrue": "2.23.0",
|
||||
"date-fns": "2.29.3",
|
||||
"dayjs": "1.11.7",
|
||||
"dns-caching": "^0.2.7",
|
||||
"email-templates": "12.0.1",
|
||||
"email-validator": "2.0.4",
|
||||
"express": "4.21.2",
|
||||
@@ -119,6 +117,10 @@
|
||||
"devDependencies": {
|
||||
"@commitlint/cli": "17.4.4",
|
||||
"@commitlint/config-conventional": "17.4.4",
|
||||
"@semantic-release/changelog": "6.0.2",
|
||||
"@semantic-release/commit-analyzer": "9.0.2",
|
||||
"@semantic-release/exec": "6.0.3",
|
||||
"@semantic-release/git": "10.0.1",
|
||||
"@tailwindcss/aspect-ratio": "0.4.2",
|
||||
"@tailwindcss/forms": "0.5.10",
|
||||
"@tailwindcss/typography": "0.5.16",
|
||||
@@ -168,6 +170,8 @@
|
||||
"prettier": "2.8.4",
|
||||
"prettier-plugin-organize-imports": "3.2.2",
|
||||
"prettier-plugin-tailwindcss": "0.2.3",
|
||||
"semantic-release": "19.0.5",
|
||||
"semantic-release-docker-buildx": "1.0.1",
|
||||
"tailwindcss": "3.2.7",
|
||||
"ts-node": "10.9.1",
|
||||
"tsc-alias": "1.8.2",
|
||||
@@ -176,7 +180,7 @@
|
||||
},
|
||||
"engines": {
|
||||
"node": "^22.0.0",
|
||||
"pnpm": "^10.0.0"
|
||||
"pnpm": "^9.0.0"
|
||||
},
|
||||
"overrides": {
|
||||
"sqlite3/node-gyp": "8.4.1",
|
||||
@@ -205,50 +209,24 @@
|
||||
"plugins": [
|
||||
"@semantic-release/commit-analyzer",
|
||||
"@semantic-release/release-notes-generator",
|
||||
[
|
||||
"@semantic-release/changelog",
|
||||
{
|
||||
"changelogFile": "CHANGELOG.md"
|
||||
}
|
||||
],
|
||||
"@semantic-release/npm",
|
||||
[
|
||||
"@codedependant/semantic-release-docker",
|
||||
"@semantic-release/git",
|
||||
{
|
||||
"dockerArgs": {
|
||||
"COMMIT_TAG": "${GITHUB_SHA}"
|
||||
},
|
||||
"dockerLogin": false,
|
||||
"dockerProject": "fallenbagel",
|
||||
"dockerImage": "jellyseerr",
|
||||
"dockerTags": [
|
||||
"latest",
|
||||
"{{major}}",
|
||||
"{{major}}.{{minor}}",
|
||||
"{{major}}.{{minor}}.{{patch}}"
|
||||
"assets": [
|
||||
"package.json",
|
||||
"CHANGELOG.md"
|
||||
],
|
||||
"dockerPlatform": [
|
||||
"linux/amd64",
|
||||
"linux/arm64"
|
||||
]
|
||||
}
|
||||
],
|
||||
[
|
||||
"@codedependant/semantic-release-docker",
|
||||
{
|
||||
"dockerArgs": {
|
||||
"COMMIT_TAG": "${GITHUB_SHA}"
|
||||
},
|
||||
"dockerLogin": false,
|
||||
"dockerRegistry": "ghcr.io",
|
||||
"dockerProject": "fallenbagel",
|
||||
"dockerImage": "jellyseerr",
|
||||
"dockerTags": [
|
||||
"latest",
|
||||
"{{major}}",
|
||||
"{{major}}.{{minor}}",
|
||||
"{{major}}.{{minor}}.{{patch}}"
|
||||
],
|
||||
"dockerPlatform": [
|
||||
"linux/amd64",
|
||||
"linux/arm64"
|
||||
]
|
||||
"message": "chore(release): ${nextRelease.version}"
|
||||
}
|
||||
],
|
||||
"semantic-release-docker-buildx",
|
||||
[
|
||||
"@semantic-release/github",
|
||||
{
|
||||
@@ -261,14 +239,21 @@
|
||||
],
|
||||
"npmPublish": false,
|
||||
"publish": [
|
||||
"@codedependant/semantic-release-docker",
|
||||
{
|
||||
"path": "semantic-release-docker-buildx",
|
||||
"buildArgs": {
|
||||
"COMMIT_TAG": "$GIT_SHA"
|
||||
},
|
||||
"imageNames": [
|
||||
"fallenbagel/jellyseerr",
|
||||
"ghcr.io/fallenbagel/jellyseerr"
|
||||
],
|
||||
"platforms": [
|
||||
"linux/amd64",
|
||||
"linux/arm64"
|
||||
]
|
||||
},
|
||||
"@semantic-release/github"
|
||||
]
|
||||
},
|
||||
"pnpm": {
|
||||
"onlyBuiltDependencies": [
|
||||
"sqlite3",
|
||||
"bcrypt"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
pnpm-lock.yaml (generated, 2304 changed lines; diff suppressed because it is too large)
@@ -48,7 +48,6 @@ export interface AnidbItem {
|
||||
tvdbId?: number;
|
||||
tmdbId?: number;
|
||||
imdbId?: string;
|
||||
tvdbSeason?: number;
|
||||
}
|
||||
|
||||
class AnimeListMapping {
|
||||
@@ -98,7 +97,6 @@ class AnimeListMapping {
|
||||
tvdbId: anime.$.defaulttvdbseason === '0' ? undefined : tvdbId,
|
||||
tmdbId: tmdbId,
|
||||
imdbId: imdbIds[0], // this is used for one AniDB -> one imdb movie mapping
|
||||
tvdbSeason: Number(anime.$.defaulttvdbseason),
|
||||
};
|
||||
|
||||
if (tvdbId) {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { requestInterceptorFunction } from '@server/utils/customProxyAgent';
|
||||
import type { AxiosInstance, AxiosRequestConfig } from 'axios';
|
||||
import axios from 'axios';
|
||||
import rateLimit from 'axios-rate-limit';
|
||||
@@ -10,7 +9,7 @@ const DEFAULT_TTL = 300;
|
||||
// 10 seconds default rolling buffer (in ms)
|
||||
const DEFAULT_ROLLING_BUFFER = 10000;
|
||||
|
||||
export interface ExternalAPIOptions {
|
||||
interface ExternalAPIOptions {
|
||||
nodeCache?: NodeCache;
|
||||
headers?: Record<string, unknown>;
|
||||
rateLimit?: {
|
||||
@@ -38,7 +37,8 @@ class ExternalAPI {
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
this.axios.interceptors.request.use(requestInterceptorFunction);
|
||||
this.axios.interceptors.request = axios.interceptors.request;
|
||||
this.axios.interceptors.response = axios.interceptors.response;
|
||||
|
||||
if (options.rateLimit) {
|
||||
this.axios = rateLimit(this.axios, {
|
||||
|
||||
@@ -103,7 +103,6 @@ export interface JellyfinLibraryItemExtended extends JellyfinLibraryItem {
|
||||
Tmdb?: string;
|
||||
Imdb?: string;
|
||||
Tvdb?: string;
|
||||
AniDB?: string;
|
||||
};
|
||||
MediaSources?: JellyfinMediaSource[];
|
||||
Width?: number;
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
import type { TvShowProvider } from '@server/api/provider';
|
||||
import TheMovieDb from '@server/api/themoviedb';
|
||||
import Tvdb from '@server/api/tvdb';
|
||||
import { getSettings, MetadataProviderType } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
|
||||
export const getMetadataProvider = async (
|
||||
mediaType: 'movie' | 'tv' | 'anime'
|
||||
): Promise<TvShowProvider> => {
|
||||
try {
|
||||
const settings = await getSettings();
|
||||
|
||||
if (mediaType == 'movie') {
|
||||
return new TheMovieDb();
|
||||
}
|
||||
|
||||
if (
|
||||
mediaType == 'tv' &&
|
||||
settings.metadataSettings.tv == MetadataProviderType.TVDB
|
||||
) {
|
||||
return await Tvdb.getInstance();
|
||||
}
|
||||
|
||||
if (
|
||||
mediaType == 'anime' &&
|
||||
settings.metadataSettings.anime == MetadataProviderType.TVDB
|
||||
) {
|
||||
return await Tvdb.getInstance();
|
||||
}
|
||||
|
||||
return new TheMovieDb();
|
||||
} catch (e) {
|
||||
logger.error('Failed to get metadata provider', {
|
||||
label: 'Metadata',
|
||||
message: e.message,
|
||||
});
|
||||
return new TheMovieDb();
|
||||
}
|
||||
};
|
||||
@@ -113,7 +113,7 @@ interface MetadataResponse {
|
||||
ratingKey: string;
|
||||
type: 'movie' | 'show';
|
||||
title: string;
|
||||
Guid?: {
|
||||
Guid: {
|
||||
id: `imdb://tt${number}` | `tmdb://${number}` | `tvdb://${number}`;
|
||||
}[];
|
||||
}[];
|
||||
@@ -277,18 +277,9 @@ class PlexTvAPI extends ExternalAPI {
|
||||
}> {
|
||||
try {
|
||||
const watchlistCache = cacheManager.getCache('plexwatchlist');
|
||||
logger.debug('Fetching watchlist from Plex.TV', {
|
||||
offset,
|
||||
size,
|
||||
label: 'Plex.TV Metadata API',
|
||||
});
|
||||
let cachedWatchlist = watchlistCache.data.get<PlexWatchlistCache>(
|
||||
this.authToken
|
||||
);
|
||||
logger.debug(`Found cached watchlist: ${!!cachedWatchlist}`, {
|
||||
cachedWatchlist,
|
||||
label: 'Plex.TV Metadata API',
|
||||
});
|
||||
|
||||
const response = await this.axios.get<WatchlistResponse>(
|
||||
'/library/sections/watchlist/all',
|
||||
@@ -300,15 +291,11 @@ class PlexTvAPI extends ExternalAPI {
|
||||
headers: {
|
||||
'If-None-Match': cachedWatchlist?.etag,
|
||||
},
|
||||
baseURL: 'https://discover.provider.plex.tv',
|
||||
baseURL: 'https://metadata.provider.plex.tv',
|
||||
validateStatus: (status) => status < 400, // Allow HTTP 304 to return without error
|
||||
}
|
||||
);
|
||||
|
||||
logger.debug(`Watchlist fetch returned status ${response.status}`, {
|
||||
label: 'Plex.TV Metadata API',
|
||||
});
|
||||
|
||||
// If we don't receive HTTP 304, the watchlist has been updated and we need to update the cache.
|
||||
if (response.status >= 200 && response.status <= 299) {
|
||||
cachedWatchlist = {
|
||||
@@ -325,32 +312,19 @@ class PlexTvAPI extends ExternalAPI {
|
||||
const watchlistDetails = await Promise.all(
|
||||
(cachedWatchlist?.response.MediaContainer.Metadata ?? []).map(
|
||||
async (watchlistItem) => {
|
||||
let detailedResponse: MetadataResponse;
|
||||
try {
|
||||
detailedResponse = await this.getRolling<MetadataResponse>(
|
||||
`/library/metadata/${watchlistItem.ratingKey}`,
|
||||
{
|
||||
baseURL: 'https://discover.provider.plex.tv',
|
||||
}
|
||||
);
|
||||
} catch (e) {
|
||||
if (e.response?.status === 404) {
|
||||
logger.warn(
|
||||
`Item with ratingKey ${watchlistItem.ratingKey} not found, it may have been removed from the server.`,
|
||||
{ label: 'Plex.TV Metadata API' }
|
||||
);
|
||||
return null;
|
||||
} else {
|
||||
throw e;
|
||||
const detailedResponse = await this.getRolling<MetadataResponse>(
|
||||
`/library/metadata/${watchlistItem.ratingKey}`,
|
||||
{
|
||||
baseURL: 'https://metadata.provider.plex.tv',
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const metadata = detailedResponse.MediaContainer.Metadata[0];
|
||||
|
||||
const tmdbString = metadata.Guid?.find((guid) =>
|
||||
const tmdbString = metadata.Guid.find((guid) =>
|
||||
guid.id.startsWith('tmdb')
|
||||
);
|
||||
const tvdbString = metadata.Guid?.find((guid) =>
|
||||
const tvdbString = metadata.Guid.find((guid) =>
|
||||
guid.id.startsWith('tvdb')
|
||||
);
|
||||
|
||||
@@ -369,9 +343,7 @@ class PlexTvAPI extends ExternalAPI {
|
||||
)
|
||||
);
|
||||
|
||||
const filteredList = watchlistDetails.filter(
|
||||
(detail) => detail?.tmdbId
|
||||
) as PlexWatchlistItem[];
|
||||
const filteredList = watchlistDetails.filter((detail) => detail.tmdbId);
|
||||
|
||||
return {
|
||||
offset,
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
import type {
|
||||
TmdbSeasonWithEpisodes,
|
||||
TmdbTvDetails,
|
||||
} from '@server/api/themoviedb/interfaces';
|
||||
|
||||
export interface TvShowProvider {
|
||||
getTvShow({
|
||||
tvId,
|
||||
language,
|
||||
}: {
|
||||
tvId: number;
|
||||
language?: string;
|
||||
}): Promise<TmdbTvDetails>;
|
||||
getTvSeason({
|
||||
tvId,
|
||||
seasonNumber,
|
||||
language,
|
||||
}: {
|
||||
tvId: number;
|
||||
seasonNumber: number;
|
||||
language?: string;
|
||||
}): Promise<TmdbSeasonWithEpisodes>;
|
||||
getShowByTvdbId({
|
||||
tvdbId,
|
||||
language,
|
||||
}: {
|
||||
tvdbId: number;
|
||||
language?: string;
|
||||
}): Promise<TmdbTvDetails>;
|
||||
}
|
||||
@@ -145,7 +145,6 @@ export interface IMDBRating {
|
||||
title: string;
|
||||
url: string;
|
||||
criticsScore: number;
|
||||
criticsScoreCount: number;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -188,7 +187,6 @@ class IMDBRadarrProxy extends ExternalAPI {
|
||||
title: data[0].Title,
|
||||
url: `https://www.imdb.com/title/${data[0].ImdbId}`,
|
||||
criticsScore: data[0].MovieRatings.Imdb.Value,
|
||||
criticsScoreCount: data[0].MovieRatings.Imdb.Count,
|
||||
};
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
|
||||
@@ -198,25 +198,6 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {
|
||||
}
|
||||
};
|
||||
|
||||
public renameTag = async ({
|
||||
id,
|
||||
label,
|
||||
}: {
|
||||
id: number;
|
||||
label: string;
|
||||
}): Promise<Tag> => {
|
||||
try {
|
||||
const response = await this.axios.put<Tag>(`/tag/${id}`, {
|
||||
id,
|
||||
label,
|
||||
});
|
||||
|
||||
return response.data;
|
||||
} catch (e) {
|
||||
throw new Error(`[${this.apiName}] Failed to rename tag: ${e.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
async refreshMonitoredDownloads(): Promise<void> {
|
||||
await this.runCommand('RefreshMonitoredDownloads', {});
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import type { User } from '@server/entity/User';
|
||||
import type { TautulliSettings } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import { requestInterceptorFunction } from '@server/utils/customProxyAgent';
|
||||
import type { AxiosInstance } from 'axios';
|
||||
import axios from 'axios';
|
||||
import { uniqWith } from 'lodash';
|
||||
@@ -124,7 +123,8 @@ class TautulliAPI {
|
||||
}${settings.urlBase ?? ''}`,
|
||||
params: { apikey: settings.apiKey },
|
||||
});
|
||||
this.axios.interceptors.request.use(requestInterceptorFunction);
|
||||
this.axios.interceptors.request = axios.interceptors.request;
|
||||
this.axios.interceptors.response = axios.interceptors.response;
|
||||
}
|
||||
|
||||
public async getInfo(): Promise<TautulliInfo> {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import ExternalAPI from '@server/api/externalapi';
|
||||
import type { TvShowProvider } from '@server/api/provider';
|
||||
import cacheManager from '@server/lib/cache';
|
||||
import { getSettings } from '@server/lib/settings';
|
||||
import { sortBy } from 'lodash';
|
||||
@@ -86,7 +85,6 @@ interface DiscoverMovieOptions {
|
||||
genre?: string;
|
||||
studio?: string;
|
||||
keywords?: string;
|
||||
excludeKeywords?: string;
|
||||
sortBy?: SortOptions;
|
||||
watchRegion?: string;
|
||||
watchProviders?: string;
|
||||
@@ -112,7 +110,6 @@ interface DiscoverTvOptions {
|
||||
genre?: string;
|
||||
network?: number;
|
||||
keywords?: string;
|
||||
excludeKeywords?: string;
|
||||
sortBy?: SortOptions;
|
||||
watchRegion?: string;
|
||||
watchProviders?: string;
|
||||
@@ -123,7 +120,7 @@ interface DiscoverTvOptions {
|
||||
certificationCountry?: string;
|
||||
}
|
||||
|
||||
class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
class TheMovieDb extends ExternalAPI {
|
||||
private locale: string;
|
||||
private discoverRegion?: string;
|
||||
private originalLanguage?: string;
|
||||
@@ -344,13 +341,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
}
|
||||
);
|
||||
|
||||
data.episodes = data.episodes.map((episode) => {
|
||||
if (episode.still_path) {
|
||||
episode.still_path = `https://image.tmdb.org/t/p/original/${episode.still_path}`;
|
||||
}
|
||||
return episode;
|
||||
});
|
||||
|
||||
return data;
|
||||
} catch (e) {
|
||||
throw new Error(`[TMDB] Failed to fetch TV show details: ${e.message}`);
|
||||
@@ -497,7 +487,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
genre,
|
||||
studio,
|
||||
keywords,
|
||||
excludeKeywords,
|
||||
withRuntimeGte,
|
||||
withRuntimeLte,
|
||||
voteAverageGte,
|
||||
@@ -548,7 +537,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
with_genres: genre,
|
||||
with_companies: studio,
|
||||
with_keywords: keywords,
|
||||
without_keywords: excludeKeywords,
|
||||
'with_runtime.gte': withRuntimeGte,
|
||||
'with_runtime.lte': withRuntimeLte,
|
||||
'vote_average.gte': voteAverageGte,
|
||||
@@ -581,7 +569,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
genre,
|
||||
network,
|
||||
keywords,
|
||||
excludeKeywords,
|
||||
withRuntimeGte,
|
||||
withRuntimeLte,
|
||||
voteAverageGte,
|
||||
@@ -633,7 +620,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
with_genres: genre,
|
||||
with_networks: network,
|
||||
with_keywords: keywords,
|
||||
without_keywords: excludeKeywords,
|
||||
'with_runtime.gte': withRuntimeGte,
|
||||
'with_runtime.lte': withRuntimeLte,
|
||||
'vote_average.gte': voteAverageGte,
|
||||
@@ -1068,7 +1054,7 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
keywordId,
|
||||
}: {
|
||||
keywordId: number;
|
||||
}): Promise<TmdbKeyword | null> {
|
||||
}): Promise<TmdbKeyword> {
|
||||
try {
|
||||
const data = await this.get<TmdbKeyword>(
|
||||
`/keyword/${keywordId}`,
|
||||
@@ -1078,9 +1064,6 @@ class TheMovieDb extends ExternalAPI implements TvShowProvider {
|
||||
|
||||
return data;
|
||||
} catch (e) {
|
||||
if (e.response?.status === 404) {
|
||||
return null;
|
||||
}
|
||||
throw new Error(`[TMDB] Failed to fetch keyword: ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -220,7 +220,7 @@ export interface TmdbTvEpisodeResult {
|
||||
show_id: number;
|
||||
still_path: string;
|
||||
vote_average: number;
|
||||
vote_count: number;
|
||||
vote_cuont: number;
|
||||
}
|
||||
|
||||
export interface TmdbTvSeasonResult {
|
||||
|
||||
@@ -1,563 +0,0 @@
|
||||
import ExternalAPI from '@server/api/externalapi';
|
||||
import type { TvShowProvider } from '@server/api/provider';
|
||||
import TheMovieDb from '@server/api/themoviedb';
|
||||
import type {
|
||||
TmdbSeasonWithEpisodes,
|
||||
TmdbTvDetails,
|
||||
TmdbTvEpisodeResult,
|
||||
TmdbTvSeasonResult,
|
||||
} from '@server/api/themoviedb/interfaces';
|
||||
import {
|
||||
convertTmdbLanguageToTvdbWithFallback,
|
||||
type TvdbBaseResponse,
|
||||
type TvdbEpisode,
|
||||
type TvdbLoginResponse,
|
||||
type TvdbSeasonDetails,
|
||||
type TvdbTvDetails,
|
||||
} from '@server/api/tvdb/interfaces';
|
||||
import cacheManager, { type AvailableCacheIds } from '@server/lib/cache';
|
||||
import logger from '@server/logger';
|
||||
|
||||
interface TvdbConfig {
|
||||
baseUrl: string;
|
||||
maxRequestsPerSecond: number;
|
||||
maxRequests: number;
|
||||
cachePrefix: AvailableCacheIds;
|
||||
}
|
||||
|
||||
const DEFAULT_CONFIG: TvdbConfig = {
|
||||
baseUrl: 'https://api4.thetvdb.com/v4',
|
||||
maxRequestsPerSecond: 50,
|
||||
maxRequests: 20,
|
||||
cachePrefix: 'tvdb' as const,
|
||||
};
|
||||
|
||||
const enum TvdbIdStatus {
|
||||
INVALID = -1,
|
||||
}
|
||||
|
||||
type TvdbId = number;
|
||||
type ValidTvdbId = Exclude<TvdbId, TvdbIdStatus.INVALID>;
|
||||
|
||||
class Tvdb extends ExternalAPI implements TvShowProvider {
|
||||
static instance: Tvdb;
|
||||
private readonly tmdb: TheMovieDb;
|
||||
private static readonly DEFAULT_CACHE_TTL = 43200;
|
||||
private static readonly DEFAULT_LANGUAGE = 'eng';
|
||||
private token: string;
|
||||
private pin?: string;
|
||||
|
||||
constructor(pin?: string) {
|
||||
const finalConfig = { ...DEFAULT_CONFIG };
|
||||
super(
|
||||
finalConfig.baseUrl,
|
||||
{},
|
||||
{
|
||||
nodeCache: cacheManager.getCache(finalConfig.cachePrefix).data,
|
||||
rateLimit: {
|
||||
maxRequests: finalConfig.maxRequests,
|
||||
maxRPS: finalConfig.maxRequestsPerSecond,
|
||||
},
|
||||
}
|
||||
);
|
||||
this.pin = pin;
|
||||
this.tmdb = new TheMovieDb();
|
||||
}
|
||||
|
||||
public static async getInstance(): Promise<Tvdb> {
|
||||
if (!this.instance) {
|
||||
this.instance = new Tvdb();
|
||||
await this.instance.login();
|
||||
}
|
||||
|
||||
return this.instance;
|
||||
}
|
||||
|
||||
private async refreshToken(): Promise<void> {
|
||||
try {
|
||||
if (!this.token) {
|
||||
await this.login();
|
||||
return;
|
||||
}
|
||||
|
||||
const base64Url = this.token.split('.')[1];
|
||||
const base64 = base64Url.replace(/-/g, '+').replace(/_/g, '/');
|
||||
const payload = JSON.parse(Buffer.from(base64, 'base64').toString());
|
||||
|
||||
if (!payload.exp) {
|
||||
await this.login();
|
||||
}
|
||||
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const diff = payload.exp - now;
|
||||
|
||||
// refresh token 1 week before expiration
|
||||
if (diff < 604800) {
|
||||
await this.login();
|
||||
}
|
||||
} catch (error) {
|
||||
this.handleError('Failed to refresh token', error);
|
||||
}
|
||||
}
|
||||
|
||||
public async test(): Promise<void> {
|
||||
try {
|
||||
await this.login();
|
||||
} catch (error) {
|
||||
this.handleError('Login failed', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async login(): Promise<TvdbLoginResponse> {
|
||||
let body: { apiKey: string; pin?: string } = {
|
||||
apiKey: 'd00d9ecb-a9d0-4860-958a-74b14a041405',
|
||||
};
|
||||
|
||||
if (this.pin) {
|
||||
body = {
|
||||
...body,
|
||||
pin: this.pin,
|
||||
};
|
||||
}
|
||||
|
||||
const response = await this.post<TvdbBaseResponse<TvdbLoginResponse>>(
|
||||
'/login',
|
||||
{
|
||||
...body,
|
||||
}
|
||||
);
|
||||
|
||||
this.token = response.data.token;
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
public async getShowByTvdbId({
|
||||
tvdbId,
|
||||
language,
|
||||
}: {
|
||||
tvdbId: number;
|
||||
language?: string;
|
||||
}): Promise<TmdbTvDetails> {
|
||||
try {
|
||||
const tmdbTvShow = await this.tmdb.getShowByTvdbId({
|
||||
tvdbId: tvdbId,
|
||||
language,
|
||||
});
|
||||
|
||||
try {
|
||||
await this.refreshToken();
|
||||
|
||||
const validTvdbId = this.getTvdbIdFromTmdb(tmdbTvShow);
|
||||
|
||||
if (this.isValidTvdbId(validTvdbId)) {
|
||||
return this.enrichTmdbShowWithTvdbData(tmdbTvShow, validTvdbId);
|
||||
}
|
||||
|
||||
return tmdbTvShow;
|
||||
} catch (error) {
|
||||
return tmdbTvShow;
|
||||
}
|
||||
} catch (error) {
|
||||
this.handleError('Failed to fetch TV show details', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public async getTvShow({
|
||||
tvId,
|
||||
language,
|
||||
}: {
|
||||
tvId: number;
|
||||
language?: string;
|
||||
}): Promise<TmdbTvDetails> {
|
||||
try {
|
||||
const tmdbTvShow = await this.tmdb.getTvShow({ tvId, language });
|
||||
|
||||
try {
|
||||
await this.refreshToken();
|
||||
|
||||
const tvdbId = this.getTvdbIdFromTmdb(tmdbTvShow);
|
||||
|
||||
if (this.isValidTvdbId(tvdbId)) {
|
||||
return await this.enrichTmdbShowWithTvdbData(tmdbTvShow, tvdbId);
|
||||
}
|
||||
|
||||
return tmdbTvShow;
|
||||
} catch (error) {
|
||||
this.handleError('Failed to fetch TV show details', error);
|
||||
return tmdbTvShow;
|
||||
}
|
||||
} catch (error) {
|
||||
this.handleError('Failed to fetch TV show details', error);
|
||||
return this.tmdb.getTvShow({ tvId, language });
|
||||
}
|
||||
}
|
||||
|
||||
public async getTvSeason({
|
||||
tvId,
|
||||
seasonNumber,
|
||||
language = Tvdb.DEFAULT_LANGUAGE,
|
||||
}: {
|
||||
tvId: number;
|
||||
seasonNumber: number;
|
||||
language?: string;
|
||||
}): Promise<TmdbSeasonWithEpisodes> {
|
||||
try {
|
||||
const tmdbTvShow = await this.tmdb.getTvShow({ tvId, language });
|
||||
|
||||
try {
|
||||
await this.refreshToken();
|
||||
|
||||
const tvdbId = this.getTvdbIdFromTmdb(tmdbTvShow);
|
||||
|
||||
if (!this.isValidTvdbId(tvdbId)) {
|
||||
return await this.tmdb.getTvSeason({ tvId, seasonNumber, language });
|
||||
}
|
||||
|
||||
return await this.getTvdbSeasonData(
|
||||
tvdbId,
|
||||
seasonNumber,
|
||||
tvId,
|
||||
language
|
||||
);
|
||||
} catch (error) {
|
||||
this.handleError('Failed to fetch TV season details', error);
|
||||
return await this.tmdb.getTvSeason({ tvId, seasonNumber, language });
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[TVDB] Failed to fetch TV season details: ${error.message}`
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async enrichTmdbShowWithTvdbData(
|
||||
tmdbTvShow: TmdbTvDetails,
|
||||
tvdbId: ValidTvdbId
|
||||
): Promise<TmdbTvDetails> {
|
||||
try {
|
||||
await this.refreshToken();
|
||||
|
||||
const tvdbData = await this.fetchTvdbShowData(tvdbId);
|
||||
const seasons = this.processSeasons(tvdbData);
|
||||
|
||||
if (!seasons.length) {
|
||||
return tmdbTvShow;
|
||||
}
|
||||
|
||||
return { ...tmdbTvShow, seasons };
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`Failed to enrich TMDB show with TVDB data: ${error.message} token: ${this.token}`
|
||||
);
|
||||
return tmdbTvShow;
|
||||
}
|
||||
}
|
||||
|
||||
private async fetchTvdbShowData(tvdbId: number): Promise<TvdbTvDetails> {
|
||||
const resp = await this.get<TvdbBaseResponse<TvdbTvDetails>>(
|
||||
`/series/${tvdbId}/extended?meta=episodes&short=true`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
},
|
||||
},
|
||||
Tvdb.DEFAULT_CACHE_TTL
|
||||
);
|
||||
|
||||
return resp.data;
|
||||
}
|
||||
|
||||
private processSeasons(tvdbData: TvdbTvDetails): TmdbTvSeasonResult[] {
|
||||
if (!tvdbData || !tvdbData.seasons || !tvdbData.episodes) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const seasons = tvdbData.seasons
|
||||
.filter((season) => season.type && season.type.type === 'official')
|
||||
.sort((a, b) => a.number - b.number)
|
||||
.map((season) => this.createSeasonData(season, tvdbData))
|
||||
.filter(
|
||||
(season) => season && season.season_number >= 0
|
||||
) as TmdbTvSeasonResult[];
|
||||
|
||||
return seasons;
|
||||
}
|
||||
|
||||
private createSeasonData(
|
||||
season: TvdbSeasonDetails,
|
||||
tvdbData: TvdbTvDetails
|
||||
): TmdbTvSeasonResult {
|
||||
const seasonNumber = season.number ?? -1;
|
||||
if (seasonNumber < 0) {
|
||||
return {
|
||||
id: 0,
|
||||
episode_count: 0,
|
||||
name: '',
|
||||
overview: '',
|
||||
season_number: -1,
|
||||
poster_path: '',
|
||||
air_date: '',
|
||||
};
|
||||
}
|
||||
|
||||
const episodeCount = tvdbData.episodes.filter(
|
||||
(episode) => episode.seasonNumber === season.number
|
||||
).length;
|
||||
|
||||
return {
|
||||
id: tvdbData.id,
|
||||
episode_count: episodeCount,
|
||||
name: `${season.number}`,
|
||||
overview: '',
|
||||
season_number: season.number,
|
||||
poster_path: '',
|
||||
air_date: '',
|
||||
};
|
||||
}
|
||||
|
||||
private async getTvdbSeasonData(
|
||||
tvdbId: number,
|
||||
seasonNumber: number,
|
||||
tvId: number,
|
||||
language: string = Tvdb.DEFAULT_LANGUAGE
|
||||
): Promise<TmdbSeasonWithEpisodes> {
|
||||
const tvdbData = await this.fetchTvdbShowData(tvdbId);
|
||||
|
||||
if (!tvdbData) {
|
||||
logger.error(`Failed to fetch TVDB data for ID: ${tvdbId}`);
|
||||
return this.createEmptySeasonResponse(tvId);
|
||||
}
|
||||
|
||||
// get season id
|
||||
const season = tvdbData.seasons.find(
|
||||
(season) =>
|
||||
season.number === seasonNumber &&
|
||||
season.type.type &&
|
||||
season.type.type === 'official'
|
||||
);
|
||||
|
||||
if (!season) {
|
||||
logger.error(
|
||||
`Failed to find season ${seasonNumber} for TVDB ID: ${tvdbId}`
|
||||
);
|
||||
return this.createEmptySeasonResponse(tvId);
|
||||
}
|
||||
|
||||
const wantedTranslation = convertTmdbLanguageToTvdbWithFallback(
|
||||
language,
|
||||
Tvdb.DEFAULT_LANGUAGE
|
||||
);
|
||||
|
||||
// check if translation is available for the season
|
||||
const availableTranslation = season.nameTranslations.filter(
|
||||
(translation) =>
|
||||
translation === wantedTranslation ||
|
||||
translation === Tvdb.DEFAULT_LANGUAGE
|
||||
);
|
||||
|
||||
if (!availableTranslation) {
|
||||
return this.getSeasonWithOriginalLanguage(
|
||||
tvdbId,
|
||||
tvId,
|
||||
seasonNumber,
|
||||
season
|
||||
);
|
||||
}
|
||||
|
||||
return this.getSeasonWithTranslation(
|
||||
tvdbId,
|
||||
tvId,
|
||||
seasonNumber,
|
||||
season,
|
||||
wantedTranslation
|
||||
);
|
||||
}
|
||||
|
||||
private async getSeasonWithTranslation(
|
||||
tvdbId: number,
|
||||
tvId: number,
|
||||
seasonNumber: number,
|
||||
season: TvdbSeasonDetails,
|
||||
language: string
|
||||
): Promise<TmdbSeasonWithEpisodes> {
|
||||
if (!season) {
|
||||
logger.error(
|
||||
`Failed to find season ${seasonNumber} for TVDB ID: ${tvdbId}`
|
||||
);
|
||||
return this.createEmptySeasonResponse(tvId);
|
||||
}
|
||||
|
||||
const allEpisodes = [] as TvdbEpisode[];
|
||||
let page = 0;
|
||||
// Limit to max 50 pages to avoid infinite loops.
|
||||
// 50 pages with 500 items per page = 25_000 episodes in a series which should be more than enough
|
||||
const maxPages = 50;
|
||||
|
||||
while (page < maxPages) {
|
||||
const resp = await this.get<TvdbBaseResponse<TvdbSeasonDetails>>(
|
||||
`/series/${tvdbId}/episodes/default/${language}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
},
|
||||
params: {
|
||||
page: page,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (!resp?.data?.episodes) {
|
||||
logger.warn(
|
||||
`No episodes found for TVDB ID: ${tvdbId} on page ${page} for season ${seasonNumber}`
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
const { episodes } = resp.data;
|
||||
|
||||
if (!episodes) {
|
||||
logger.debug(
|
||||
`No more episodes found for TVDB ID: ${tvdbId} on page ${page} for season ${seasonNumber}`
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
allEpisodes.push(...episodes);
|
||||
|
||||
const hasNextPage = resp.links?.next && episodes.length > 0;
|
||||
|
||||
if (!hasNextPage) {
|
||||
break;
|
||||
}
|
||||
|
||||
page++;
|
||||
}
|
||||
|
||||
if (page >= maxPages) {
|
||||
logger.warn(
|
||||
`Reached max pages (${maxPages}) for TVDB ID: ${tvdbId} on season ${seasonNumber} with language ${language}. There might be more episodes available.`
|
||||
);
|
||||
}
|
||||
|
||||
const episodes = this.processEpisodes(
|
||||
{ ...season, episodes: allEpisodes },
|
||||
seasonNumber,
|
||||
tvId
|
||||
);
|
||||
|
||||
return {
|
||||
episodes,
|
||||
external_ids: { tvdb_id: tvdbId },
|
||||
name: '',
|
||||
overview: '',
|
||||
id: season.id,
|
||||
air_date: season.firstAired,
|
||||
season_number: episodes.length,
|
||||
};
|
||||
}
|
||||
|
||||
private async getSeasonWithOriginalLanguage(
|
||||
tvdbId: number,
|
||||
tvId: number,
|
||||
seasonNumber: number,
|
||||
season: TvdbSeasonDetails
|
||||
): Promise<TmdbSeasonWithEpisodes> {
|
||||
if (!season) {
|
||||
logger.error(
|
||||
`Failed to find season ${seasonNumber} for TVDB ID: ${tvdbId}`
|
||||
);
|
||||
return this.createEmptySeasonResponse(tvId);
|
||||
}
|
||||
|
||||
const resp = await this.get<TvdbBaseResponse<TvdbSeasonDetails>>(
|
||||
`/seasons/${season.id}/extended`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const seasons = resp.data;
|
||||
|
||||
const episodes = this.processEpisodes(seasons, seasonNumber, tvId);
|
||||
|
||||
return {
|
||||
episodes,
|
||||
external_ids: { tvdb_id: tvdbId },
|
||||
name: '',
|
||||
overview: '',
|
||||
id: seasons.id,
|
||||
air_date: seasons.firstAired,
|
||||
season_number: episodes.length,
|
||||
};
|
||||
}
|
||||
|
||||
private processEpisodes(
|
||||
tvdbSeason: TvdbSeasonDetails,
|
||||
seasonNumber: number,
|
||||
tvId: number
|
||||
): TmdbTvEpisodeResult[] {
|
||||
if (!tvdbSeason || !tvdbSeason.episodes) {
|
||||
logger.error('No episodes found in TVDB season data');
|
||||
return [];
|
||||
}
|
||||
|
||||
return tvdbSeason.episodes
|
||||
.filter((episode) => episode.seasonNumber === seasonNumber)
|
||||
.map((episode, index) => this.createEpisodeData(episode, index, tvId));
|
||||
}
|
||||
|
||||
private createEpisodeData(
|
||||
episode: TvdbEpisode,
|
||||
index: number,
|
||||
tvId: number
|
||||
): TmdbTvEpisodeResult {
|
||||
return {
|
||||
id: episode.id,
|
||||
air_date: episode.aired,
|
||||
episode_number: episode.number,
|
||||
name: episode.name || `Episode ${index + 1}`,
|
||||
overview: episode.overview || '',
|
||||
season_number: episode.seasonNumber,
|
||||
production_code: '',
|
||||
show_id: tvId,
|
||||
still_path:
|
||||
episode.image && !episode.image.startsWith('https://')
|
||||
? 'https://artworks.thetvdb.com' + episode.image
|
||||
: '',
|
||||
vote_average: 1,
|
||||
vote_count: 1,
|
||||
};
|
||||
}
|
||||
|
||||
private createEmptySeasonResponse(tvId: number): TmdbSeasonWithEpisodes {
|
||||
return {
|
||||
episodes: [],
|
||||
external_ids: { tvdb_id: tvId },
|
||||
name: '',
|
||||
overview: '',
|
||||
id: 0,
|
||||
air_date: '',
|
||||
season_number: 0,
|
||||
};
|
||||
}
|
||||
|
||||
private getTvdbIdFromTmdb(tmdbTvShow: TmdbTvDetails): TvdbId {
|
||||
return tmdbTvShow?.external_ids?.tvdb_id ?? TvdbIdStatus.INVALID;
|
||||
}
|
||||
|
||||
private isValidTvdbId(tvdbId: TvdbId): tvdbId is ValidTvdbId {
|
||||
return tvdbId !== TvdbIdStatus.INVALID;
|
||||
}
|
||||
|
||||
private handleError(context: string, error: Error): void {
|
||||
throw new Error(`[TVDB] ${context}: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
export default Tvdb;
|
||||
@@ -1,216 +0,0 @@
|
||||
import { type AvailableLocale } from '@server/types/languages';
|
||||
|
||||
export interface TvdbBaseResponse<T> {
|
||||
data: T;
|
||||
errors: string;
|
||||
links?: TvdbPagination;
|
||||
}
|
||||
|
||||
export interface TvdbPagination {
|
||||
prev?: string;
|
||||
self: string;
|
||||
next?: string;
|
||||
totalItems: number;
|
||||
pageSize: number;
|
||||
}
|
||||
|
||||
export interface TvdbLoginResponse {
|
||||
token: string;
|
||||
}
|
||||
|
||||
interface TvDetailsAliases {
|
||||
language: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface TvDetailsStatus {
|
||||
id: number;
|
||||
name: string;
|
||||
recordType: string;
|
||||
keepUpdated: boolean;
|
||||
}
|
||||
|
||||
export interface TvdbTvDetails {
|
||||
id: number;
|
||||
name: string;
|
||||
slug: string;
|
||||
image: string;
|
||||
nameTranslations: string[];
|
||||
overwiewTranslations: string[];
|
||||
aliases: TvDetailsAliases[];
|
||||
firstAired: Date;
|
||||
lastAired: Date;
|
||||
nextAired: Date | string;
|
||||
score: number;
|
||||
status: TvDetailsStatus;
|
||||
originalCountry: string;
|
||||
originalLanguage: string;
|
||||
defaultSeasonType: string;
|
||||
isOrderRandomized: boolean;
|
||||
lastUpdated: Date;
|
||||
averageRuntime: number;
|
||||
seasons: TvdbSeasonDetails[];
|
||||
episodes: TvdbEpisode[];
|
||||
}
|
||||
|
||||
interface TvdbCompanyType {
|
||||
companyTypeId: number;
|
||||
companyTypeName: string;
|
||||
}
|
||||
|
||||
interface TvdbParentCompany {
|
||||
id?: number;
|
||||
name?: string;
|
||||
relation?: {
|
||||
id?: number;
|
||||
typeName?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface TvdbCompany {
|
||||
id: number;
|
||||
name: string;
|
||||
slug: string;
|
||||
nameTranslations?: string[];
|
||||
overviewTranslations?: string[];
|
||||
aliases?: string[];
|
||||
country: string;
|
||||
primaryCompanyType: number;
|
||||
activeDate: string;
|
||||
inactiveDate?: string;
|
||||
companyType: TvdbCompanyType;
|
||||
parentCompany: TvdbParentCompany;
|
||||
tagOptions?: string[];
|
||||
}
|
||||
|
||||
interface TvdbType {
|
||||
id: number;
|
||||
name: string;
|
||||
type: string;
|
||||
alternateName?: string;
|
||||
}
|
||||
|
||||
interface TvdbArtwork {
|
||||
id: number;
|
||||
image: string;
|
||||
thumbnail: string;
|
||||
language: string;
|
||||
type: number;
|
||||
score: number;
|
||||
width: number;
|
||||
height: number;
|
||||
includesText: boolean;
|
||||
}
|
||||
|
||||
export interface TvdbEpisode {
|
||||
id: number;
|
||||
seriesId: number;
|
||||
name: string;
|
||||
aired: string;
|
||||
runtime: number;
|
||||
nameTranslations: string[];
|
||||
overview?: string;
|
||||
overviewTranslations: string[];
|
||||
image: string;
|
||||
imageType: number;
|
||||
isMovie: number;
|
||||
seasons?: string[];
|
||||
number: number;
|
||||
absoluteNumber: number;
|
||||
seasonNumber: number;
|
||||
lastUpdated: string;
|
||||
finaleType?: string;
|
||||
year: string;
|
||||
}
|
||||
|
||||
export interface TvdbSeasonDetails {
|
||||
id: number;
|
||||
seriesId: number;
|
||||
type: TvdbType;
|
||||
number: number;
|
||||
nameTranslations: string[];
|
||||
overviewTranslations: string[];
|
||||
image: string;
|
||||
imageType: number;
|
||||
companies: {
|
||||
studio: TvdbCompany[];
|
||||
network: TvdbCompany[];
|
||||
production: TvdbCompany[];
|
||||
distributor: TvdbCompany[];
|
||||
special_effects: TvdbCompany[];
|
||||
};
|
||||
lastUpdated: string;
|
||||
year: string;
|
||||
episodes: TvdbEpisode[];
|
||||
trailers: string[];
|
||||
artwork: TvdbArtwork[];
|
||||
tagOptions?: string[];
|
||||
firstAired: string;
|
||||
}
|
||||
|
||||
export interface TvdbEpisodeTranslation {
|
||||
name: string;
|
||||
overview: string;
|
||||
language: string;
|
||||
}
|
||||
|
||||
const TMDB_TO_TVDB_MAPPING: Record<string, string> & {
|
||||
[key in AvailableLocale]: string;
|
||||
} = {
|
||||
ar: 'ara', // Arabic
|
||||
bg: 'bul', // Bulgarian
|
||||
ca: 'cat', // Catalan
|
||||
cs: 'ces', // Czech
|
||||
da: 'dan', // Danish
|
||||
de: 'deu', // German
|
||||
el: 'ell', // Greek
|
||||
en: 'eng', // English
|
||||
es: 'spa', // Spanish
|
||||
fi: 'fin', // Finnish
|
||||
fr: 'fra', // French
|
||||
he: 'heb', // Hebrew
|
||||
hi: 'hin', // Hindi
|
||||
hr: 'hrv', // Croatian
|
||||
hu: 'hun', // Hungarian
|
||||
it: 'ita', // Italian
|
||||
ja: 'jpn', // Japanese
|
||||
ko: 'kor', // Korean
|
||||
lt: 'lit', // Lithuanian
|
||||
nl: 'nld', // Dutch
|
||||
pl: 'pol', // Polish
|
||||
ro: 'ron', // Romanian
|
||||
ru: 'rus', // Russian
|
||||
sq: 'sqi', // Albanian
|
||||
sr: 'srp', // Serbian
|
||||
sv: 'swe', // Swedish
|
||||
tr: 'tur', // Turkish
|
||||
uk: 'ukr', // Ukrainian
|
||||
|
||||
'es-MX': 'spa', // Spanish (Latin America) -> Spanish
|
||||
'nb-NO': 'nor', // Norwegian Bokmål -> Norwegian
|
||||
'pt-BR': 'pt', // Portuguese (Brazil) -> Portuguese - Brazil (from TVDB data)
|
||||
'pt-PT': 'por', // Portuguese (Portugal) -> Portuguese - Portugal (from TVDB data)
|
||||
'zh-CN': 'zho', // Chinese (Simplified) -> Chinese - China
|
||||
'zh-TW': 'zhtw', // Chinese (Traditional) -> Chinese - Taiwan
|
||||
};
|
||||
|
||||
export function convertTMDBToTVDB(tmdbCode: string): string | null {
|
||||
const normalizedCode = tmdbCode.toLowerCase();
|
||||
|
||||
return (
|
||||
TMDB_TO_TVDB_MAPPING[tmdbCode] ||
|
||||
TMDB_TO_TVDB_MAPPING[normalizedCode] ||
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
export function convertTmdbLanguageToTvdbWithFallback(
|
||||
tmdbCode: string,
|
||||
fallback: string
|
||||
): string {
|
||||
// First try exact match
|
||||
const tvdbCode = convertTMDBToTVDB(tmdbCode);
|
||||
if (tvdbCode) return tvdbCode;
|
||||
|
||||
return tvdbCode || fallback || 'eng'; // Default to English if no match found
|
||||
}
|
||||
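For reference, the removed locale-mapping helper above resolves a TMDB language code to a TVDB three-letter code and falls back when no mapping exists. A minimal usage sketch (expected values follow the mapping table in this file; the 'xx' input is a made-up unmapped code used only for illustration):

```typescript
// Illustrative only; outputs follow TMDB_TO_TVDB_MAPPING as defined above.
convertTmdbLanguageToTvdbWithFallback('pt-BR', 'eng'); // 'pt'  (Portuguese - Brazil)
convertTmdbLanguageToTvdbWithFallback('fr', 'eng');    // 'fra'
convertTmdbLanguageToTvdbWithFallback('xx', 'eng');    // 'eng' (no mapping, falls back)
```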
@@ -9,6 +9,7 @@ import notificationManager from '@server/lib/notifications';
|
||||
import DiscordAgent from '@server/lib/notifications/agents/discord';
|
||||
import EmailAgent from '@server/lib/notifications/agents/email';
|
||||
import GotifyAgent from '@server/lib/notifications/agents/gotify';
|
||||
import LunaSeaAgent from '@server/lib/notifications/agents/lunasea';
|
||||
import NtfyAgent from '@server/lib/notifications/agents/ntfy';
|
||||
import PushbulletAgent from '@server/lib/notifications/agents/pushbullet';
|
||||
import PushoverAgent from '@server/lib/notifications/agents/pushover';
|
||||
@@ -25,7 +26,6 @@ import imageproxy from '@server/routes/imageproxy';
|
||||
import { appDataPermissions } from '@server/utils/appDataVolume';
|
||||
import { getAppVersion } from '@server/utils/appVersion';
|
||||
import createCustomProxyAgent from '@server/utils/customProxyAgent';
|
||||
import { initializeDnsCache } from '@server/utils/dnsCache';
|
||||
import restartFlag from '@server/utils/restartFlag';
|
||||
import { getClientIp } from '@supercharge/request-ip';
|
||||
import axios from 'axios';
|
||||
@@ -81,14 +81,6 @@ app
|
||||
axios.defaults.httpsAgent = new https.Agent({ family: 4 });
|
||||
}
|
||||
|
||||
// Add DNS caching
|
||||
if (settings.network.dnsCache?.enabled) {
|
||||
initializeDnsCache({
|
||||
forceMinTtl: settings.network.dnsCache.forceMinTtl,
|
||||
forceMaxTtl: settings.network.dnsCache.forceMaxTtl,
|
||||
});
|
||||
}
|
||||
|
||||
// Register HTTP proxy
|
||||
if (settings.network.proxy.enabled) {
|
||||
await createCustomProxyAgent(settings.network.proxy);
|
||||
@@ -121,6 +113,7 @@ app
|
||||
new EmailAgent(),
|
||||
new GotifyAgent(),
|
||||
new NtfyAgent(),
|
||||
new LunaSeaAgent(),
|
||||
new PushbulletAgent(),
|
||||
new PushoverAgent(),
|
||||
new SlackAgent(),
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import type { DnsEntries, DnsStats } from 'dns-caching';
|
||||
import type { PaginatedResponse } from './common';
|
||||
|
||||
export type LogMessage = {
|
||||
@@ -65,10 +64,6 @@ export interface CacheItem {
|
||||
export interface CacheResponse {
|
||||
apiCaches: CacheItem[];
|
||||
imageCache: Record<'tmdb' | 'avatar', { size: number; imageCount: number }>;
|
||||
dnsCache: {
|
||||
stats: DnsStats | undefined;
|
||||
entries: DnsEntries | undefined;
|
||||
};
|
||||
}
|
||||
|
||||
export interface StatusResponse {
|
||||
|
||||
@@ -72,7 +72,6 @@ class BlacklistedTagProcessor implements RunnableScanner<StatusBase> {
|
||||
const blacklistedTagsArr = blacklistedTags.split(',');
|
||||
|
||||
const pageLimit = settings.main.blacklistedTagsLimit;
|
||||
const invalidKeywords = new Set<string>();
|
||||
|
||||
if (blacklistedTags.length === 0) {
|
||||
return;
|
||||
@@ -88,19 +87,6 @@ class BlacklistedTagProcessor implements RunnableScanner<StatusBase> {
|
||||
|
||||
// Iterate for each tag
|
||||
for (const tag of blacklistedTagsArr) {
|
||||
const keywordDetails = await tmdb.getKeywordDetails({
|
||||
keywordId: Number(tag),
|
||||
});
|
||||
|
||||
if (keywordDetails === null) {
|
||||
logger.warn('Skipping invalid keyword in blacklisted tags', {
|
||||
label: 'Blacklisted Tags Processor',
|
||||
keywordId: tag,
|
||||
});
|
||||
invalidKeywords.add(tag);
|
||||
continue;
|
||||
}
|
||||
|
||||
let queryMax = pageLimit * SortOptionsIterable.length;
|
||||
let fixedSortMode = false; // Set to true when the page limit allows for getting every page of tag
|
||||
|
||||
@@ -116,51 +102,24 @@ class BlacklistedTagProcessor implements RunnableScanner<StatusBase> {
|
||||
throw new AbortTransaction();
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await getDiscover({
|
||||
page,
|
||||
sortBy,
|
||||
keywords: tag,
|
||||
});
|
||||
const response = await getDiscover({
|
||||
page,
|
||||
sortBy,
|
||||
keywords: tag,
|
||||
});
|
||||
await this.processResults(response, tag, type, em);
|
||||
await new Promise((res) => setTimeout(res, TMDB_API_DELAY_MS));
|
||||
|
||||
await this.processResults(response, tag, type, em);
|
||||
await new Promise((res) => setTimeout(res, TMDB_API_DELAY_MS));
|
||||
|
||||
this.progress++;
|
||||
if (page === 1 && response.total_pages <= queryMax) {
|
||||
// We will finish the tag with less queries than expected, move progress accordingly
|
||||
this.progress += queryMax - response.total_pages;
|
||||
fixedSortMode = true;
|
||||
queryMax = response.total_pages;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error processing keyword in blacklisted tags', {
|
||||
label: 'Blacklisted Tags Processor',
|
||||
keywordId: tag,
|
||||
errorMessage: error.message,
|
||||
});
|
||||
this.progress++;
|
||||
if (page === 1 && response.total_pages <= queryMax) {
|
||||
// We will finish the tag with less queries than expected, move progress accordingly
|
||||
this.progress += queryMax - response.total_pages;
|
||||
fixedSortMode = true;
|
||||
queryMax = response.total_pages;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (invalidKeywords.size > 0) {
|
||||
const currentTags = blacklistedTagsArr.filter(
|
||||
(tag) => !invalidKeywords.has(tag)
|
||||
);
|
||||
const cleanedTags = currentTags.join(',');
|
||||
|
||||
if (cleanedTags !== blacklistedTags) {
|
||||
settings.main.blacklistedTags = cleanedTags;
|
||||
await settings.save();
|
||||
|
||||
logger.info('Cleaned up invalid keywords from settings', {
|
||||
label: 'Blacklisted Tags Processor',
|
||||
removedKeywords: Array.from(invalidKeywords),
|
||||
newBlacklistedTags: cleanedTags,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async processResults(
|
||||
|
||||
@@ -9,8 +9,7 @@ export type AvailableCacheIds =
|
||||
| 'github'
|
||||
| 'plexguid'
|
||||
| 'plextv'
|
||||
| 'plexwatchlist'
|
||||
| 'tvdb';
|
||||
| 'plexwatchlist';
|
||||
|
||||
const DEFAULT_TTL = 300;
|
||||
const DEFAULT_CHECK_PERIOD = 120;
|
||||
@@ -71,10 +70,6 @@ class CacheManager {
|
||||
checkPeriod: 60,
|
||||
}),
|
||||
plexwatchlist: new Cache('plexwatchlist', 'Plex Watchlist'),
|
||||
tvdb: new Cache('tvdb', 'The TVDB API', {
|
||||
stdTtl: 21600,
|
||||
checkPeriod: 60 * 30,
|
||||
}),
|
||||
};
|
||||
|
||||
public getCache(id: AvailableCacheIds): Cache {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import logger from '@server/logger';
|
||||
import { requestInterceptorFunction } from '@server/utils/customProxyAgent';
|
||||
import axios from 'axios';
|
||||
import rateLimit, { type rateLimitOptions } from 'axios-rate-limit';
|
||||
import { createHash } from 'crypto';
|
||||
@@ -151,7 +150,8 @@ class ImageProxy {
|
||||
baseURL: baseUrl,
|
||||
headers: options.headers,
|
||||
});
|
||||
this.axios.interceptors.request.use(requestInterceptorFunction);
|
||||
this.axios.interceptors.request = axios.interceptors.request;
|
||||
this.axios.interceptors.response = axios.interceptors.response;
|
||||
|
||||
if (options.rateLimitOptions) {
|
||||
this.axios = rateLimit(this.axios, options.rateLimitOptions);
|
||||
|
||||
@@ -109,9 +109,7 @@ class DiscordAgent
|
||||
type: Notification,
|
||||
payload: NotificationPayload
|
||||
): DiscordRichEmbed {
|
||||
const settings = getSettings();
|
||||
const { applicationUrl } = settings.main;
|
||||
const { embedPoster } = settings.notifications.agents.discord;
|
||||
const { applicationUrl } = getSettings().main;
|
||||
|
||||
const appUrl =
|
||||
applicationUrl || `http://localhost:${process.env.port || 5055}`;
|
||||
@@ -225,11 +223,9 @@ class DiscordAgent
|
||||
}
|
||||
: undefined,
|
||||
fields,
|
||||
thumbnail: embedPoster
|
||||
? {
|
||||
url: payload.image,
|
||||
}
|
||||
: undefined,
|
||||
thumbnail: {
|
||||
url: payload.image,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -48,9 +48,7 @@ class EmailAgent
|
||||
recipientEmail: string,
|
||||
recipientName?: string
|
||||
): EmailOptions | undefined {
|
||||
const settings = getSettings();
|
||||
const { applicationUrl, applicationTitle } = settings.main;
|
||||
const { embedPoster } = settings.notifications.agents.email;
|
||||
const { applicationUrl, applicationTitle } = getSettings().main;
|
||||
|
||||
if (type === Notification.TEST_NOTIFICATION) {
|
||||
return {
|
||||
@@ -131,7 +129,7 @@ class EmailAgent
|
||||
body,
|
||||
mediaName: payload.subject,
|
||||
mediaExtra: payload.extra ?? [],
|
||||
imageUrl: embedPoster ? payload.image : undefined,
|
||||
imageUrl: payload.image,
|
||||
timestamp: new Date().toTimeString(),
|
||||
requestedBy: payload.request.requestedBy.displayName,
|
||||
actionUrl: applicationUrl
|
||||
@@ -178,7 +176,7 @@ class EmailAgent
|
||||
issueComment: payload.comment?.message,
|
||||
mediaName: payload.subject,
|
||||
extra: payload.extra ?? [],
|
||||
imageUrl: embedPoster ? payload.image : undefined,
|
||||
imageUrl: payload.image,
|
||||
timestamp: new Date().toTimeString(),
|
||||
actionUrl: applicationUrl
|
||||
? `${applicationUrl}/issues/${payload.issue.id}`
|
||||
|
||||
@@ -35,7 +35,7 @@ class GotifyAgent
|
||||
settings.enabled &&
|
||||
settings.options.url &&
|
||||
settings.options.token &&
|
||||
settings.options.priority !== undefined
|
||||
settings.options.priority
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
server/lib/notifications/agents/lunasea.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
import { IssueStatus, IssueType } from '@server/constants/issue';
|
||||
import { MediaStatus } from '@server/constants/media';
|
||||
import type { NotificationAgentLunaSea } from '@server/lib/settings';
|
||||
import { getSettings } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import axios from 'axios';
|
||||
import { hasNotificationType, Notification } from '..';
|
||||
import type { NotificationAgent, NotificationPayload } from './agent';
|
||||
import { BaseAgent } from './agent';
|
||||
|
||||
class LunaSeaAgent
|
||||
extends BaseAgent<NotificationAgentLunaSea>
|
||||
implements NotificationAgent
|
||||
{
|
||||
protected getSettings(): NotificationAgentLunaSea {
|
||||
if (this.settings) {
|
||||
return this.settings;
|
||||
}
|
||||
|
||||
const settings = getSettings();
|
||||
|
||||
return settings.notifications.agents.lunasea;
|
||||
}
|
||||
|
||||
private buildPayload(type: Notification, payload: NotificationPayload) {
|
||||
return {
|
||||
notification_type: Notification[type],
|
||||
event: payload.event,
|
||||
subject: payload.subject,
|
||||
message: payload.message,
|
||||
image: payload.image ?? null,
|
||||
email: payload.notifyUser?.email,
|
||||
username: payload.notifyUser?.displayName,
|
||||
avatar: payload.notifyUser?.avatar,
|
||||
media: payload.media
|
||||
? {
|
||||
media_type: payload.media.mediaType,
|
||||
tmdbId: payload.media.tmdbId,
|
||||
tvdbId: payload.media.tvdbId,
|
||||
status: MediaStatus[payload.media.status],
|
||||
status4k: MediaStatus[payload.media.status4k],
|
||||
}
|
||||
: null,
|
||||
extra: payload.extra ?? [],
|
||||
request: payload.request
|
||||
? {
|
||||
request_id: payload.request.id,
|
||||
requestedBy_email: payload.request.requestedBy.email,
|
||||
requestedBy_username: payload.request.requestedBy.displayName,
|
||||
requestedBy_avatar: payload.request.requestedBy.avatar,
|
||||
}
|
||||
: null,
|
||||
issue: payload.issue
|
||||
? {
|
||||
issue_id: payload.issue.id,
|
||||
issue_type: IssueType[payload.issue.issueType],
|
||||
issue_status: IssueStatus[payload.issue.status],
|
||||
createdBy_email: payload.issue.createdBy.email,
|
||||
createdBy_username: payload.issue.createdBy.displayName,
|
||||
createdBy_avatar: payload.issue.createdBy.avatar,
|
||||
}
|
||||
: null,
|
||||
comment: payload.comment
|
||||
? {
|
||||
comment_message: payload.comment.message,
|
||||
commentedBy_email: payload.comment.user.email,
|
||||
commentedBy_username: payload.comment.user.displayName,
|
||||
commentedBy_avatar: payload.comment.user.avatar,
|
||||
}
|
||||
: null,
|
||||
};
|
||||
}
|
||||
|
||||
public shouldSend(): boolean {
|
||||
const settings = this.getSettings();
|
||||
|
||||
if (settings.enabled && settings.options.webhookUrl) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public async send(
|
||||
type: Notification,
|
||||
payload: NotificationPayload
|
||||
): Promise<boolean> {
|
||||
const settings = this.getSettings();
|
||||
|
||||
if (
|
||||
!payload.notifySystem ||
|
||||
!hasNotificationType(type, settings.types ?? 0)
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
logger.debug('Sending LunaSea notification', {
|
||||
label: 'Notifications',
|
||||
type: Notification[type],
|
||||
subject: payload.subject,
|
||||
});
|
||||
|
||||
try {
|
||||
await axios.post(
|
||||
settings.options.webhookUrl,
|
||||
this.buildPayload(type, payload),
|
||||
settings.options.profileName
|
||||
? {
|
||||
headers: {
|
||||
Authorization: `Basic ${Buffer.from(
|
||||
`${settings.options.profileName}:`
|
||||
).toString('base64')}`,
|
||||
},
|
||||
}
|
||||
: undefined
|
||||
);
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
logger.error('Error sending LunaSea notification', {
|
||||
label: 'Notifications',
|
||||
type: Notification[type],
|
||||
subject: payload.subject,
|
||||
errorMessage: e.message,
|
||||
response: e?.response?.data,
|
||||
});
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default LunaSeaAgent;
|
||||
@@ -22,9 +22,7 @@ class NtfyAgent
|
||||
}
|
||||
|
||||
private buildPayload(type: Notification, payload: NotificationPayload) {
|
||||
const settings = getSettings();
|
||||
const { applicationUrl } = settings.main;
|
||||
const { embedPoster } = settings.notifications.agents.ntfy;
|
||||
const { applicationUrl } = getSettings().main;
|
||||
|
||||
const topic = this.getSettings().options.topic;
|
||||
const priority = 3;
|
||||
@@ -74,7 +72,7 @@ class NtfyAgent
|
||||
message += `\n\n**${extra.name}**\n${extra.value}`;
|
||||
}
|
||||
|
||||
const attach = embedPoster ? payload.image : undefined;
|
||||
const attach = payload.image;
|
||||
|
||||
let click;
|
||||
if (applicationUrl && payload.media) {
|
||||
|
||||
@@ -78,9 +78,7 @@ class PushoverAgent
|
||||
type: Notification,
|
||||
payload: NotificationPayload
|
||||
): Promise<Partial<PushoverPayload>> {
|
||||
const settings = getSettings();
|
||||
const { applicationUrl, applicationTitle } = settings.main;
|
||||
const { embedPoster } = settings.notifications.agents.pushover;
|
||||
const { applicationUrl, applicationTitle } = getSettings().main;
|
||||
|
||||
const title = payload.event ?? payload.subject;
|
||||
let message = payload.event ? `<b>${payload.subject}</b>` : '';
|
||||
@@ -157,7 +155,7 @@ class PushoverAgent
|
||||
|
||||
let attachment_base64;
|
||||
let attachment_type;
|
||||
if (embedPoster && payload.image) {
|
||||
if (payload.image) {
|
||||
const imagePayload = await this.getImagePayload(payload.image);
|
||||
if (imagePayload.attachment_base64 && imagePayload.attachment_type) {
|
||||
attachment_base64 = imagePayload.attachment_base64;
|
||||
|
||||
@@ -63,9 +63,7 @@ class SlackAgent
|
||||
type: Notification,
|
||||
payload: NotificationPayload
|
||||
): SlackBlockEmbed {
|
||||
const settings = getSettings();
|
||||
const { applicationUrl, applicationTitle } = settings.main;
|
||||
const { embedPoster } = settings.notifications.agents.slack;
|
||||
const { applicationUrl, applicationTitle } = getSettings().main;
|
||||
|
||||
const fields: EmbedField[] = [];
|
||||
|
||||
@@ -161,14 +159,13 @@ class SlackAgent
|
||||
type: 'mrkdwn',
|
||||
text: payload.message,
|
||||
},
|
||||
accessory:
|
||||
embedPoster && payload.image
|
||||
? {
|
||||
type: 'image',
|
||||
image_url: payload.image,
|
||||
alt_text: payload.subject,
|
||||
}
|
||||
: undefined,
|
||||
accessory: payload.image
|
||||
? {
|
||||
type: 'image',
|
||||
image_url: payload.image,
|
||||
alt_text: payload.subject,
|
||||
}
|
||||
: undefined,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -65,9 +65,7 @@ class TelegramAgent
|
||||
type: Notification,
|
||||
payload: NotificationPayload
|
||||
): Partial<TelegramMessagePayload | TelegramPhotoPayload> {
|
||||
const settings = getSettings();
|
||||
const { applicationUrl, applicationTitle } = settings.main;
|
||||
const { embedPoster } = settings.notifications.agents.telegram;
|
||||
const { applicationUrl, applicationTitle } = getSettings().main;
|
||||
|
||||
/* eslint-disable no-useless-escape */
|
||||
let message = `\*${this.escapeText(
|
||||
@@ -144,7 +142,7 @@ class TelegramAgent
|
||||
}
|
||||
/* eslint-enable */
|
||||
|
||||
return embedPoster && payload.image
|
||||
return payload.image
|
||||
? {
|
||||
photo: payload.image,
|
||||
caption: message,
|
||||
@@ -162,7 +160,7 @@ class TelegramAgent
|
||||
): Promise<boolean> {
|
||||
const settings = this.getSettings();
|
||||
const endpoint = `${this.baseUrl}bot${settings.options.botAPI}/${
|
||||
settings.embedPoster && payload.image ? 'sendPhoto' : 'sendMessage'
|
||||
payload.image ? 'sendPhoto' : 'sendMessage'
|
||||
}`;
|
||||
const notificationPayload = this.getNotificationPayload(type, payload);
|
||||
|
||||
|
||||
@@ -177,27 +177,9 @@ class WebhookAgent
|
||||
subject: payload.subject,
|
||||
});
|
||||
|
||||
let webhookUrl = settings.options.webhookUrl;
|
||||
|
||||
if (settings.options.supportVariables) {
|
||||
Object.keys(KeyMap).forEach((keymapKey) => {
|
||||
const keymapValue = KeyMap[keymapKey as keyof typeof KeyMap];
|
||||
const variableValue =
|
||||
type === Notification.TEST_NOTIFICATION
|
||||
? 'test'
|
||||
: typeof keymapValue === 'function'
|
||||
? keymapValue(payload, type)
|
||||
: get(payload, keymapValue) || 'test';
|
||||
webhookUrl = webhookUrl.replace(
|
||||
new RegExp(`{{${keymapKey}}}`, 'g'),
|
||||
encodeURIComponent(variableValue)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await axios.post(
|
||||
webhookUrl,
|
||||
settings.options.webhookUrl,
|
||||
this.buildPayload(type, payload),
|
||||
settings.options.authHeader
|
||||
? {
|
||||
|
||||
@@ -42,8 +42,6 @@ class WebPushAgent
|
||||
type: Notification,
|
||||
payload: NotificationPayload
|
||||
): PushNotificationPayload {
|
||||
const { embedPoster } = getSettings().notifications.agents.webpush;
|
||||
|
||||
const mediaType = payload.media
|
||||
? payload.media.mediaType === MediaType.MOVIE
|
||||
? 'movie'
|
||||
@@ -130,7 +128,7 @@ class WebPushAgent
|
||||
notificationType: Notification[type],
|
||||
subject: payload.subject,
|
||||
message,
|
||||
image: embedPoster ? payload.image : undefined,
|
||||
image: payload.image,
|
||||
requestId: payload.request?.id,
|
||||
actionUrl,
|
||||
actionUrlTitle,
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
import animeList from '@server/api/animelist';
|
||||
import type { JellyfinLibraryItem } from '@server/api/jellyfin';
|
||||
import JellyfinAPI from '@server/api/jellyfin';
|
||||
import { getMetadataProvider } from '@server/api/metadata';
|
||||
import TheMovieDb from '@server/api/themoviedb';
|
||||
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
|
||||
import type {
|
||||
TmdbKeyword,
|
||||
TmdbTvDetails,
|
||||
} from '@server/api/themoviedb/interfaces';
|
||||
import type { TmdbTvDetails } from '@server/api/themoviedb/interfaces';
|
||||
import { MediaStatus, MediaType } from '@server/constants/media';
|
||||
import { MediaServerType } from '@server/constants/server';
|
||||
import { getRepository } from '@server/datasource';
|
||||
@@ -46,11 +40,9 @@ class JellyfinScanner {
|
||||
private enable4kMovie = false;
|
||||
private enable4kShow = false;
|
||||
private asyncLock = new AsyncLock();
|
||||
private processedAnidbSeason: Map<number, Map<number, number>>;
|
||||
|
||||
constructor({ isRecentOnly }: { isRecentOnly?: boolean } = {}) {
|
||||
this.tmdb = new TheMovieDb();
|
||||
|
||||
this.isRecentOnly = isRecentOnly ?? false;
|
||||
}
|
||||
|
||||
@@ -68,7 +60,7 @@ class JellyfinScanner {
|
||||
const mediaRepository = getRepository(Media);
|
||||
|
||||
try {
|
||||
let metadata = await this.jfClient.getItemData(jellyfinitem.Id);
|
||||
const metadata = await this.jfClient.getItemData(jellyfinitem.Id);
|
||||
const newMedia = new Media();
|
||||
|
||||
if (!metadata?.Id) {
|
||||
@@ -79,18 +71,8 @@ class JellyfinScanner {
|
||||
return;
|
||||
}
|
||||
|
||||
const anidbId = Number(metadata.ProviderIds.AniDB ?? null);
|
||||
|
||||
newMedia.tmdbId = Number(metadata.ProviderIds.Tmdb ?? null);
|
||||
newMedia.imdbId = metadata.ProviderIds.Imdb;
|
||||
|
||||
// We use anidb only if we have the anidbId and nothing else
|
||||
if (anidbId && !newMedia.imdbId && !newMedia.tmdbId) {
|
||||
const result = animeList.getFromAnidbId(anidbId);
|
||||
newMedia.tmdbId = Number(result?.tmdbId ?? null);
|
||||
newMedia.imdbId = result?.imdbId;
|
||||
}
|
||||
|
||||
if (newMedia.imdbId && !isNaN(newMedia.tmdbId)) {
|
||||
const tmdbMovie = await this.tmdb.getMediaByImdbId({
|
||||
imdbId: newMedia.imdbId,
|
||||
@@ -101,40 +83,6 @@ class JellyfinScanner {
|
||||
throw new Error('Unable to find TMDb ID');
|
||||
}
|
||||
|
||||
// With AniDB we can have mixed libraries with movies in a "show" library
|
||||
// We take the first episode of the first season (the movie) and use it to
|
||||
// get more information, like the MediaSource
|
||||
if (anidbId && metadata.Type === 'Series') {
|
||||
const season = (await this.jfClient.getSeasons(jellyfinitem.Id)).find(
|
||||
(md) => {
|
||||
return md.IndexNumber === 1;
|
||||
}
|
||||
);
|
||||
if (!season) {
|
||||
this.log('No season found for anidb movie', 'debug', {
|
||||
jellyfinitem,
|
||||
});
|
||||
return;
|
||||
}
|
||||
const episodes = await this.jfClient.getEpisodes(
|
||||
jellyfinitem.Id,
|
||||
season.Id
|
||||
);
|
||||
if (!episodes[0]) {
|
||||
this.log('No episode found for anidb movie', 'debug', {
|
||||
jellyfinitem,
|
||||
});
|
||||
return;
|
||||
}
|
||||
metadata = await this.jfClient.getItemData(episodes[0].Id);
|
||||
if (!metadata) {
|
||||
this.log('No metadata found for anidb movie', 'debug', {
|
||||
jellyfinitem,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const has4k = metadata.MediaSources?.some((MediaSource) => {
|
||||
return MediaSource.MediaStreams.filter(
|
||||
(MediaStream) => MediaStream.Type === 'Video'
|
||||
@@ -152,12 +100,6 @@ class JellyfinScanner {
|
||||
});
|
||||
|
||||
await this.asyncLock.dispatch(newMedia.tmdbId, async () => {
|
||||
if (!metadata) {
|
||||
// this will never execute, but typescript thinks somebody could reset tvShow from
|
||||
// outer scope back to null before this async gets called
|
||||
return;
|
||||
}
|
||||
|
||||
const existing = await this.getExisting(
|
||||
newMedia.tmdbId,
|
||||
MediaType.MOVIE
|
||||
@@ -250,42 +192,6 @@ class JellyfinScanner {
|
||||
}
|
||||
}
|
||||
|
||||
private async getTvShow({
|
||||
tmdbId,
|
||||
tvdbId,
|
||||
}: {
|
||||
tmdbId?: number;
|
||||
tvdbId?: number;
|
||||
}): Promise<TmdbTvDetails> {
|
||||
let tvShow;
|
||||
|
||||
if (tmdbId) {
|
||||
tvShow = await this.tmdb.getTvShow({
|
||||
tvId: Number(tmdbId),
|
||||
});
|
||||
} else if (tvdbId) {
|
||||
tvShow = await this.tmdb.getShowByTvdbId({
|
||||
tvdbId: Number(tvdbId),
|
||||
});
|
||||
} else {
|
||||
throw new Error('No ID provided');
|
||||
}
|
||||
|
||||
const metadataProvider = tvShow.keywords.results.some(
|
||||
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
|
||||
)
|
||||
? await getMetadataProvider('anime')
|
||||
: await getMetadataProvider('tv');
|
||||
|
||||
if (!(metadataProvider instanceof TheMovieDb)) {
|
||||
tvShow = await metadataProvider.getTvShow({
|
||||
tvId: Number(tmdbId),
|
||||
});
|
||||
}
|
||||
|
||||
return tvShow;
|
||||
}
|
||||
|
||||
private async processShow(jellyfinitem: JellyfinLibraryItem) {
|
||||
const mediaRepository = getRepository(Media);
|
||||
|
||||
@@ -306,8 +212,8 @@ class JellyfinScanner {
|
||||
|
||||
if (metadata.ProviderIds.Tmdb) {
|
||||
try {
|
||||
tvShow = await this.getTvShow({
|
||||
tmdbId: Number(metadata.ProviderIds.Tmdb),
|
||||
tvShow = await this.tmdb.getTvShow({
|
||||
tvId: Number(metadata.ProviderIds.Tmdb),
|
||||
});
|
||||
} catch {
|
||||
this.log('Unable to find TMDb ID for this title.', 'debug', {
|
||||
@@ -317,7 +223,7 @@ class JellyfinScanner {
|
||||
}
|
||||
if (!tvShow && metadata.ProviderIds.Tvdb) {
|
||||
try {
|
||||
tvShow = await this.getTvShow({
|
||||
tvShow = await this.tmdb.getShowByTvdbId({
|
||||
tvdbId: Number(metadata.ProviderIds.Tvdb),
|
||||
});
|
||||
} catch {
|
||||
@@ -326,28 +232,6 @@ class JellyfinScanner {
|
||||
});
|
||||
}
|
||||
}
|
||||
let tvdbSeasonFromAnidb: number | undefined;
|
||||
if (!tvShow && metadata.ProviderIds.AniDB) {
|
||||
const anidbId = Number(metadata.ProviderIds.AniDB);
|
||||
const result = animeList.getFromAnidbId(anidbId);
|
||||
tvdbSeasonFromAnidb = result?.tvdbSeason;
|
||||
if (result?.tvdbId) {
|
||||
try {
|
||||
tvShow = await this.tmdb.getShowByTvdbId({
|
||||
tvdbId: result.tvdbId,
|
||||
});
|
||||
} catch {
|
||||
this.log('Unable to find AniDB ID for this title.', 'debug', {
|
||||
jellyfinitem,
|
||||
});
|
||||
}
|
||||
}
|
||||
// With AniDB we can have mixed libraries with movies in a "show" library
|
||||
else if (result?.imdbId || result?.tmdbId) {
|
||||
await this.processMovie(jellyfinitem);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (tvShow) {
|
||||
await this.asyncLock.dispatch(tvShow.id, async () => {
|
||||
@@ -376,20 +260,9 @@ class JellyfinScanner {
|
||||
|
||||
for (const season of seasons) {
|
||||
const JellyfinSeasons = await this.jfClient.getSeasons(Id);
|
||||
const matchedJellyfinSeason = JellyfinSeasons.find((md) => {
|
||||
if (tvdbSeasonFromAnidb) {
|
||||
// In AniDB we don't have the concept of seasons,
|
||||
// we have multiple shows with only Season 1 (and sometimes a season with index 0 for specials).
|
||||
// We use tvdbSeasonFromAnidb to check if we are on the correct TMDB season and
|
||||
// md.IndexNumber === 1 to be sure to find the correct season on jellyfin
|
||||
return (
|
||||
tvdbSeasonFromAnidb === season.season_number &&
|
||||
md.IndexNumber === 1
|
||||
);
|
||||
} else {
|
||||
return Number(md.IndexNumber) === season.season_number;
|
||||
}
|
||||
});
|
||||
const matchedJellyfinSeason = JellyfinSeasons.find(
|
||||
(md) => Number(md.IndexNumber) === season.season_number
|
||||
);
|
||||
|
||||
const existingSeason = media?.seasons.find(
|
||||
(es) => es.seasonNumber === season.season_number
|
||||
@@ -442,29 +315,6 @@ class JellyfinScanner {
|
||||
}
|
||||
}
|
||||
|
||||
// With AniDB we can have multiple shows for one season, so we need to save
|
||||
// the episode from all the jellyfin entries to get the total
|
||||
if (tvdbSeasonFromAnidb) {
|
||||
if (this.processedAnidbSeason.has(tvShow.id)) {
|
||||
const show = this.processedAnidbSeason.get(tvShow.id)!;
|
||||
if (show.has(season.season_number)) {
|
||||
show.set(
|
||||
season.season_number,
|
||||
show.get(season.season_number)! + totalStandard
|
||||
);
|
||||
|
||||
totalStandard = show.get(season.season_number)!;
|
||||
} else {
|
||||
show.set(season.season_number, totalStandard);
|
||||
}
|
||||
} else {
|
||||
this.processedAnidbSeason.set(
|
||||
tvShow.id,
|
||||
new Map([[season.season_number, totalStandard]])
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
media &&
|
||||
(totalStandard > 0 || (total4k > 0 && !this.enable4kShow)) &&
|
||||
@@ -677,7 +527,6 @@ class JellyfinScanner {
|
||||
}
|
||||
|
||||
private async processItems(slicedItems: JellyfinLibraryItem[]) {
|
||||
this.processedAnidbSeason = new Map();
|
||||
await Promise.all(
|
||||
slicedItems.map(async (item) => {
|
||||
if (item.Type === 'Movie') {
|
||||
@@ -775,8 +624,6 @@ class JellyfinScanner {
|
||||
(library) => library.enabled
|
||||
);
|
||||
|
||||
await animeList.sync();
|
||||
|
||||
this.enable4kMovie = settings.radarr.some((radarr) => radarr.is4k);
|
||||
if (this.enable4kMovie) {
|
||||
this.log(
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
import animeList from '@server/api/animelist';
|
||||
import { getMetadataProvider } from '@server/api/metadata';
|
||||
import type { PlexLibraryItem, PlexMetadata } from '@server/api/plexapi';
|
||||
import PlexAPI from '@server/api/plexapi';
|
||||
import TheMovieDb from '@server/api/themoviedb';
|
||||
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
|
||||
import type {
|
||||
TmdbKeyword,
|
||||
TmdbTvDetails,
|
||||
} from '@server/api/themoviedb/interfaces';
|
||||
import type { TmdbTvDetails } from '@server/api/themoviedb/interfaces';
|
||||
import { getRepository } from '@server/datasource';
|
||||
import { User } from '@server/entity/User';
|
||||
import cacheManager from '@server/lib/cache';
|
||||
@@ -255,42 +249,6 @@ class PlexScanner
|
||||
});
|
||||
}
|
||||
|
||||
private async getTvShow({
|
||||
tmdbId,
|
||||
tvdbId,
|
||||
}: {
|
||||
tmdbId?: number;
|
||||
tvdbId?: number;
|
||||
}): Promise<TmdbTvDetails> {
|
||||
let tvShow;
|
||||
|
||||
if (tmdbId) {
|
||||
tvShow = await this.tmdb.getTvShow({
|
||||
tvId: Number(tmdbId),
|
||||
});
|
||||
} else if (tvdbId) {
|
||||
tvShow = await this.tmdb.getShowByTvdbId({
|
||||
tvdbId: Number(tvdbId),
|
||||
});
|
||||
} else {
|
||||
throw new Error('No ID provided');
|
||||
}
|
||||
|
||||
const metadataProvider = tvShow.keywords.results.some(
|
||||
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
|
||||
)
|
||||
? await getMetadataProvider('anime')
|
||||
: await getMetadataProvider('tv');
|
||||
|
||||
if (!(metadataProvider instanceof TheMovieDb)) {
|
||||
tvShow = await metadataProvider.getTvShow({
|
||||
tvId: Number(tmdbId),
|
||||
});
|
||||
}
|
||||
|
||||
return tvShow;
|
||||
}
|
||||
|
||||
private async processPlexShow(plexitem: PlexLibraryItem) {
|
||||
const ratingKey =
|
||||
plexitem.grandparentRatingKey ??
|
||||
@@ -315,9 +273,7 @@ class PlexScanner
|
||||
await this.processHamaSpecials(metadata, mediaIds.tvdbId);
|
||||
}
|
||||
|
||||
const tvShow = await this.getTvShow({
|
||||
tmdbId: mediaIds.tmdbId,
|
||||
});
|
||||
const tvShow = await this.tmdb.getTvShow({ tvId: mediaIds.tmdbId });
|
||||
|
||||
const seasons = tvShow.seasons;
|
||||
const processableSeasons: ProcessableSeason[] = [];
|
||||
|
||||
@@ -100,16 +100,6 @@ interface Quota {
|
||||
quotaDays?: number;
|
||||
}
|
||||
|
||||
export enum MetadataProviderType {
|
||||
TMDB = 'tmdb',
|
||||
TVDB = 'tvdb',
|
||||
}
|
||||
|
||||
export interface MetadataSettings {
|
||||
tv: MetadataProviderType;
|
||||
anime: MetadataProviderType;
|
||||
}
|
||||
|
||||
export interface ProxySettings {
|
||||
enabled: boolean;
|
||||
hostname: string;
|
||||
@@ -148,29 +138,11 @@ export interface MainSettings {
|
||||
youtubeUrl: string;
|
||||
}
|
||||
|
||||
export interface ProxySettings {
|
||||
enabled: boolean;
|
||||
hostname: string;
|
||||
port: number;
|
||||
useSsl: boolean;
|
||||
user: string;
|
||||
password: string;
|
||||
bypassFilter: string;
|
||||
bypassLocalAddresses: boolean;
|
||||
}
|
||||
|
||||
export interface DnsCacheSettings {
|
||||
enabled: boolean;
|
||||
forceMinTtl?: number;
|
||||
forceMaxTtl?: number;
|
||||
}
|
||||
|
||||
export interface NetworkSettings {
|
||||
csrfProtection: boolean;
|
||||
forceIpv4First: boolean;
|
||||
trustProxy: boolean;
|
||||
proxy: ProxySettings;
|
||||
dnsCache: DnsCacheSettings;
|
||||
}
|
||||
|
||||
interface PublicSettings {
|
||||
@@ -207,7 +179,6 @@ interface FullPublicSettings extends PublicSettings {
|
||||
|
||||
export interface NotificationAgentConfig {
|
||||
enabled: boolean;
|
||||
embedPoster: boolean;
|
||||
types?: number;
|
||||
options: Record<string, unknown>;
|
||||
}
|
||||
@@ -245,6 +216,13 @@ export interface NotificationAgentEmail extends NotificationAgentConfig {
|
||||
};
|
||||
}
|
||||
|
||||
export interface NotificationAgentLunaSea extends NotificationAgentConfig {
|
||||
options: {
|
||||
webhookUrl: string;
|
||||
profileName?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export interface NotificationAgentTelegram extends NotificationAgentConfig {
|
||||
options: {
|
||||
botUsername?: string;
|
||||
@@ -275,7 +253,6 @@ export interface NotificationAgentWebhook extends NotificationAgentConfig {
|
||||
webhookUrl: string;
|
||||
jsonPayload: string;
|
||||
authHeader?: string;
|
||||
supportVariables?: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -317,6 +294,7 @@ interface NotificationAgents {
|
||||
email: NotificationAgentEmail;
|
||||
gotify: NotificationAgentGotify;
|
||||
ntfy: NotificationAgentNtfy;
|
||||
lunasea: NotificationAgentLunaSea;
|
||||
pushbullet: NotificationAgentPushbullet;
|
||||
pushover: NotificationAgentPushover;
|
||||
slack: NotificationAgentSlack;
|
||||
@@ -362,8 +340,6 @@ export interface AllSettings {
|
||||
notifications: NotificationSettings;
|
||||
jobs: Record<JobId, JobSettings>;
|
||||
network: NetworkSettings;
|
||||
metadataSettings: MetadataSettings;
|
||||
migrations: string[];
|
||||
}
|
||||
|
||||
const SETTINGS_PATH = process.env.CONFIG_DIRECTORY
|
||||
@@ -424,10 +400,6 @@ class Settings {
|
||||
apiKey: '',
|
||||
},
|
||||
tautulli: {},
|
||||
metadataSettings: {
|
||||
tv: MetadataProviderType.TMDB,
|
||||
anime: MetadataProviderType.TMDB,
|
||||
},
|
||||
radarr: [],
|
||||
sonarr: [],
|
||||
public: {
|
||||
@@ -437,7 +409,6 @@ class Settings {
|
||||
agents: {
|
||||
email: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
options: {
|
||||
userEmailRequired: false,
|
||||
emailFrom: '',
|
||||
@@ -452,7 +423,6 @@ class Settings {
|
||||
},
|
||||
discord: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
types: 0,
|
||||
options: {
|
||||
webhookUrl: '',
|
||||
@@ -460,9 +430,15 @@ class Settings {
|
||||
enableMentions: true,
|
||||
},
|
||||
},
|
||||
lunasea: {
|
||||
enabled: false,
|
||||
types: 0,
|
||||
options: {
|
||||
webhookUrl: '',
|
||||
},
|
||||
},
|
||||
slack: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
types: 0,
|
||||
options: {
|
||||
webhookUrl: '',
|
||||
@@ -470,7 +446,6 @@ class Settings {
|
||||
},
|
||||
telegram: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
types: 0,
|
||||
options: {
|
||||
botAPI: '',
|
||||
@@ -481,7 +456,6 @@ class Settings {
|
||||
},
|
||||
pushbullet: {
|
||||
enabled: false,
|
||||
embedPoster: false,
|
||||
types: 0,
|
||||
options: {
|
||||
accessToken: '',
|
||||
@@ -489,7 +463,6 @@ class Settings {
|
||||
},
|
||||
pushover: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
types: 0,
|
||||
options: {
|
||||
accessToken: '',
|
||||
@@ -499,7 +472,6 @@ class Settings {
|
||||
},
|
||||
webhook: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
types: 0,
|
||||
options: {
|
||||
webhookUrl: '',
|
||||
@@ -509,12 +481,10 @@ class Settings {
|
||||
},
|
||||
webpush: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
options: {},
|
||||
},
|
||||
gotify: {
|
||||
enabled: false,
|
||||
embedPoster: false,
|
||||
types: 0,
|
||||
options: {
|
||||
url: '',
|
||||
@@ -524,7 +494,6 @@ class Settings {
|
||||
},
|
||||
ntfy: {
|
||||
enabled: false,
|
||||
embedPoster: true,
|
||||
types: 0,
|
||||
options: {
|
||||
url: '',
|
||||
@@ -588,13 +557,7 @@ class Settings {
|
||||
bypassFilter: '',
|
||||
bypassLocalAddresses: true,
|
||||
},
|
||||
dnsCache: {
|
||||
enabled: false,
|
||||
forceMinTtl: 0,
|
||||
forceMaxTtl: -1,
|
||||
},
|
||||
},
|
||||
migrations: [],
|
||||
};
|
||||
if (initialSettings) {
|
||||
this.data = merge(this.data, initialSettings);
|
||||
@@ -633,14 +596,6 @@ class Settings {
|
||||
this.data.tautulli = data;
|
||||
}
|
||||
|
||||
get metadataSettings(): MetadataSettings {
|
||||
return this.data.metadataSettings;
|
||||
}
|
||||
|
||||
set metadataSettings(data: MetadataSettings) {
|
||||
this.data.metadataSettings = data;
|
||||
}
|
||||
|
||||
get radarr(): RadarrSettings[] {
|
||||
return this.data.radarr;
|
||||
}
|
||||
@@ -724,14 +679,6 @@ class Settings {
|
||||
this.data.network = data;
|
||||
}
|
||||
|
||||
get migrations(): string[] {
|
||||
return this.data.migrations;
|
||||
}
|
||||
|
||||
set migrations(data: string[]) {
|
||||
this.data.migrations = data;
|
||||
}
|
||||
|
||||
get clientId(): string {
|
||||
return this.data.clientId;
|
||||
}
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
import type { AllSettings } from '@server/lib/settings';
|
||||
|
||||
const removeLunaSeaSetting = (settings: any): AllSettings => {
|
||||
if (
|
||||
settings.notifications &&
|
||||
settings.notifications.agents &&
|
||||
settings.notifications.agents.lunasea
|
||||
) {
|
||||
delete settings.notifications.agents.lunasea;
|
||||
}
|
||||
return settings;
|
||||
};
|
||||
|
||||
export default removeLunaSeaSetting;
|
||||
@@ -1,93 +0,0 @@
|
||||
import RadarrAPI from '@server/api/servarr/radarr';
|
||||
import SonarrAPI from '@server/api/servarr/sonarr';
|
||||
import { getRepository } from '@server/datasource';
|
||||
import { User } from '@server/entity/User';
|
||||
import type { AllSettings } from '@server/lib/settings';
|
||||
|
||||
const migrationArrTags = async (settings: any): Promise<AllSettings> => {
|
||||
if (
|
||||
Array.isArray(settings.migrations) &&
|
||||
settings.migrations.includes('0007_migrate_arr_tags')
|
||||
) {
|
||||
return settings;
|
||||
}
|
||||
|
||||
const userRepository = getRepository(User);
|
||||
const users = await userRepository.find({
|
||||
select: ['id'],
|
||||
});
|
||||
|
||||
let errorOccurred = false;
|
||||
|
||||
for (const radarrSettings of settings.radarr || []) {
|
||||
if (!radarrSettings.tagRequests) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const radarr = new RadarrAPI({
|
||||
apiKey: radarrSettings.apiKey,
|
||||
url: RadarrAPI.buildUrl(radarrSettings, '/api/v3'),
|
||||
});
|
||||
const radarrTags = await radarr.getTags();
|
||||
for (const user of users) {
|
||||
const userTag = radarrTags.find((v) =>
|
||||
v.label.startsWith(user.id + ' - ')
|
||||
);
|
||||
if (!userTag) {
|
||||
continue;
|
||||
}
|
||||
await radarr.renameTag({
|
||||
id: userTag.id,
|
||||
label: userTag.label.replace(`${user.id} - `, `${user.id}-`),
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Unable to rename Radarr tags to the new format. Please check your Radarr connection settings for the instance "${radarrSettings.name}".`,
|
||||
error.message
|
||||
);
|
||||
errorOccurred = true;
|
||||
}
|
||||
}
|
||||
|
||||
for (const sonarrSettings of settings.sonarr || []) {
|
||||
if (!sonarrSettings.tagRequests) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const sonarr = new SonarrAPI({
|
||||
apiKey: sonarrSettings.apiKey,
|
||||
url: SonarrAPI.buildUrl(sonarrSettings, '/api/v3'),
|
||||
});
|
||||
const sonarrTags = await sonarr.getTags();
|
||||
for (const user of users) {
|
||||
const userTag = sonarrTags.find((v) =>
|
||||
v.label.startsWith(user.id + ' - ')
|
||||
);
|
||||
if (!userTag) {
|
||||
continue;
|
||||
}
|
||||
await sonarr.renameTag({
|
||||
id: userTag.id,
|
||||
label: userTag.label.replace(`${user.id} - `, `${user.id}-`),
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Unable to rename Sonarr tags to the new format. Please check your Sonarr connection settings for the instance "${sonarrSettings.name}".`,
|
||||
error.message
|
||||
);
|
||||
errorOccurred = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!errorOccurred) {
|
||||
if (!Array.isArray(settings.migrations)) {
|
||||
settings.migrations = [];
|
||||
}
|
||||
settings.migrations.push('0007_migrate_arr_tags');
|
||||
}
|
||||
return settings;
|
||||
};
|
||||
|
||||
export default migrationArrTags;
|
||||
@@ -124,7 +124,7 @@ const mapEpisodeResult = (episode: TmdbTvEpisodeResult): Episode => ({
|
||||
seasonNumber: episode.season_number,
|
||||
showId: episode.show_id,
|
||||
voteAverage: episode.vote_average,
|
||||
voteCount: episode.vote_count,
|
||||
voteCount: episode.vote_cuont,
|
||||
stillPath: episode.still_path,
|
||||
});
|
||||
|
||||
|
||||
@@ -61,7 +61,6 @@ const QueryFilterOptions = z.object({
|
||||
studio: z.coerce.string().optional(),
|
||||
genre: z.coerce.string().optional(),
|
||||
keywords: z.coerce.string().optional(),
|
||||
excludeKeywords: z.coerce.string().optional(),
|
||||
language: z.coerce.string().optional(),
|
||||
withRuntimeGte: z.coerce.string().optional(),
|
||||
withRuntimeLte: z.coerce.string().optional(),
|
||||
@@ -91,7 +90,6 @@ discoverRoutes.get('/movies', async (req, res, next) => {
|
||||
try {
|
||||
const query = ApiQuerySchema.parse(req.query);
|
||||
const keywords = query.keywords;
|
||||
const excludeKeywords = query.excludeKeywords;
|
||||
|
||||
const data = await tmdb.getDiscoverMovies({
|
||||
page: Number(query.page),
|
||||
@@ -107,7 +105,6 @@ discoverRoutes.get('/movies', async (req, res, next) => {
|
||||
? new Date(query.primaryReleaseDateGte).toISOString().split('T')[0]
|
||||
: undefined,
|
||||
keywords,
|
||||
excludeKeywords,
|
||||
withRuntimeGte: query.withRuntimeGte,
|
||||
withRuntimeLte: query.withRuntimeLte,
|
||||
voteAverageGte: query.voteAverageGte,
|
||||
@@ -131,15 +128,11 @@ discoverRoutes.get('/movies', async (req, res, next) => {
|
||||
if (keywords) {
|
||||
const splitKeywords = keywords.split(',');
|
||||
|
||||
const keywordResults = await Promise.all(
|
||||
keywordData = await Promise.all(
|
||||
splitKeywords.map(async (keywordId) => {
|
||||
return await tmdb.getKeywordDetails({ keywordId: Number(keywordId) });
|
||||
})
|
||||
);
|
||||
|
||||
keywordData = keywordResults.filter(
|
||||
(keyword): keyword is TmdbKeyword => keyword !== null
|
||||
);
|
||||
}
|
||||
|
||||
return res.status(200).json({
|
||||
@@ -384,7 +377,6 @@ discoverRoutes.get('/tv', async (req, res, next) => {
|
||||
try {
|
||||
const query = ApiQuerySchema.parse(req.query);
|
||||
const keywords = query.keywords;
|
||||
const excludeKeywords = query.excludeKeywords;
|
||||
const data = await tmdb.getDiscoverTv({
|
||||
page: Number(query.page),
|
||||
sortBy: query.sortBy as SortOptions,
|
||||
@@ -399,7 +391,6 @@ discoverRoutes.get('/tv', async (req, res, next) => {
|
||||
: undefined,
|
||||
originalLanguage: query.language,
|
||||
keywords,
|
||||
excludeKeywords,
|
||||
withRuntimeGte: query.withRuntimeGte,
|
||||
withRuntimeLte: query.withRuntimeLte,
|
||||
voteAverageGte: query.voteAverageGte,
|
||||
@@ -424,15 +415,11 @@ discoverRoutes.get('/tv', async (req, res, next) => {
|
||||
if (keywords) {
|
||||
const splitKeywords = keywords.split(',');
|
||||
|
||||
const keywordResults = await Promise.all(
|
||||
keywordData = await Promise.all(
|
||||
splitKeywords.map(async (keywordId) => {
|
||||
return await tmdb.getKeywordDetails({ keywordId: Number(keywordId) });
|
||||
})
|
||||
);
|
||||
|
||||
keywordData = keywordResults.filter(
|
||||
(keyword): keyword is TmdbKeyword => keyword !== null
|
||||
);
|
||||
}
|
||||
|
||||
return res.status(200).json({
|
||||
|
||||
@@ -4,40 +4,27 @@ import { Router } from 'express';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Delay the initialization of ImageProxy instances until the proxy (if any) is properly configured
|
||||
let _tmdbImageProxy: ImageProxy;
|
||||
function initTmdbImageProxy() {
|
||||
if (!_tmdbImageProxy) {
|
||||
_tmdbImageProxy = new ImageProxy('tmdb', 'https://image.tmdb.org', {
|
||||
rateLimitOptions: {
|
||||
maxRequests: 20,
|
||||
maxRPS: 50,
|
||||
},
|
||||
});
|
||||
}
|
||||
return _tmdbImageProxy;
|
||||
}
|
||||
let _tvdbImageProxy: ImageProxy;
|
||||
function initTvdbImageProxy() {
|
||||
if (!_tvdbImageProxy) {
|
||||
_tvdbImageProxy = new ImageProxy('tvdb', 'https://artworks.thetvdb.com', {
|
||||
rateLimitOptions: {
|
||||
maxRequests: 20,
|
||||
maxRPS: 50,
|
||||
},
|
||||
});
|
||||
}
|
||||
return _tvdbImageProxy;
|
||||
}
|
||||
const tmdbImageProxy = new ImageProxy('tmdb', 'https://image.tmdb.org', {
|
||||
rateLimitOptions: {
|
||||
maxRequests: 20,
|
||||
maxRPS: 50,
|
||||
},
|
||||
});
|
||||
const tvdbImageProxy = new ImageProxy('tvdb', 'https://artworks.thetvdb.com', {
|
||||
rateLimitOptions: {
|
||||
maxRequests: 20,
|
||||
maxRPS: 50,
|
||||
},
|
||||
});
|
||||
|
||||
router.get('/:type/*', async (req, res) => {
|
||||
const imagePath = req.path.replace(/^\/\w+/, '');
|
||||
try {
|
||||
let imageData;
|
||||
if (req.params.type === 'tmdb') {
|
||||
imageData = await initTmdbImageProxy().getImage(imagePath);
|
||||
imageData = await tmdbImageProxy.getImage(imagePath);
|
||||
} else if (req.params.type === 'tvdb') {
|
||||
imageData = await initTvdbImageProxy().getImage(imagePath);
|
||||
imageData = await tvdbImageProxy.getImage(imagePath);
|
||||
} else {
|
||||
logger.error('Unsupported image type', {
|
||||
imagePath,
|
||||
|
||||
@@ -54,7 +54,6 @@ issueRoutes.get<Record<string, string>, IssueResultsResponse>(
|
||||
.leftJoinAndSelect('issue.createdBy', 'createdBy')
|
||||
.leftJoinAndSelect('issue.media', 'media')
|
||||
.leftJoinAndSelect('issue.modifiedBy', 'modifiedBy')
|
||||
.leftJoinAndSelect('issue.comments', 'comments')
|
||||
.where('issue.status IN (:...issueStatus)', {
|
||||
issueStatus: statusFilter,
|
||||
});
|
||||
|
||||
@@ -197,10 +197,8 @@ mediaRoutes.delete(
|
||||
const media = await mediaRepository.findOneOrFail({
|
||||
where: { id: Number(req.params.id) },
|
||||
});
|
||||
|
||||
const is4k = req.query.is4k === 'true';
|
||||
const is4k = media.serviceUrl4k !== undefined;
|
||||
const isMovie = media.mediaType === MediaType.MOVIE;
|
||||
|
||||
let serviceSettings;
|
||||
if (isMovie) {
|
||||
serviceSettings = settings.radarr.find(
|
||||
@@ -227,7 +225,6 @@ mediaRoutes.delete(
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!serviceSettings) {
|
||||
logger.warn(
|
||||
`There is no default ${
|
||||
@@ -242,7 +239,6 @@ mediaRoutes.delete(
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let service;
|
||||
if (isMovie) {
|
||||
service = new RadarrAPI({
|
||||
|
||||
@@ -381,12 +381,6 @@ requestRoutes.get('/count', async (_req, res, next) => {
|
||||
)
|
||||
.getCount();
|
||||
|
||||
const completedCount = await query
|
||||
.where('request.status = :requestStatus', {
|
||||
requestStatus: MediaRequestStatus.COMPLETED,
|
||||
})
|
||||
.getCount();
|
||||
|
||||
return res.status(200).json({
|
||||
total: totalCount,
|
||||
movie: movieCount,
|
||||
@@ -396,7 +390,6 @@ requestRoutes.get('/count', async (_req, res, next) => {
|
||||
declined: declinedCount,
|
||||
processing: processingCount,
|
||||
available: availableCount,
|
||||
completed: completedCount,
|
||||
});
|
||||
} catch (e) {
|
||||
logger.error('Something went wrong retrieving request counts', {
|
||||
|
||||
@@ -28,9 +28,7 @@ import discoverSettingRoutes from '@server/routes/settings/discover';
|
||||
import { ApiError } from '@server/types/error';
|
||||
import { appDataPath } from '@server/utils/appDataVolume';
|
||||
import { getAppVersion } from '@server/utils/appVersion';
|
||||
import { dnsCache } from '@server/utils/dnsCache';
|
||||
import { getHostname } from '@server/utils/getHostname';
|
||||
import type { DnsEntries, DnsStats } from 'dns-caching';
|
||||
import { Router } from 'express';
|
||||
import rateLimit from 'express-rate-limit';
|
||||
import fs from 'fs';
|
||||
@@ -39,7 +37,6 @@ import { rescheduleJob } from 'node-schedule';
|
||||
import path from 'path';
|
||||
import semver from 'semver';
|
||||
import { URL } from 'url';
|
||||
import metadataRoutes from './metadata';
|
||||
import notificationRoutes from './notifications';
|
||||
import radarrRoutes from './radarr';
|
||||
import sonarrRoutes from './sonarr';
|
||||
@@ -50,7 +47,6 @@ settingsRoutes.use('/notifications', notificationRoutes);
|
||||
settingsRoutes.use('/radarr', radarrRoutes);
|
||||
settingsRoutes.use('/sonarr', sonarrRoutes);
|
||||
settingsRoutes.use('/discover', discoverSettingRoutes);
|
||||
settingsRoutes.use('/metadatas', metadataRoutes);
|
||||
|
||||
const filteredMainSettings = (
|
||||
user: User,
|
||||
@@ -759,19 +755,12 @@ settingsRoutes.get('/cache', async (_req, res) => {
|
||||
const tmdbImageCache = await ImageProxy.getImageStats('tmdb');
|
||||
const avatarImageCache = await ImageProxy.getImageStats('avatar');
|
||||
|
||||
const stats: DnsStats | undefined = dnsCache?.getStats();
|
||||
const entries: DnsEntries | undefined = dnsCache?.getCacheEntries();
|
||||
|
||||
return res.status(200).json({
|
||||
apiCaches,
|
||||
imageCache: {
|
||||
tmdb: tmdbImageCache,
|
||||
avatar: avatarImageCache,
|
||||
},
|
||||
dnsCache: {
|
||||
stats,
|
||||
entries,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
@@ -789,20 +778,6 @@ settingsRoutes.post<{ cacheId: AvailableCacheIds }>(
|
||||
}
|
||||
);
|
||||
|
||||
settingsRoutes.post<{ dnsEntry: string }>(
|
||||
'/cache/dns/:dnsEntry/flush',
|
||||
(req, res, next) => {
|
||||
const dnsEntry = req.params.dnsEntry;
|
||||
|
||||
if (dnsCache) {
|
||||
dnsCache.clear(dnsEntry);
|
||||
return res.status(204).send();
|
||||
}
|
||||
|
||||
next({ status: 404, message: 'Cache not found.' });
|
||||
}
|
||||
);
|
||||
|
||||
settingsRoutes.post(
|
||||
'/initialize',
|
||||
isAuthenticated(Permission.ADMIN),
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
import TheMovieDb from '@server/api/themoviedb';
|
||||
import Tvdb from '@server/api/tvdb';
|
||||
import {
|
||||
getSettings,
|
||||
MetadataProviderType,
|
||||
type MetadataSettings,
|
||||
} from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import { Router } from 'express';
|
||||
|
||||
function getTestResultString(testValue: number): string {
|
||||
if (testValue === -1) return 'not tested';
|
||||
if (testValue === 0) return 'failed';
|
||||
return 'ok';
|
||||
}
|
||||
|
||||
const metadataRoutes = Router();
|
||||
|
||||
metadataRoutes.get('/', (_req, res) => {
|
||||
const settings = getSettings();
|
||||
res.status(200).json({
|
||||
tv: settings.metadataSettings.tv,
|
||||
anime: settings.metadataSettings.anime,
|
||||
});
|
||||
});
|
||||
|
||||
metadataRoutes.put('/', async (req, res) => {
|
||||
const settings = getSettings();
|
||||
const body = req.body as MetadataSettings;
|
||||
|
||||
let tvdbTest = -1;
|
||||
let tmdbTest = -1;
|
||||
|
||||
try {
|
||||
if (
|
||||
body.tv === MetadataProviderType.TVDB ||
|
||||
body.anime === MetadataProviderType.TVDB
|
||||
) {
|
||||
tvdbTest = 0;
|
||||
const tvdb = await Tvdb.getInstance();
|
||||
await tvdb.test();
|
||||
tvdbTest = 1;
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Failed to test metadata provider', {
|
||||
label: 'Metadata',
|
||||
message: e.message,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
if (
|
||||
body.tv === MetadataProviderType.TMDB ||
|
||||
body.anime === MetadataProviderType.TMDB
|
||||
) {
|
||||
tmdbTest = 0;
|
||||
const tmdb = new TheMovieDb();
|
||||
await tmdb.getTvShow({ tvId: 1054 });
|
||||
tmdbTest = 1;
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Failed to test metadata provider', {
|
||||
label: 'MetadataProvider',
|
||||
message: e.message,
|
||||
});
|
||||
}
|
||||
|
||||
// If a test failed, return the test results
|
||||
if (tvdbTest === 0 || tmdbTest === 0) {
|
||||
return res.status(500).json({
|
||||
success: false,
|
||||
tests: {
|
||||
tvdb: getTestResultString(tvdbTest),
|
||||
tmdb: getTestResultString(tmdbTest),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
settings.metadataSettings = {
|
||||
tv: body.tv,
|
||||
anime: body.anime,
|
||||
};
|
||||
await settings.save();
|
||||
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
tv: body.tv,
|
||||
anime: body.anime,
|
||||
tests: {
|
||||
tvdb: getTestResultString(tvdbTest),
|
||||
tmdb: getTestResultString(tmdbTest),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
metadataRoutes.post('/test', async (req, res) => {
|
||||
let tvdbTest = -1;
|
||||
let tmdbTest = -1;
|
||||
|
||||
try {
|
||||
const body = req.body as { tmdb: boolean; tvdb: boolean };
|
||||
|
||||
try {
|
||||
if (body.tmdb) {
|
||||
tmdbTest = 0;
|
||||
const tmdb = new TheMovieDb();
|
||||
await tmdb.getTvShow({ tvId: 1054 });
|
||||
tmdbTest = 1;
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Failed to test metadata provider', {
|
||||
label: 'MetadataProvider',
|
||||
message: e.message,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
if (body.tvdb) {
|
||||
tvdbTest = 0;
|
||||
const tvdb = await Tvdb.getInstance();
|
||||
await tvdb.test();
|
||||
tvdbTest = 1;
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Failed to test metadata provider', {
|
||||
label: 'MetadataProvider',
|
||||
message: e.message,
|
||||
});
|
||||
}
|
||||
|
||||
const success = !(tvdbTest === 0 || tmdbTest === 0);
|
||||
const statusCode = success ? 200 : 500;
|
||||
|
||||
return res.status(statusCode).json({
|
||||
success: success,
|
||||
tests: {
|
||||
tmdb: getTestResultString(tmdbTest),
|
||||
tvdb: getTestResultString(tvdbTest),
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
return res.status(500).json({
|
||||
success: false,
|
||||
tests: {
|
||||
tmdb: getTestResultString(tmdbTest),
|
||||
tvdb: getTestResultString(tvdbTest),
|
||||
},
|
||||
error: e.message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default metadataRoutes;
|
||||
@@ -4,6 +4,7 @@ import type { NotificationAgent } from '@server/lib/notifications/agents/agent';
|
||||
import DiscordAgent from '@server/lib/notifications/agents/discord';
|
||||
import EmailAgent from '@server/lib/notifications/agents/email';
|
||||
import GotifyAgent from '@server/lib/notifications/agents/gotify';
|
||||
import LunaSeaAgent from '@server/lib/notifications/agents/lunasea';
|
||||
import NtfyAgent from '@server/lib/notifications/agents/ntfy';
|
||||
import PushbulletAgent from '@server/lib/notifications/agents/pushbullet';
|
||||
import PushoverAgent from '@server/lib/notifications/agents/pushover';
|
||||
@@ -270,7 +271,6 @@ notificationRoutes.get('/webhook', (_req, res) => {
|
||||
|
||||
const response: typeof webhookSettings = {
|
||||
enabled: webhookSettings.enabled,
|
||||
embedPoster: webhookSettings.embedPoster,
|
||||
types: webhookSettings.types,
|
||||
options: {
|
||||
...webhookSettings.options,
|
||||
@@ -279,7 +279,6 @@ notificationRoutes.get('/webhook', (_req, res) => {
|
||||
'utf8'
|
||||
)
|
||||
),
|
||||
supportVariables: webhookSettings.options.supportVariables ?? false,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -293,7 +292,6 @@ notificationRoutes.post('/webhook', async (req, res, next) => {
|
||||
|
||||
settings.notifications.agents.webhook = {
|
||||
enabled: req.body.enabled,
|
||||
embedPoster: req.body.embedPoster,
|
||||
types: req.body.types,
|
||||
options: {
|
||||
jsonPayload: Buffer.from(req.body.options.jsonPayload).toString(
|
||||
@@ -301,7 +299,6 @@ notificationRoutes.post('/webhook', async (req, res, next) => {
|
||||
),
|
||||
webhookUrl: req.body.options.webhookUrl,
|
||||
authHeader: req.body.options.authHeader,
|
||||
supportVariables: req.body.options.supportVariables ?? false,
|
||||
},
|
||||
};
|
||||
await settings.save();
|
||||
@@ -325,7 +322,6 @@ notificationRoutes.post('/webhook/test', async (req, res, next) => {
|
||||
|
||||
const testBody = {
|
||||
enabled: req.body.enabled,
|
||||
embedPoster: req.body.embedPoster,
|
||||
types: req.body.types,
|
||||
options: {
|
||||
jsonPayload: Buffer.from(req.body.options.jsonPayload).toString(
|
||||
@@ -333,7 +329,6 @@ notificationRoutes.post('/webhook/test', async (req, res, next) => {
|
||||
),
|
||||
webhookUrl: req.body.options.webhookUrl,
|
||||
authHeader: req.body.options.authHeader,
|
||||
supportVariables: req.body.options.supportVariables ?? false,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -351,6 +346,40 @@ notificationRoutes.post('/webhook/test', async (req, res, next) => {
|
||||
}
|
||||
});
|
||||
|
||||
notificationRoutes.get('/lunasea', (_req, res) => {
|
||||
const settings = getSettings();
|
||||
|
||||
res.status(200).json(settings.notifications.agents.lunasea);
|
||||
});
|
||||
|
||||
notificationRoutes.post('/lunasea', async (req, res) => {
|
||||
const settings = getSettings();
|
||||
|
||||
settings.notifications.agents.lunasea = req.body;
|
||||
await settings.save();
|
||||
|
||||
res.status(200).json(settings.notifications.agents.lunasea);
|
||||
});
|
||||
|
||||
notificationRoutes.post('/lunasea/test', async (req, res, next) => {
|
||||
if (!req.user) {
|
||||
return next({
|
||||
status: 500,
|
||||
message: 'User information is missing from the request.',
|
||||
});
|
||||
}
|
||||
|
||||
const lunaseaAgent = new LunaSeaAgent(req.body);
|
||||
if (await sendTestNotification(lunaseaAgent, req.user)) {
|
||||
return res.status(204).send();
|
||||
} else {
|
||||
return next({
|
||||
status: 500,
|
||||
message: 'Failed to send web push notification.',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
notificationRoutes.get('/gotify', (_req, res) => {
|
||||
const settings = getSettings();
|
||||
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import { getMetadataProvider } from '@server/api/metadata';
|
||||
import RottenTomatoes from '@server/api/rating/rottentomatoes';
|
||||
import TheMovieDb from '@server/api/themoviedb';
|
||||
import { ANIME_KEYWORD_ID } from '@server/api/themoviedb/constants';
|
||||
import type { TmdbKeyword } from '@server/api/themoviedb/interfaces';
|
||||
import { MediaType } from '@server/constants/media';
|
||||
import { getRepository } from '@server/datasource';
|
||||
import Media from '@server/entity/Media';
|
||||
@@ -16,20 +13,12 @@ const tvRoutes = Router();
|
||||
|
||||
tvRoutes.get('/:id', async (req, res, next) => {
|
||||
const tmdb = new TheMovieDb();
|
||||
|
||||
try {
|
||||
const tmdbTv = await tmdb.getTvShow({
|
||||
tvId: Number(req.params.id),
|
||||
});
|
||||
const metadataProvider = tmdbTv.keywords.results.some(
|
||||
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
|
||||
)
|
||||
? await getMetadataProvider('anime')
|
||||
: await getMetadataProvider('tv');
|
||||
const tv = await metadataProvider.getTvShow({
|
||||
const tv = await tmdb.getTvShow({
|
||||
tvId: Number(req.params.id),
|
||||
language: (req.query.language as string) ?? req.locale,
|
||||
});
|
||||
|
||||
const media = await Media.getMedia(tv.id, MediaType.TV);
|
||||
|
||||
const onUserWatchlist = await getRepository(Watchlist).exist({
|
||||
@@ -45,9 +34,7 @@ tvRoutes.get('/:id', async (req, res, next) => {
|
||||
|
||||
// TMDB issue where it doesnt fallback to English when no overview is available in requested locale.
|
||||
if (!data.overview) {
|
||||
const tvEnglish = await metadataProvider.getTvShow({
|
||||
tvId: Number(req.params.id),
|
||||
});
|
||||
const tvEnglish = await tmdb.getTvShow({ tvId: Number(req.params.id) });
|
||||
data.overview = tvEnglish.overview;
|
||||
}
|
||||
|
||||
@@ -66,18 +53,10 @@ tvRoutes.get('/:id', async (req, res, next) => {
|
||||
});
|
||||
|
||||
tvRoutes.get('/:id/season/:seasonNumber', async (req, res, next) => {
|
||||
try {
|
||||
const tmdb = new TheMovieDb();
|
||||
const tmdbTv = await tmdb.getTvShow({
|
||||
tvId: Number(req.params.id),
|
||||
});
|
||||
const metadataProvider = tmdbTv.keywords.results.some(
|
||||
(keyword: TmdbKeyword) => keyword.id === ANIME_KEYWORD_ID
|
||||
)
|
||||
? await getMetadataProvider('anime')
|
||||
: await getMetadataProvider('tv');
|
||||
const tmdb = new TheMovieDb();
|
||||
|
||||
const season = await metadataProvider.getTvSeason({
|
||||
try {
|
||||
const season = await tmdb.getTvSeason({
|
||||
tvId: Number(req.params.id),
|
||||
seasonNumber: Number(req.params.seasonNumber),
|
||||
language: (req.query.language as string) ?? req.locale,
|
||||
|
||||
@@ -33,93 +33,52 @@ import { EventSubscriber } from 'typeorm';
|
||||
export class MediaRequestSubscriber
|
||||
implements EntitySubscriberInterface<MediaRequest>
|
||||
{
|
||||
private async notifyAvailableMovie(
|
||||
entity: MediaRequest,
|
||||
event?: UpdateEvent<MediaRequest>
|
||||
) {
|
||||
// Get fresh media state using event manager
|
||||
let latestMedia: Media | null = null;
|
||||
if (event?.manager) {
|
||||
latestMedia = await event.manager.findOne(Media, {
|
||||
where: { id: entity.media.id },
|
||||
});
|
||||
}
|
||||
if (!latestMedia) {
|
||||
const mediaRepository = getRepository(Media);
|
||||
latestMedia = await mediaRepository.findOne({
|
||||
where: { id: entity.media.id },
|
||||
});
|
||||
}
|
||||
|
||||
// Check availability using fresh media state
|
||||
private async notifyAvailableMovie(entity: MediaRequest) {
|
||||
if (
|
||||
!latestMedia ||
|
||||
latestMedia[entity.is4k ? 'status4k' : 'status'] !== MediaStatus.AVAILABLE
|
||||
entity.media[entity.is4k ? 'status4k' : 'status'] ===
|
||||
MediaStatus.AVAILABLE
|
||||
) {
|
||||
return;
|
||||
}
|
||||
const tmdb = new TheMovieDb();
|
||||
|
||||
const tmdb = new TheMovieDb();
|
||||
try {
|
||||
const movie = await tmdb.getMovie({
|
||||
movieId: entity.media.tmdbId,
|
||||
});
|
||||
|
||||
try {
|
||||
const movie = await tmdb.getMovie({
|
||||
movieId: entity.media.tmdbId,
|
||||
});
|
||||
|
||||
notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, {
|
||||
event: `${entity.is4k ? '4K ' : ''}Movie Request Now Available`,
|
||||
notifyAdmin: false,
|
||||
notifySystem: true,
|
||||
notifyUser: entity.requestedBy,
|
||||
subject: `${movie.title}${
|
||||
movie.release_date ? ` (${movie.release_date.slice(0, 4)})` : ''
|
||||
}`,
|
||||
message: truncate(movie.overview, {
|
||||
length: 500,
|
||||
separator: /\s/,
|
||||
omission: '…',
|
||||
}),
|
||||
media: latestMedia,
|
||||
image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${movie.poster_path}`,
|
||||
request: entity,
|
||||
});
|
||||
} catch (e) {
|
||||
logger.error('Something went wrong sending media notification(s)', {
|
||||
label: 'Notifications',
|
||||
errorMessage: e.message,
|
||||
mediaId: entity.id,
|
||||
});
|
||||
notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, {
|
||||
event: `${entity.is4k ? '4K ' : ''}Movie Request Now Available`,
|
||||
notifyAdmin: false,
|
||||
notifySystem: true,
|
||||
notifyUser: entity.requestedBy,
|
||||
subject: `${movie.title}${
|
||||
movie.release_date ? ` (${movie.release_date.slice(0, 4)})` : ''
|
||||
}`,
|
||||
message: truncate(movie.overview, {
|
||||
length: 500,
|
||||
separator: /\s/,
|
||||
omission: '…',
|
||||
}),
|
||||
media: entity.media,
|
||||
image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${movie.poster_path}`,
|
||||
request: entity,
|
||||
});
|
||||
} catch (e) {
|
||||
logger.error('Something went wrong sending media notification(s)', {
|
||||
label: 'Notifications',
|
||||
errorMessage: e.message,
|
||||
mediaId: entity.id,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async notifyAvailableSeries(
|
||||
entity: MediaRequest,
|
||||
event?: UpdateEvent<MediaRequest>
|
||||
) {
|
||||
// Get fresh media state with seasons using event manager
|
||||
let latestMedia: Media | null = null;
|
||||
if (event?.manager) {
|
||||
latestMedia = await event.manager.findOne(Media, {
|
||||
where: { id: entity.media.id },
|
||||
relations: { seasons: true },
|
||||
});
|
||||
}
|
||||
if (!latestMedia) {
|
||||
const mediaRepository = getRepository(Media);
|
||||
latestMedia = await mediaRepository.findOne({
|
||||
where: { id: entity.media.id },
|
||||
relations: { seasons: true },
|
||||
});
|
||||
}
|
||||
|
||||
if (!latestMedia) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check availability using fresh media state
|
||||
private async notifyAvailableSeries(entity: MediaRequest) {
|
||||
// Find all seasons in the related media entity
|
||||
// and see if they are available, then we can check
|
||||
// if the request contains the same seasons
|
||||
const requestedSeasons =
|
||||
entity.seasons?.map((entitySeason) => entitySeason.seasonNumber) ?? [];
|
||||
const availableSeasons = latestMedia.seasons.filter(
|
||||
const availableSeasons = entity.media.seasons.filter(
|
||||
(season) =>
|
||||
season[entity.is4k ? 'status4k' : 'status'] === MediaStatus.AVAILABLE &&
|
||||
requestedSeasons.includes(season.seasonNumber)
|
||||
@@ -128,46 +87,44 @@ export class MediaRequestSubscriber
|
||||
availableSeasons.length > 0 &&
|
||||
availableSeasons.length === requestedSeasons.length;
|
||||
|
||||
if (!isMediaAvailable) {
|
||||
return;
|
||||
}
|
||||
if (isMediaAvailable) {
|
||||
const tmdb = new TheMovieDb();
|
||||
|
||||
const tmdb = new TheMovieDb();
|
||||
try {
|
||||
const tv = await tmdb.getTvShow({ tvId: entity.media.tmdbId });
|
||||
|
||||
try {
|
||||
const tv = await tmdb.getTvShow({ tvId: entity.media.tmdbId });
|
||||
|
||||
notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, {
|
||||
event: `${entity.is4k ? '4K ' : ''}Series Request Now Available`,
|
||||
subject: `${tv.name}${
|
||||
tv.first_air_date ? ` (${tv.first_air_date.slice(0, 4)})` : ''
|
||||
}`,
|
||||
message: truncate(tv.overview, {
|
||||
length: 500,
|
||||
separator: /\s/,
|
||||
omission: '…',
|
||||
}),
|
||||
notifyAdmin: false,
|
||||
notifySystem: true,
|
||||
notifyUser: entity.requestedBy,
|
||||
image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${tv.poster_path}`,
|
||||
media: latestMedia,
|
||||
extra: [
|
||||
{
|
||||
name: 'Requested Seasons',
|
||||
value: entity.seasons
|
||||
.map((season) => season.seasonNumber)
|
||||
.join(', '),
|
||||
},
|
||||
],
|
||||
request: entity,
|
||||
});
|
||||
} catch (e) {
|
||||
logger.error('Something went wrong sending media notification(s)', {
|
||||
label: 'Notifications',
|
||||
errorMessage: e.message,
|
||||
mediaId: entity.id,
|
||||
});
|
||||
notificationManager.sendNotification(Notification.MEDIA_AVAILABLE, {
|
||||
event: `${entity.is4k ? '4K ' : ''}Series Request Now Available`,
|
||||
subject: `${tv.name}${
|
||||
tv.first_air_date ? ` (${tv.first_air_date.slice(0, 4)})` : ''
|
||||
}`,
|
||||
message: truncate(tv.overview, {
|
||||
length: 500,
|
||||
separator: /\s/,
|
||||
omission: '…',
|
||||
}),
|
||||
notifyAdmin: false,
|
||||
notifySystem: true,
|
||||
notifyUser: entity.requestedBy,
|
||||
image: `https://image.tmdb.org/t/p/w600_and_h900_bestv2${tv.poster_path}`,
|
||||
media: entity.media,
|
||||
extra: [
|
||||
{
|
||||
name: 'Requested Seasons',
|
||||
value: entity.seasons
|
||||
.map((season) => season.seasonNumber)
|
||||
.join(', '),
|
||||
},
|
||||
],
|
||||
request: entity,
|
||||
});
|
||||
} catch (e) {
|
||||
logger.error('Something went wrong sending media notification(s)', {
|
||||
label: 'Notifications',
|
||||
errorMessage: e.message,
|
||||
mediaId: entity.id,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -292,17 +249,9 @@ export class MediaRequestSubscriber
|
||||
}
|
||||
|
||||
if (radarrSettings.tagRequests) {
|
||||
const radarrTags = await radarr.getTags();
|
||||
// old tags had space around the hyphen
|
||||
let userTag = radarrTags.find((v) =>
|
||||
let userTag = (await radarr.getTags()).find((v) =>
|
||||
v.label.startsWith(entity.requestedBy.id + ' - ')
|
||||
);
|
||||
// new tags do not have spaces around the hyphen, since spaces are not allowed anymore
|
||||
if (!userTag) {
|
||||
userTag = radarrTags.find((v) =>
|
||||
v.label.startsWith(entity.requestedBy.id + '-')
|
||||
);
|
||||
}
|
||||
if (!userTag) {
|
||||
logger.info(`Requester has no active tag. Creating new`, {
|
||||
label: 'Media Request',
|
||||
@@ -310,11 +259,11 @@ export class MediaRequestSubscriber
|
||||
mediaId: entity.media.id,
|
||||
userId: entity.requestedBy.id,
|
||||
newTag:
|
||||
entity.requestedBy.id + '-' + entity.requestedBy.displayName,
|
||||
entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
|
||||
});
|
||||
userTag = await radarr.createTag({
|
||||
label:
|
||||
entity.requestedBy.id + '-' + entity.requestedBy.displayName,
|
||||
entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
|
||||
});
|
||||
}
|
||||
if (userTag.id) {
|
||||
@@ -609,17 +558,9 @@ export class MediaRequestSubscriber
|
||||
}
|
||||
|
||||
if (sonarrSettings.tagRequests) {
|
||||
const sonarrTags = await sonarr.getTags();
|
||||
// old tags had space around the hyphen
|
||||
let userTag = sonarrTags.find((v) =>
|
||||
let userTag = (await sonarr.getTags()).find((v) =>
|
||||
v.label.startsWith(entity.requestedBy.id + ' - ')
|
||||
);
|
||||
// new tags do not have spaces around the hyphen, since spaces are not allowed anymore
|
||||
if (!userTag) {
|
||||
userTag = sonarrTags.find((v) =>
|
||||
v.label.startsWith(entity.requestedBy.id + '-')
|
||||
);
|
||||
}
|
||||
if (!userTag) {
|
||||
logger.info(`Requester has no active tag. Creating new`, {
|
||||
label: 'Media Request',
|
||||
@@ -627,11 +568,11 @@ export class MediaRequestSubscriber
|
||||
mediaId: entity.media.id,
|
||||
userId: entity.requestedBy.id,
|
||||
newTag:
|
||||
entity.requestedBy.id + '-' + entity.requestedBy.displayName,
|
||||
entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
|
||||
});
|
||||
userTag = await sonarr.createTag({
|
||||
label:
|
||||
entity.requestedBy.id + '-' + entity.requestedBy.displayName,
|
||||
entity.requestedBy.id + ' - ' + entity.requestedBy.displayName,
|
||||
});
|
||||
}
|
||||
if (userTag.id) {
|
||||
@@ -841,10 +782,10 @@ export class MediaRequestSubscriber
|
||||
|
||||
if (event.entity.status === MediaRequestStatus.COMPLETED) {
|
||||
if (event.entity.media.mediaType === MediaType.MOVIE) {
|
||||
this.notifyAvailableMovie(event.entity as MediaRequest, event);
|
||||
this.notifyAvailableMovie(event.entity as MediaRequest);
|
||||
}
|
||||
if (event.entity.media.mediaType === MediaType.TV) {
|
||||
this.notifyAvailableSeries(event.entity as MediaRequest, event);
|
||||
this.notifyAvailableSeries(event.entity as MediaRequest);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,11 +53,10 @@ div(style='display: block; background-color: #111827; padding: 2.5rem 0;')
|
||||
b(style='color: #9ca3af; font-weight: 700;')
|
||||
| #{extra.name}
|
||||
| #{extra.value}
|
||||
if imageUrl
|
||||
td(rowspan='2' style='width: 7rem;')
|
||||
a(style='display: block; width: 7rem; overflow: hidden; border-radius: .375rem;' href=actionUrl)
|
||||
div(style='overflow: hidden; box-sizing: border-box; margin: 0px;')
|
||||
img(alt='' src=imageUrl style='box-sizing: border-box; padding: 0px; border: none; margin: auto; display: block; min-width: 100%; max-width: 100%; min-height: 100%; max-height: 100%;')
|
||||
td(rowspan='2' style='width: 7rem;')
|
||||
a(style='display: block; width: 7rem; overflow: hidden; border-radius: .375rem;' href=actionUrl)
|
||||
div(style='overflow: hidden; box-sizing: border-box; margin: 0px;')
|
||||
img(alt='' src=imageUrl style='box-sizing: border-box; padding: 0px; border: none; margin: auto; display: block; min-width: 100%; max-width: 100%; min-height: 100%; max-height: 100%;')
|
||||
tr
|
||||
td(style='font-size: .85em; color: #9ca3af; line-height: 1em; vertical-align: bottom; margin-right: 1rem')
|
||||
span
|
||||
|
||||
35
server/types/languages.d.ts
vendored
35
server/types/languages.d.ts
vendored
@@ -1,35 +0,0 @@
|
||||
export type AvailableLocale =
|
||||
| 'ar'
|
||||
| 'bg'
|
||||
| 'ca'
|
||||
| 'cs'
|
||||
| 'da'
|
||||
| 'de'
|
||||
| 'en'
|
||||
| 'el'
|
||||
| 'es'
|
||||
| 'es-MX'
|
||||
| 'fi'
|
||||
| 'fr'
|
||||
| 'hr'
|
||||
| 'he'
|
||||
| 'hi'
|
||||
| 'hu'
|
||||
| 'it'
|
||||
| 'ja'
|
||||
| 'ko'
|
||||
| 'lt'
|
||||
| 'nb-NO'
|
||||
| 'nl'
|
||||
| 'pl'
|
||||
| 'pt-BR'
|
||||
| 'pt-PT'
|
||||
| 'ro'
|
||||
| 'ru'
|
||||
| 'sq'
|
||||
| 'sr'
|
||||
| 'sv'
|
||||
| 'tr'
|
||||
| 'uk'
|
||||
| 'zh-CN'
|
||||
| 'zh-TW';
|
||||
@@ -1,15 +1,11 @@
|
||||
import type { ProxySettings } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import axios, { type InternalAxiosRequestConfig } from 'axios';
|
||||
import axios from 'axios';
|
||||
import { HttpProxyAgent } from 'http-proxy-agent';
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent';
|
||||
import type { Dispatcher } from 'undici';
|
||||
import { Agent, ProxyAgent, setGlobalDispatcher } from 'undici';
|
||||
|
||||
export let requestInterceptorFunction: (
|
||||
config: InternalAxiosRequestConfig
|
||||
) => InternalAxiosRequestConfig;
|
||||
|
||||
export default async function createCustomProxyAgent(
|
||||
proxySettings: ProxySettings
|
||||
) {
|
||||
@@ -77,8 +73,7 @@ export default async function createCustomProxyAgent(
|
||||
axios.defaults.httpsAgent = new HttpsProxyAgent(proxyUrl, {
|
||||
headers: token ? { 'proxy-authorization': token } : undefined,
|
||||
});
|
||||
|
||||
requestInterceptorFunction = (config) => {
|
||||
axios.interceptors.request.use((config) => {
|
||||
const url = config.baseURL
|
||||
? new URL(config.baseURL + (config.url || ''))
|
||||
: config.url;
|
||||
@@ -87,8 +82,7 @@ export default async function createCustomProxyAgent(
|
||||
config.httpsAgent = false;
|
||||
}
|
||||
return config;
|
||||
};
|
||||
axios.interceptors.request.use(requestInterceptorFunction);
|
||||
});
|
||||
} catch (e) {
|
||||
logger.error('Failed to connect to the proxy: ' + e.message, {
|
||||
label: 'Proxy',
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user