Mirror of https://github.com/fallenbagel/jellyseerr.git (synced 2025-12-25 03:11:39 -05:00)

Compare commits: preview-re ... preview-po (84 commits)
Commits in this comparison (SHA1s):

00e18a272a 6957b7606e 0154e6b538 3d0166aaef 2fc8996606 ef5e954db1 39a5ccb7f3
9b151feb4f fe5d016929 14f316a9a6 5c24e79b1d ba84212e68 f25b32aec8 5a13226877
694913c767 a2d2fd3c2a cb94ad5a2e 2829c2548a 830f431e01 94efdf7a18 ab5cdf5464
d3805d99e8 7a5ee18e2c 96591a9ddd 46c3af115a 4e63cee12b 5ee29823c6 2add7af5ec
3b12c98242 bd7339a105 66e308ba1d 357b927ab3 f8926fa86c e2992fea9c 2b0d497370
028012185c 94a9806089 caaed7c8b8 29452648e6 f6b5a6fe9f cf4e3fd579 326d2cb4ca
ff7bb884ae ef78fdd534 637e7dbd8e 3ebf47fe61 88b67686ff 86444b80b9 5f7679982a
610c372498 7aca0be41c 0bab6887b0 0d6a1f12fb 87c8444ec6 f08c537cba b1b4dd9cfc
eb111ac1db 106cd195d4 42ad4e0ae3 501859207a 8c7004c50d b594dec992 e93ab06504
0581d7b6ad 00c811d10d 14d3ec22b0 ed57911c7c e6cc2c55a2 4d85f29843 44aaca0fb2
abd80c1fa8 b6592bf9f7 573b64f901 b39a5a7d82 325e2ed6d3 e7c11da52b 5712e19804
4b549763e5 24151d27f7 f3cc8cba0a 57e7d68092 d3622f7bb3 20c821e2eb 7b82ced5e6
@@ -448,6 +448,69 @@
|
||||
"contributions": [
|
||||
"security"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "j0srisk",
|
||||
"name": "Joseph Risk",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/18372584?v=4",
|
||||
"profile": "http://josephrisk.com",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "Loetwiek",
|
||||
"name": "Loetwiek",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/79059734?v=4",
|
||||
"profile": "https://github.com/Loetwiek",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "Fuochi",
|
||||
"name": "Fuochi",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/4720478?v=4",
|
||||
"profile": "https://github.com/Fuochi",
|
||||
"contributions": [
|
||||
"doc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "demrich",
|
||||
"name": "David Emrich",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/30092389?v=4",
|
||||
"profile": "https://github.com/demrich",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "maxnatamo",
|
||||
"name": "Max T. Kristiansen",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/5898152?v=4",
|
||||
"profile": "https://maxtrier.dk",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "DamsDev1",
|
||||
"name": "Damien Fajole",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/60252259?v=4",
|
||||
"profile": "https://damsdev.me",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "AhmedNSidd",
|
||||
"name": "Ahmed Siddiqui",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/36286128?v=4",
|
||||
"profile": "https://github.com/AhmedNSidd",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
.github/ISSUE_TEMPLATE/bug.yml (vendored, 8 lines changed)

@@ -55,6 +55,14 @@ body:
        - tablet
    validations:
      required: true
  - type: dropdown
    id: database
    attributes:
      options:
        - SQLite (default)
        - PostgreSQL
      label: Database
      description: Which database backend are you using?
  - type: input
    id: device
    attributes:
@@ -8,3 +8,4 @@ pnpm-lock.yaml

# assets
src/assets/
public/
docs/

@@ -3,6 +3,12 @@ module.exports = {
  singleQuote: true,
  trailingComma: 'es5',
  overrides: [
    {
      files: 'pnpm-lock.yaml',
      options: {
        rangeEnd: 0, // default: Infinity
      },
    },
    {
      files: 'gen-docs/pnpm-lock.yaml',
      options: {
@@ -48,7 +48,7 @@ All help is welcome and greatly appreciated! If you would like to contribute to
|
||||
4. Run the development environment:
|
||||
|
||||
```bash
|
||||
pnpm
|
||||
pnpm install
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
|
||||
@@ -291,6 +291,12 @@ Thanks goes to these wonderful people from Overseerr ([emoji key](https://allcon
|
||||
<td align="center" valign="top" width="14.28%"><a href="http://josephrisk.com"><img src="https://avatars.githubusercontent.com/u/18372584?v=4?s=100" width="100px;" alt="Joseph Risk"/><br /><sub><b>Joseph Risk</b></sub></a><br /><a href="https://github.com/sct/overseerr/commits?author=j0srisk" title="Code">💻</a></td>
|
||||
<td align="center" valign="top" width="14.28%"><a href="https://github.com/Loetwiek"><img src="https://avatars.githubusercontent.com/u/79059734?v=4?s=100" width="100px;" alt="Loetwiek"/><br /><sub><b>Loetwiek</b></sub></a><br /><a href="https://github.com/sct/overseerr/commits?author=Loetwiek" title="Code">💻</a></td>
|
||||
<td align="center" valign="top" width="14.28%"><a href="https://github.com/Fuochi"><img src="https://avatars.githubusercontent.com/u/4720478?v=4?s=100" width="100px;" alt="Fuochi"/><br /><sub><b>Fuochi</b></sub></a><br /><a href="https://github.com/sct/overseerr/commits?author=Fuochi" title="Documentation">📖</a></td>
|
||||
<td align="center" valign="top" width="14.28%"><a href="https://github.com/demrich"><img src="https://avatars.githubusercontent.com/u/30092389?v=4?s=100" width="100px;" alt="David Emrich"/><br /><sub><b>David Emrich</b></sub></a><br /><a href="https://github.com/sct/overseerr/commits?author=demrich" title="Code">💻</a></td>
|
||||
<td align="center" valign="top" width="14.28%"><a href="https://maxtrier.dk"><img src="https://avatars.githubusercontent.com/u/5898152?v=4?s=100" width="100px;" alt="Max T. Kristiansen"/><br /><sub><b>Max T. Kristiansen</b></sub></a><br /><a href="https://github.com/sct/overseerr/commits?author=maxnatamo" title="Code">💻</a></td>
|
||||
<td align="center" valign="top" width="14.28%"><a href="https://damsdev.me"><img src="https://avatars.githubusercontent.com/u/60252259?v=4?s=100" width="100px;" alt="Damien Fajole"/><br /><sub><b>Damien Fajole</b></sub></a><br /><a href="https://github.com/sct/overseerr/commits?author=DamsDev1" title="Code">💻</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center" valign="top" width="14.28%"><a href="https://github.com/AhmedNSidd"><img src="https://avatars.githubusercontent.com/u/36286128?v=4?s=100" width="100px;" alt="Ahmed Siddiqui"/><br /><sub><b>Ahmed Siddiqui</b></sub></a><br /><a href="https://github.com/sct/overseerr/commits?author=AhmedNSidd" title="Code">💻</a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
@@ -75,6 +75,7 @@
|
||||
"types": 0,
|
||||
"options": {
|
||||
"webhookUrl": "",
|
||||
"webhookRoleId": "",
|
||||
"enableMentions": true
|
||||
}
|
||||
},
|
||||
|
||||
docker-compose.postgres.yaml (new file, 38 lines)

@@ -0,0 +1,38 @@
---
version: '3.8'
services:
  jellyseerr:
    build:
      context: .
      dockerfile: Dockerfile.local
    ports:
      - '5055:5055'
    environment:
      DB_TYPE: 'postgres' # Which DB engine to use. The default is "sqlite". To use postgres, this needs to be set to "postgres"
      DB_HOST: 'postgres' # The host (url) of the database
      DB_PORT: '5432' # The port to connect to
      DB_USER: 'jellyseerr' # Username used to connect to the database
      DB_PASS: 'jellyseerr' # Password of the user used to connect to the database
      DB_NAME: 'jellyseerr' # The name of the database to connect to
      DB_LOG_QUERIES: 'false' # Whether to log the DB queries for debugging
      DB_USE_SSL: 'false' # Whether to enable ssl for database connection
    volumes:
      - .:/app:rw,cached
      - /app/node_modules
      - /app/.next
    depends_on:
      - postgres
    links:
      - postgres
  postgres:
    image: postgres
    environment:
      POSTGRES_USER: jellyseerr
      POSTGRES_PASSWORD: jellyseerr
      POSTGRES_DB: jellyseerr
    ports:
      - '5432:5432'
    volumes:
      - postgres:/var/lib/postgresql/data
volumes:
  postgres:
@@ -17,6 +17,7 @@ Welcome to the Jellyseerr Documentation.
|
||||
- **Mobile-friendly design**, for when you need to approve requests on the go.
|
||||
- Granular permission system.
|
||||
- Localization into other languages.
|
||||
- Support for PostgreSQL and SQLite databases.
|
||||
- More features to come!
|
||||
|
||||
## Motivation
|
||||
|
||||
docs/extending-jellyseerr/database-config.mdx (new file, 56 lines)

@@ -0,0 +1,56 @@
---
title: Configuring the Database (Advanced)
description: Configure the database for Jellyseerr
sidebar_position: 2
---

# Configuring the Database

Jellyseerr supports SQLite and PostgreSQL. The database connection can be configured using the following environment variables:

## SQLite Options

```dotenv
DB_TYPE="sqlite" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite".
CONFIG_DIRECTORY="config" # (optional) The path to the config directory where the db file is stored. The default is "config".
DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false".
```

## PostgreSQL Options

```dotenv
DB_TYPE="postgres" # Which DB engine to use, either "sqlite" or "postgres". The default is "sqlite". To use postgres, this needs to be set to "postgres".
DB_HOST="localhost" # (optional) The host (url) of the database. The default is "localhost".
DB_PORT="5432" # (optional) The port to connect to. The default is "5432".
DB_USER= # (required) Username used to connect to the database
DB_PASS= # (required) Password of the user used to connect to the database
DB_NAME="jellyseerr" # (optional) The name of the database to connect to. The default is "jellyseerr".
DB_LOG_QUERIES="false" # (optional) Whether to log the DB queries for debugging. The default is "false".
```

### SSL configuration

The following options can be used to further configure SSL. Certificates can be provided as a string or a file path, with the string version taking precedence.

```dotenv
DB_USE_SSL="false" # (optional) Whether to enable SSL for the database connection. This must be "true" to use the other SSL options. The default is "false".
DB_SSL_REJECT_UNAUTHORIZED="true" # (optional) Whether to reject SSL connections with unverifiable certificates (i.e. self-signed certificates) without providing the settings below. The default is "true".
DB_SSL_CA= # (optional) The CA certificate to verify the connection, provided as a string. The default is "".
DB_SSL_CA_FILE= # (optional) The path to a CA certificate to verify the connection. The default is "".
DB_SSL_KEY= # (optional) The private key for the connection in PEM format, provided as a string. The default is "".
DB_SSL_KEY_FILE= # (optional) Path to the private key for the connection in PEM format. The default is "".
DB_SSL_CERT= # (optional) Certificate chain in PEM format for the private key, provided as a string. The default is "".
DB_SSL_CERT_FILE= # (optional) Path to the certificate chain in PEM format for the private key. The default is "".
```
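
For illustration only, here is a minimal sketch of how variables like these could be read into a TypeORM-style PostgreSQL connection config, including the string-over-file precedence for certificates. The helper and option names below are assumptions for the sketch, not Jellyseerr's actual implementation.

```ts
import fs from 'node:fs';

// Hypothetical helper: a certificate provided as a string wins over a file path.
function readCertOption(value?: string, filePath?: string): string | undefined {
  if (value) return value;
  if (filePath) return fs.readFileSync(filePath, 'utf8');
  return undefined;
}

const useSsl = process.env.DB_USE_SSL === 'true';

// Shape loosely follows TypeORM's PostgresConnectionOptions; adjust as needed.
const postgresOptions = {
  type: 'postgres' as const,
  host: process.env.DB_HOST ?? 'localhost',
  port: Number(process.env.DB_PORT ?? 5432),
  username: process.env.DB_USER,
  password: process.env.DB_PASS,
  database: process.env.DB_NAME ?? 'jellyseerr',
  logging: process.env.DB_LOG_QUERIES === 'true',
  ssl: useSsl
    ? {
        rejectUnauthorized: process.env.DB_SSL_REJECT_UNAUTHORIZED !== 'false',
        ca: readCertOption(process.env.DB_SSL_CA, process.env.DB_SSL_CA_FILE),
        key: readCertOption(process.env.DB_SSL_KEY, process.env.DB_SSL_KEY_FILE),
        cert: readCertOption(process.env.DB_SSL_CERT, process.env.DB_SSL_CERT_FILE),
      }
    : false,
};
```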

### Migrating from SQLite to PostgreSQL

1. Set up your PostgreSQL database and configure Jellyseerr to use it.
2. Run Jellyseerr to create the tables in the PostgreSQL database.
3. Stop Jellyseerr.
4. Run the following command to export the data from the SQLite database and import it into the PostgreSQL database:
   - Edit the postgres connection string to match your setup.
   - WARNING: The most recent release of pgloader has an issue quoting the table columns. Use the version in the docker container to avoid this issue.
   - "I don't have or don't want to use docker": you can build the working pgloader version [in this PR](https://github.com/dimitri/pgloader/pull/1531) from source and use the same options as below.
   ```bash
   docker run --rm -v config/db.sqlite3:/db.sqlite3:ro -v pgloader/pgloader.load:/pgloader.load ghcr.io/ralgar/pgloader:pr-1531 pgloader --with "quote identifiers" --with "data only" /db.sqlite3 postgresql://{{DB_USER}}:{{DB_PASS}}@{{DB_HOST}}:{{DB_PORT}}/{{DB_NAME}}
   ```
5. Start Jellyseerr.
|
||||
@@ -95,6 +95,8 @@ location ^~ /jellyseerr {
|
||||
sub_filter '/api/v1' '/$app/api/v1';
|
||||
sub_filter '/login/plex/loading' '/$app/login/plex/loading';
|
||||
sub_filter '/images/' '/$app/images/';
|
||||
sub_filter '/imageproxy/' '/$app/imageproxy/';
|
||||
sub_filter '/avatarproxy/' '/$app/avatarproxy/';
|
||||
sub_filter '/android-' '/$app/android-';
|
||||
sub_filter '/apple-' '/$app/apple-';
|
||||
sub_filter '/favicon' '/$app/favicon';
|
||||
|
||||
@@ -3,9 +3,7 @@ title: Build From Source (Advanced)
|
||||
description: Install Jellyseerr by building from source
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Build from Source (Advanced)
|
||||
|
||||
:::warning
|
||||
This method is not recommended for most users. It is intended for advanced users who are familiar with managing their own server infrastructure.
|
||||
:::
|
||||
@@ -14,43 +12,31 @@ import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- [Node.js 20.x](https://nodejs.org/en/download/)
|
||||
- [Pnpm 9.x](https://pnpm.io/installation)
|
||||
- [Git](https://git-scm.com/downloads)
|
||||
|
||||
## Unix (Linux, macOS)
|
||||
|
||||
### Installation
|
||||
|
||||
1. Assuming you want the working directory to be `/opt/jellyseerr`, create the directory and navigate to it:
|
||||
|
||||
```bash
|
||||
sudo mkdir -p /opt/jellyseerr && cd /opt/jellyseerr
|
||||
```
|
||||
|
||||
2. Clone the Jellyseerr repository and checkout the develop branch:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/Fallenbagel/jellyseerr.git
|
||||
cd jellyseerr
|
||||
git checkout develop # by default, you are on the develop branch so this step is not necessary
|
||||
```
|
||||
|
||||
3. Install the dependencies:
|
||||
|
||||
```bash
|
||||
CYPRESS_INSTALL_BINARY=0 pnpm install --frozen-lockfile
|
||||
```
|
||||
|
||||
4. Build the project:
|
||||
|
||||
```bash
|
||||
pnpm build
|
||||
```
|
||||
|
||||
5. Start Jellyseerr:
|
||||
|
||||
```bash
|
||||
pnpm start
|
||||
```
|
||||
@@ -60,7 +46,6 @@ You can now access Jellyseerr by visiting `http://localhost:5055` in your web br
|
||||
:::
|
||||
|
||||
#### Extending the installation
|
||||
|
||||
<Tabs groupId="unix-extensions" queryString>
|
||||
<TabItem value="linux" label="Linux">
|
||||
To run jellyseerr as a systemd service:
|
||||
@@ -71,23 +56,21 @@ To run jellyseerr as a systemd service:
|
||||
PORT=5055
|
||||
|
||||
## specify on which interface to listen, by default jellyseerr listens on all interfaces
|
||||
|
||||
#HOST=127.0.0.1
|
||||
|
||||
## Uncomment if your media server is emby instead of jellyfin.
|
||||
|
||||
# JELLYFIN_TYPE=emby
|
||||
|
||||
````
|
||||
## Uncomment if you want to force Node.js to resolve IPv4 before IPv6 (advanced users only)
|
||||
# FORCE_IPV4_FIRST=true
|
||||
```
|
||||
2. Then run the following commands:
|
||||
```bash
|
||||
which node
|
||||
````
|
||||
|
||||
```
|
||||
Copy the path to node, it should be something like `/usr/bin/node`.
|
||||
|
||||
3. Create the systemd service file at `/etc/systemd/system/jellyseerr.service`, using either `sudo systemctl edit jellyseerr` or `sudo nano /etc/systemd/system/jellyseerr.service`:
|
||||
|
||||
```bash
|
||||
[Unit]
|
||||
Description=Jellyseerr Service
|
||||
@@ -105,18 +88,15 @@ ExecStart=/usr/bin/node dist/index.js
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
:::note
|
||||
If you are using a different path to node, replace `/usr/bin/node` with the path to node.
|
||||
:::
|
||||
|
||||
4. Enable and start the service:
|
||||
|
||||
```bash
|
||||
sudo systemctl enable jellyseerr
|
||||
sudo systemctl start jellyseerr
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="macos" label="macOS">
|
||||
To run jellyseerr as a launchd service:
|
||||
@@ -127,7 +107,6 @@ which node
|
||||
Copy the path to node, it should be something like `/usr/local/bin/node`.
|
||||
|
||||
2. Create a launchd plist file at `~/Library/LaunchAgents/com.jellyseerr.plist`:
|
||||
|
||||
```xml
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
@@ -156,27 +135,21 @@ Copy the path to node, it should be something like `/usr/local/bin/node`.
|
||||
</dict>
|
||||
</plist>
|
||||
```
|
||||
|
||||
:::note
|
||||
If you are using a different path to node, replace `/usr/local/bin/node` with the path to node.
|
||||
::: 3. Load the service:
|
||||
|
||||
:::
|
||||
3. Load the service:
|
||||
```bash
|
||||
sudo launchctl load ~/Library/LaunchAgents/com.jellyseerr.plist
|
||||
```
|
||||
|
||||
3. Start the service:
|
||||
|
||||
```bash
|
||||
sudo launchctl start com.jellyseerr
|
||||
```
|
||||
|
||||
4. To ensure the service starts on boot, run the following command:
|
||||
|
||||
```bash
|
||||
sudo launchctl load
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="pm2" label="PM2">
|
||||
To run jellyseerr as a PM2 service:
|
||||
@@ -221,38 +194,27 @@ pm2 status jellyseerr
|
||||
</Tabs>
|
||||
|
||||
## Windows
|
||||
|
||||
### Installation
|
||||
|
||||
1. Assuming you want the working directory to be `C:\jellyseerr`, create the directory and navigate to it:
|
||||
|
||||
```powershell
|
||||
mkdir C:\jellyseerr
|
||||
cd C:\jellyseerr
|
||||
```
|
||||
|
||||
2. Clone the Jellyseerr repository and checkout the develop branch:
|
||||
|
||||
```powershell
|
||||
git clone https://github.com/Fallenbagel/jellyseerr.git .
|
||||
git checkout develop # by default, you are on the develop branch so this step is not necessary
|
||||
```
|
||||
|
||||
3. Install the dependencies:
|
||||
|
||||
```powershell
|
||||
npm install -g win-node-env
|
||||
set CYPRESS_INSTALL_BINARY=0 && pnpm install --frozen-lockfile
|
||||
```
|
||||
|
||||
4. Build the project:
|
||||
|
||||
```powershell
|
||||
pnpm build
|
||||
```
|
||||
|
||||
5. Start Jellyseerr:
|
||||
|
||||
```powershell
|
||||
pnpm start
|
||||
```
|
||||
@@ -266,7 +228,6 @@ You can now access Jellyseerr by visiting `http://localhost:5055` in your web br
|
||||
:::
|
||||
|
||||
#### Extending the installation
|
||||
|
||||
<Tabs groupId="windows-extensions" queryString>
|
||||
<TabItem value="task-scheduler" label="Task Scheduler">
|
||||
To run jellyseerr as a bat script:
|
||||
@@ -288,7 +249,6 @@ node dist/index.js
|
||||
- Click "Finish"
|
||||
|
||||
Now, Jellyseerr will start when the computer boots up in the background.
|
||||
|
||||
</TabItem>
|
||||
|
||||
<TabItem value="nssm" label="NSSM">
|
||||
@@ -351,11 +311,9 @@ pm2 status jellyseerr
|
||||
</Tabs>
|
||||
|
||||
### Updating
|
||||
|
||||
To update Jellyseerr, navigate to the Jellyseerr directory and run the following commands:
|
||||
|
||||
```bash
|
||||
git pull
|
||||
```
|
||||
|
||||
Then, follow the steps in the installation section to rebuild and restart Jellyseerr.
|
||||
|
||||
|
||||
@@ -18,6 +18,10 @@ Users can optionally opt-in to being mentioned in Discord notifications by confi
|
||||
|
||||
You can find the webhook URL in the Discord application, at **Server Settings → Integrations → Webhooks**.
|
||||
|
||||
### Notification Role ID (optional)
|
||||
|
||||
If a role ID is specified, it will be included in the webhook message. See [Discord role ID](https://support.discord.com/hc/en-us/articles/206346498-Where-can-I-find-my-User-Server-Message-ID).
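
For illustration, a webhook message that mentions a configured role uses Discord's `<@&ROLE_ID>` syntax and can scope pings with `allowed_mentions`. The sketch below is an assumption about how such a payload could be built, not the exact payload Jellyseerr sends:

```ts
// Hypothetical example; webhookUrl and roleId come from the settings described above.
async function sendRoleMentionTest(webhookUrl: string, roleId: string) {
  const payload = {
    username: 'Jellyseerr', // optional bot username override
    content: `<@&${roleId}> A new media request is waiting for approval.`,
    allowed_mentions: { roles: [roleId] }, // restrict pings to this role only
  };

  const res = await fetch(webhookUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });
  if (!res.ok) {
    throw new Error(`Discord webhook failed: ${res.status} ${res.statusText}`);
  }
}
```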
|
||||
|
||||
### Bot Username (optional)
|
||||
|
||||
If you would like to override the name you configured for your bot in Discord, you may set this value to whatever you like!
|
||||
|
||||
2
next-env.d.ts
vendored
2
next-env.d.ts
vendored
@@ -2,4 +2,4 @@
|
||||
/// <reference types="next/image-types/global" />
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/pages/building-your-application/configuring/typescript for more information.
|
||||
// see https://nextjs.org/docs/basic-features/typescript for more information.
|
||||
|
||||
@@ -4,11 +4,13 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
commitTag: process.env.COMMIT_TAG || 'local',
|
||||
forceIpv4First: process.env.FORCE_IPV4_FIRST === 'true' ? 'true' : 'false',
|
||||
},
|
||||
images: {
|
||||
remotePatterns: [
|
||||
{ hostname: 'gravatar.com' },
|
||||
{ hostname: 'image.tmdb.org' },
|
||||
{ hostname: 'artworks.thetvdb.com' },
|
||||
],
|
||||
},
|
||||
webpack(config) {
|
||||
|
||||
@@ -1273,6 +1273,8 @@ components:
|
||||
type: string
|
||||
webhookUrl:
|
||||
type: string
|
||||
webhookRoleId:
|
||||
type: string
|
||||
enableMentions:
|
||||
type: boolean
|
||||
SlackSettings:
|
||||
@@ -4142,6 +4144,21 @@ paths:
|
||||
'412':
|
||||
description: Item has already been blacklisted
|
||||
/blacklist/{tmdbId}:
|
||||
get:
|
||||
summary: Get media from blacklist
|
||||
tags:
|
||||
- blacklist
|
||||
parameters:
|
||||
- in: path
|
||||
name: tmdbId
|
||||
description: tmdbId ID
|
||||
required: true
|
||||
example: '1'
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: Blacklist details in JSON
|
||||
delete:
|
||||
summary: Remove media from blacklist
|
||||
tags:
|
||||
@@ -5469,7 +5486,7 @@ paths:
|
||||
- type: array
|
||||
items:
|
||||
type: number
|
||||
minimum: 1
|
||||
minimum: 0
|
||||
- type: string
|
||||
enum: [all]
|
||||
is4k:
|
||||
@@ -5575,7 +5592,7 @@ paths:
|
||||
type: array
|
||||
items:
|
||||
type: number
|
||||
minimum: 1
|
||||
minimum: 0
|
||||
is4k:
|
||||
type: boolean
|
||||
example: false
|
||||
|
||||
@@ -43,8 +43,6 @@
|
||||
"@svgr/webpack": "6.5.1",
|
||||
"@tanem/react-nprogress": "5.0.30",
|
||||
"ace-builds": "1.15.2",
|
||||
"axios": "1.3.4",
|
||||
"axios-rate-limit": "1.3.0",
|
||||
"bcrypt": "5.1.0",
|
||||
"bowser": "2.11.0",
|
||||
"connect-typeorm": "1.1.4",
|
||||
@@ -62,7 +60,6 @@
|
||||
"express-rate-limit": "6.7.0",
|
||||
"express-session": "1.17.3",
|
||||
"formik": "^2.4.6",
|
||||
"global-agent": "^3.0.0",
|
||||
"gravatar-url": "3.1.0",
|
||||
"lodash": "4.17.21",
|
||||
"mime": "3",
|
||||
@@ -72,6 +69,7 @@
|
||||
"node-schedule": "2.1.1",
|
||||
"nodemailer": "6.9.1",
|
||||
"openpgp": "5.7.0",
|
||||
"pg": "8.11.0",
|
||||
"plex-api": "5.3.2",
|
||||
"pug": "3.0.2",
|
||||
"react": "^18.3.1",
|
||||
|
||||
pnpm-lock.yaml (generated, 8242 lines): file diff suppressed because it is too large.
@@ -1,7 +1,8 @@
import logger from '@server/logger';
import axios from 'axios';
import fs, { promises as fsp } from 'fs';
import path from 'path';
import fs, { promises as fsp } from 'node:fs';
import path from 'node:path';
import { Readable } from 'node:stream';
import type { ReadableStream } from 'node:stream/web';
import xml2js from 'xml2js';

const UPDATE_INTERVAL_MSEC = 24 * 3600 * 1000; // how often to download new mapping in milliseconds

@@ -161,13 +162,18 @@ class AnimeListMapping {
      label: 'Anime-List Sync',
    });
    try {
      const response = await axios.get(MAPPING_URL, {
        responseType: 'stream',
      });
      await new Promise<void>((resolve) => {
      const response = await fetch(MAPPING_URL);
      if (!response.ok) {
        throw new Error(`Failed to fetch: ${response.statusText}`);
      }
      await new Promise<void>((resolve, reject) => {
        const writer = fs.createWriteStream(LOCAL_PATH);
        writer.on('finish', resolve);
        response.data.pipe(writer);
        writer.on('error', reject);
        if (!response.body) return reject();
        Readable.fromWeb(response.body as ReadableStream<Uint8Array>).pipe(
          writer
        );
      });
    } catch (e) {
      throw new Error(`Failed to download Anime-List mapping: ${e.message}`);
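
For comparison, a self-contained sketch of the same fetch-and-write-to-disk pattern using `pipeline` from `node:stream/promises`, which resolves on completion and rejects on errors from either side. The function name and arguments are placeholders, not the module's real constants:

```ts
import { createWriteStream } from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import type { ReadableStream } from 'node:stream/web';

// Placeholder URL/path for illustration only.
async function downloadToFile(url: string, destination: string): Promise<void> {
  const response = await fetch(url);
  if (!response.ok || !response.body) {
    throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
  }
  // Bridge the WHATWG stream returned by fetch into a Node.js stream;
  // pipeline() resolves on finish and rejects if either side errors.
  await pipeline(
    Readable.fromWeb(response.body as ReadableStream<Uint8Array>),
    createWriteStream(destination)
  );
}
```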
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import type { AxiosInstance, AxiosRequestConfig } from 'axios';
|
||||
import axios from 'axios';
|
||||
import rateLimit from 'axios-rate-limit';
|
||||
import type { RateLimitOptions } from '@server/utils/rateLimit';
|
||||
import rateLimit from '@server/utils/rateLimit';
|
||||
import type NodeCache from 'node-cache';
|
||||
|
||||
// 5 minute default TTL (in seconds)
|
||||
@@ -12,71 +11,101 @@ const DEFAULT_ROLLING_BUFFER = 10000;
|
||||
interface ExternalAPIOptions {
|
||||
nodeCache?: NodeCache;
|
||||
headers?: Record<string, unknown>;
|
||||
rateLimit?: {
|
||||
maxRPS: number;
|
||||
maxRequests: number;
|
||||
};
|
||||
rateLimit?: RateLimitOptions;
|
||||
}
|
||||
|
||||
class ExternalAPI {
|
||||
protected axios: AxiosInstance;
|
||||
protected fetch: typeof fetch;
|
||||
protected params: Record<string, string>;
|
||||
protected defaultHeaders: { [key: string]: string };
|
||||
private baseUrl: string;
|
||||
private cache?: NodeCache;
|
||||
|
||||
constructor(
|
||||
baseUrl: string,
|
||||
params: Record<string, unknown>,
|
||||
params: Record<string, string> = {},
|
||||
options: ExternalAPIOptions = {}
|
||||
) {
|
||||
this.axios = axios.create({
|
||||
baseURL: baseUrl,
|
||||
params,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
|
||||
if (options.rateLimit) {
|
||||
this.axios = rateLimit(this.axios, {
|
||||
maxRequests: options.rateLimit.maxRequests,
|
||||
maxRPS: options.rateLimit.maxRPS,
|
||||
});
|
||||
this.fetch = rateLimit(fetch, options.rateLimit);
|
||||
} else {
|
||||
this.fetch = fetch;
|
||||
}
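
The `@server/utils/rateLimit` helper itself is not shown in this diff. As a rough sketch of what a fetch rate limiter with this call shape might look like (the option name `maxRPS` is carried over from the old axios-rate-limit options and is an assumption here):

```ts
// Assumed option shape; the real RateLimitOptions in @server/utils/rateLimit may differ.
interface RateLimitSketchOptions {
  maxRPS: number;
}

// Wrap any fetch-like function so that at most maxRPS calls start per second.
function rateLimitFetch(
  fetchFn: typeof fetch,
  { maxRPS }: RateLimitSketchOptions
): typeof fetch {
  const minIntervalMs = 1000 / maxRPS;
  let nextSlot = 0;

  return (async (input: RequestInfo | URL, init?: RequestInit) => {
    const now = Date.now();
    const wait = Math.max(0, nextSlot - now);
    nextSlot = Math.max(now, nextSlot) + minIntervalMs;
    if (wait > 0) {
      await new Promise((resolve) => setTimeout(resolve, wait));
    }
    return fetchFn(input, init);
  }) as typeof fetch;
}
```

With a wrapper along these lines, `this.fetch = rateLimit(fetch, options.rateLimit)` throttles every request made through the class without changing call sites.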
|
||||
|
||||
const url = new URL(baseUrl);
|
||||
|
||||
this.defaultHeaders = {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
...((url.username || url.password) && {
|
||||
Authorization: `Basic ${Buffer.from(
|
||||
`${url.username}:${url.password}`
|
||||
).toString('base64')}`,
|
||||
}),
|
||||
...options.headers,
|
||||
};
|
||||
|
||||
if (url.username || url.password) {
|
||||
url.username = '';
|
||||
url.password = '';
|
||||
baseUrl = url.toString();
|
||||
}
|
||||
|
||||
this.baseUrl = baseUrl;
|
||||
this.params = params;
|
||||
this.cache = options.nodeCache;
|
||||
}
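
To make the credential handling above concrete, a standalone example (the URL is made up) of what the constructor derives from a base URL that embeds credentials:

```ts
// Hypothetical base URL with embedded credentials.
const url = new URL('http://user:s3cret@sonarr.local:8989/api/v3');

const authHeader = `Basic ${Buffer.from(
  `${url.username}:${url.password}`
).toString('base64')}`; // "Basic dXNlcjpzM2NyZXQ="

url.username = '';
url.password = '';

console.log(authHeader);
console.log(url.toString()); // "http://sonarr.local:8989/api/v3" (credentials no longer leak into request URLs)
```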
|
||||
|
||||
protected async get<T>(
|
||||
endpoint: string,
|
||||
config?: AxiosRequestConfig,
|
||||
ttl?: number
|
||||
params?: Record<string, string>,
|
||||
ttl?: number,
|
||||
config?: RequestInit
|
||||
): Promise<T> {
|
||||
const cacheKey = this.serializeCacheKey(endpoint, config?.params);
|
||||
const cacheKey = this.serializeCacheKey(endpoint, {
|
||||
...this.params,
|
||||
...params,
|
||||
});
|
||||
const cachedItem = this.cache?.get<T>(cacheKey);
|
||||
if (cachedItem) {
|
||||
return cachedItem;
|
||||
}
|
||||
|
||||
const response = await this.axios.get<T>(endpoint, config);
|
||||
const url = this.formatUrl(endpoint, params);
|
||||
const response = await this.fetch(url, {
|
||||
...config,
|
||||
headers: {
|
||||
...this.defaultHeaders,
|
||||
...config?.headers,
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(
|
||||
`${response.status} ${response.statusText}${text ? ': ' + text : ''}`,
|
||||
{
|
||||
cause: response,
|
||||
}
|
||||
);
|
||||
}
|
||||
const data = await this.getDataFromResponse(response);
|
||||
|
||||
if (this.cache) {
|
||||
this.cache.set(cacheKey, response.data, ttl ?? DEFAULT_TTL);
|
||||
if (this.cache && ttl !== 0) {
|
||||
this.cache.set(cacheKey, data, ttl ?? DEFAULT_TTL);
|
||||
}
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
}
|
||||
|
||||
protected async post<T>(
|
||||
endpoint: string,
|
||||
data?: Record<string, unknown>,
|
||||
config?: AxiosRequestConfig,
|
||||
ttl?: number
|
||||
params?: Record<string, string>,
|
||||
ttl?: number,
|
||||
config?: RequestInit
|
||||
): Promise<T> {
|
||||
const cacheKey = this.serializeCacheKey(endpoint, {
|
||||
config: config?.params,
|
||||
config: { ...this.params, ...params },
|
||||
data,
|
||||
});
|
||||
const cachedItem = this.cache?.get<T>(cacheKey);
|
||||
@@ -84,21 +113,117 @@ class ExternalAPI {
|
||||
return cachedItem;
|
||||
}
|
||||
|
||||
const response = await this.axios.post<T>(endpoint, data, config);
|
||||
const url = this.formatUrl(endpoint, params);
|
||||
const response = await this.fetch(url, {
|
||||
method: 'POST',
|
||||
...config,
|
||||
headers: {
|
||||
...this.defaultHeaders,
|
||||
...config?.headers,
|
||||
},
|
||||
body: data ? JSON.stringify(data) : undefined,
|
||||
});
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(
|
||||
`${response.status} ${response.statusText}${text ? ': ' + text : ''}`,
|
||||
{
|
||||
cause: response,
|
||||
}
|
||||
);
|
||||
}
|
||||
const resData = await this.getDataFromResponse(response);
|
||||
|
||||
if (this.cache) {
|
||||
this.cache.set(cacheKey, response.data, ttl ?? DEFAULT_TTL);
|
||||
if (this.cache && ttl !== 0) {
|
||||
this.cache.set(cacheKey, resData, ttl ?? DEFAULT_TTL);
|
||||
}
|
||||
|
||||
return response.data;
|
||||
return resData;
|
||||
}
|
||||
|
||||
protected async put<T>(
|
||||
endpoint: string,
|
||||
data: Record<string, unknown>,
|
||||
params?: Record<string, string>,
|
||||
ttl?: number,
|
||||
config?: RequestInit
|
||||
): Promise<T> {
|
||||
const cacheKey = this.serializeCacheKey(endpoint, {
|
||||
config: { ...this.params, ...params },
|
||||
data,
|
||||
});
|
||||
const cachedItem = this.cache?.get<T>(cacheKey);
|
||||
if (cachedItem) {
|
||||
return cachedItem;
|
||||
}
|
||||
|
||||
const url = this.formatUrl(endpoint, params);
|
||||
const response = await this.fetch(url, {
|
||||
method: 'PUT',
|
||||
...config,
|
||||
headers: {
|
||||
...this.defaultHeaders,
|
||||
...config?.headers,
|
||||
},
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(
|
||||
`${response.status} ${response.statusText}${text ? ': ' + text : ''}`,
|
||||
{
|
||||
cause: response,
|
||||
}
|
||||
);
|
||||
}
|
||||
const resData = await this.getDataFromResponse(response);
|
||||
|
||||
if (this.cache && ttl !== 0) {
|
||||
this.cache.set(cacheKey, resData, ttl ?? DEFAULT_TTL);
|
||||
}
|
||||
|
||||
return resData;
|
||||
}
|
||||
|
||||
protected async delete<T>(
|
||||
endpoint: string,
|
||||
params?: Record<string, string>,
|
||||
config?: RequestInit
|
||||
): Promise<T> {
|
||||
const url = this.formatUrl(endpoint, params);
|
||||
const response = await this.fetch(url, {
|
||||
method: 'DELETE',
|
||||
...config,
|
||||
headers: {
|
||||
...this.defaultHeaders,
|
||||
...config?.headers,
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(
|
||||
`${response.status} ${response.statusText}${text ? ': ' + text : ''}`,
|
||||
{
|
||||
cause: response,
|
||||
}
|
||||
);
|
||||
}
|
||||
const data = await this.getDataFromResponse(response);
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
protected async getRolling<T>(
|
||||
endpoint: string,
|
||||
config?: AxiosRequestConfig,
|
||||
ttl?: number
|
||||
params?: Record<string, string>,
|
||||
ttl?: number,
|
||||
config?: RequestInit,
|
||||
overwriteBaseUrl?: string
|
||||
): Promise<T> {
|
||||
const cacheKey = this.serializeCacheKey(endpoint, config?.params);
|
||||
const cacheKey = this.serializeCacheKey(endpoint, {
|
||||
...this.params,
|
||||
...params,
|
||||
});
|
||||
const cachedItem = this.cache?.get<T>(cacheKey);
|
||||
|
||||
if (cachedItem) {
|
||||
@@ -109,20 +234,78 @@ class ExternalAPI {
|
||||
keyTtl - (ttl ?? DEFAULT_TTL) * 1000 <
|
||||
Date.now() - DEFAULT_ROLLING_BUFFER
|
||||
) {
|
||||
this.axios.get<T>(endpoint, config).then((response) => {
|
||||
this.cache?.set(cacheKey, response.data, ttl ?? DEFAULT_TTL);
|
||||
const url = this.formatUrl(endpoint, params, overwriteBaseUrl);
|
||||
this.fetch(url, {
|
||||
...config,
|
||||
headers: {
|
||||
...this.defaultHeaders,
|
||||
...config?.headers,
|
||||
},
|
||||
}).then(async (response) => {
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(
|
||||
`${response.status} ${response.statusText}${
|
||||
text ? ': ' + text : ''
|
||||
}`,
|
||||
{
|
||||
cause: response,
|
||||
}
|
||||
);
|
||||
}
|
||||
const data = await this.getDataFromResponse(response);
|
||||
this.cache?.set(cacheKey, data, ttl ?? DEFAULT_TTL);
|
||||
});
|
||||
}
|
||||
return cachedItem;
|
||||
}
|
||||
|
||||
const response = await this.axios.get<T>(endpoint, config);
|
||||
const url = this.formatUrl(endpoint, params, overwriteBaseUrl);
|
||||
const response = await this.fetch(url, {
|
||||
...config,
|
||||
headers: {
|
||||
...this.defaultHeaders,
|
||||
...config?.headers,
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(
|
||||
`${response.status} ${response.statusText}${text ? ': ' + text : ''}`,
|
||||
{
|
||||
cause: response,
|
||||
}
|
||||
);
|
||||
}
|
||||
const data = await this.getDataFromResponse(response);
|
||||
|
||||
if (this.cache) {
|
||||
this.cache.set(cacheKey, response.data, ttl ?? DEFAULT_TTL);
|
||||
this.cache.set(cacheKey, data, ttl ?? DEFAULT_TTL);
|
||||
}
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
}
|
||||
|
||||
private formatUrl(
|
||||
endpoint: string,
|
||||
params?: Record<string, string>,
|
||||
overwriteBaseUrl?: string
|
||||
): string {
|
||||
const baseUrl = overwriteBaseUrl || this.baseUrl;
|
||||
const href =
|
||||
baseUrl +
|
||||
(baseUrl.endsWith('/') ? '' : '/') +
|
||||
(endpoint.startsWith('/') ? endpoint.slice(1) : endpoint);
|
||||
const searchParams = new URLSearchParams({
|
||||
...this.params,
|
||||
...params,
|
||||
});
|
||||
return (
|
||||
href +
|
||||
(searchParams.toString().length
|
||||
? '?' + searchParams.toString()
|
||||
: searchParams.toString())
|
||||
);
|
||||
}
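
A quick worked example of the URL composition above, showing how per-instance defaults (`this.params`) merge with per-call parameters. All values are illustrative:

```ts
// Illustrative values only.
const baseUrl = 'https://api.themoviedb.org/3';
const endpoint = '/search/movie';
const defaults = { api_key: 'xxxx', language: 'en' }; // stands in for this.params
const params = { query: 'dune', page: '1' };

const href =
  baseUrl +
  (baseUrl.endsWith('/') ? '' : '/') +
  (endpoint.startsWith('/') ? endpoint.slice(1) : endpoint);
const search = new URLSearchParams({ ...defaults, ...params });

console.log(`${href}?${search.toString()}`);
// https://api.themoviedb.org/3/search/movie?api_key=xxxx&language=en&query=dune&page=1
```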
|
||||
|
||||
private serializeCacheKey(
|
||||
@@ -135,6 +318,29 @@ class ExternalAPI {
|
||||
|
||||
return `${this.baseUrl}${endpoint}${JSON.stringify(params)}`;
|
||||
}
|
||||
|
||||
private async getDataFromResponse(response: Response) {
|
||||
const contentType = response.headers.get('Content-Type');
|
||||
if (contentType?.includes('application/json')) {
|
||||
return await response.json();
|
||||
} else if (
|
||||
contentType?.includes('application/xml') ||
|
||||
contentType?.includes('text/html') ||
|
||||
contentType?.includes('text/plain')
|
||||
) {
|
||||
return await response.text();
|
||||
} else {
|
||||
try {
|
||||
return await response.json();
|
||||
} catch {
|
||||
try {
|
||||
return await response.blob();
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default ExternalAPI;
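
To summarize the signature change for callers, here is a hedged sketch of a hypothetical subclass (endpoint, parameters, and types are made up): query parameters are now a plain string record instead of an Axios config, the third argument is the cache TTL with `0` meaning no caching, and the parsed body is returned directly rather than an Axios response.

```ts
interface Item {
  id: number;
  title: string;
}

// Hypothetical subclass for illustration only.
class ExampleAPI extends ExternalAPI {
  constructor() {
    super('https://example.invalid/api', { apikey: 'xxxx' });
  }

  // Old style (axios):           this.axios.get<Item[]>('/items', { params: { page: '1' } }) then .data
  // New style (fetch-based get): parsed JSON is returned directly.
  public async getItems(): Promise<Item[]> {
    return this.get<Item[]>(
      '/items',
      { page: '1' }, // query params, merged with the constructor defaults
      0 // a ttl of 0 skips the NodeCache entirely
    );
  }
}
```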
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import ExternalAPI from '@server/api/externalapi';
|
||||
import cacheManager from '@server/lib/cache';
|
||||
import logger from '@server/logger';
|
||||
import ExternalAPI from './externalapi';
|
||||
|
||||
interface GitHubRelease {
|
||||
url: string;
|
||||
@@ -67,10 +67,6 @@ class GithubAPI extends ExternalAPI {
|
||||
'https://api.github.com',
|
||||
{},
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
nodeCache: cacheManager.getCache('github').data,
|
||||
}
|
||||
);
|
||||
@@ -85,9 +81,7 @@ class GithubAPI extends ExternalAPI {
|
||||
const data = await this.get<GitHubRelease[]>(
|
||||
'/repos/fallenbagel/jellyseerr/releases',
|
||||
{
|
||||
params: {
|
||||
per_page: take,
|
||||
},
|
||||
per_page: take.toString(),
|
||||
}
|
||||
);
|
||||
|
||||
@@ -112,10 +106,8 @@ class GithubAPI extends ExternalAPI {
|
||||
const data = await this.get<GithubCommit[]>(
|
||||
'/repos/fallenbagel/jellyseerr/commits',
|
||||
{
|
||||
params: {
|
||||
per_page: take,
|
||||
branch,
|
||||
},
|
||||
per_page: take.toString(),
|
||||
branch,
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
@@ -109,8 +109,6 @@ class JellyfinAPI extends ExternalAPI {
|
||||
{
|
||||
headers: {
|
||||
'X-Emby-Authorization': authHeaderVal,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
}
|
||||
);
|
||||
@@ -122,7 +120,7 @@ class JellyfinAPI extends ExternalAPI {
|
||||
ClientIP?: string
|
||||
): Promise<JellyfinLoginResponse> {
|
||||
const authenticate = async (useHeaders: boolean) => {
|
||||
const headers =
|
||||
const headers: { [key: string]: string } =
|
||||
useHeaders && ClientIP ? { 'X-Forwarded-For': ClientIP } : {};
|
||||
|
||||
return this.post<JellyfinLoginResponse>(
|
||||
@@ -131,6 +129,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
Username,
|
||||
Pw: Password,
|
||||
},
|
||||
{},
|
||||
undefined,
|
||||
{ headers }
|
||||
);
|
||||
};
|
||||
@@ -138,39 +138,38 @@ class JellyfinAPI extends ExternalAPI {
|
||||
try {
|
||||
return await authenticate(true);
|
||||
} catch (e) {
|
||||
logger.debug(`Failed to authenticate with headers: ${e.message}`, {
|
||||
logger.debug('Failed to authenticate with headers', {
|
||||
label: 'Jellyfin API',
|
||||
error: e.cause.message ?? e.cause.statusText,
|
||||
ip: ClientIP,
|
||||
});
|
||||
|
||||
if (!e.cause.status) {
|
||||
throw new ApiError(404, ApiErrorCode.InvalidUrl);
|
||||
}
|
||||
|
||||
if (e.cause.status === 401) {
|
||||
throw new ApiError(e.cause.status, ApiErrorCode.InvalidCredentials);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return await authenticate(false);
|
||||
} catch (e) {
|
||||
const status = e.cause?.status;
|
||||
|
||||
const networkErrorCodes = new Set([
|
||||
'ECONNREFUSED',
|
||||
'EHOSTUNREACH',
|
||||
'ENOTFOUND',
|
||||
'ETIMEDOUT',
|
||||
'ECONNRESET',
|
||||
'EADDRINUSE',
|
||||
'ENETDOWN',
|
||||
'ENETUNREACH',
|
||||
'EPIPE',
|
||||
'ECONNABORTED',
|
||||
'EPROTO',
|
||||
'EHOSTDOWN',
|
||||
'EAI_AGAIN',
|
||||
'ERR_INVALID_URL',
|
||||
]);
|
||||
|
||||
if (networkErrorCodes.has(e.code) || status === 404) {
|
||||
throw new ApiError(status, ApiErrorCode.InvalidUrl);
|
||||
if (e.cause.status === 401) {
|
||||
throw new ApiError(e.cause.status, ApiErrorCode.InvalidCredentials);
|
||||
}
|
||||
|
||||
throw new ApiError(status, ApiErrorCode.InvalidCredentials);
|
||||
logger.error(
|
||||
'Something went wrong while authenticating with the Jellyfin server',
|
||||
{
|
||||
label: 'Jellyfin API',
|
||||
error: e.cause.message ?? e.cause.statusText,
|
||||
ip: ClientIP,
|
||||
}
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause.status, ApiErrorCode.Unknown);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,8 +197,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
return serverResponse.ServerName;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting the server name from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting the server name from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.Unknown);
|
||||
@@ -213,8 +212,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
return { users: userReponse };
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting the account from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting the account from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.InvalidAuthToken);
|
||||
@@ -229,8 +228,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
return userReponse;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting the account from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting the account from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.InvalidAuthToken);
|
||||
@@ -253,8 +252,11 @@ class JellyfinAPI extends ExternalAPI {
|
||||
return this.mapLibraries(mediaFolderResponse.Items);
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting libraries from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting libraries from the Jellyfin server',
|
||||
{
|
||||
label: 'Jellyfin API',
|
||||
error: e.cause.message ?? e.cause.statusText,
|
||||
}
|
||||
);
|
||||
|
||||
return [];
|
||||
@@ -291,7 +293,16 @@ class JellyfinAPI extends ExternalAPI {
|
||||
public async getLibraryContents(id: string): Promise<JellyfinLibraryItem[]> {
|
||||
try {
|
||||
const libraryItemsResponse = await this.get<any>(
|
||||
`/Users/${this.userId}/Items?SortBy=SortName&SortOrder=Ascending&IncludeItemTypes=Series,Movie,Others&Recursive=true&StartIndex=0&ParentId=${id}&collapseBoxSetItems=false`
|
||||
`/Users/${this.userId}/Items`,
|
||||
{
|
||||
SortBy: 'SortName',
|
||||
SortOrder: 'Ascending',
|
||||
IncludeItemTypes: 'Series,Movie,Others',
|
||||
Recursive: 'true',
|
||||
StartIndex: '0',
|
||||
ParentId: id,
|
||||
collapseBoxSetItems: 'false',
|
||||
}
|
||||
);
|
||||
|
||||
return libraryItemsResponse.Items.filter(
|
||||
@@ -299,8 +310,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
);
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting library content from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting library content from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.InvalidAuthToken);
|
||||
@@ -310,14 +321,18 @@ class JellyfinAPI extends ExternalAPI {
|
||||
public async getRecentlyAdded(id: string): Promise<JellyfinLibraryItem[]> {
|
||||
try {
|
||||
const itemResponse = await this.get<any>(
|
||||
`/Users/${this.userId}/Items/Latest?Limit=12&ParentId=${id}`
|
||||
`/Users/${this.userId}/Items/Latest`,
|
||||
{
|
||||
Limit: '12',
|
||||
ParentId: id,
|
||||
}
|
||||
);
|
||||
|
||||
return itemResponse;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting library content from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting library content from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.InvalidAuthToken);
|
||||
@@ -341,8 +356,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
}
|
||||
|
||||
logger.error(
|
||||
`Something went wrong while getting library content from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting library content from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.InvalidAuthToken);
|
||||
}
|
||||
@@ -355,8 +370,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
return seasonResponse.Items;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting the list of seasons from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting the list of seasons from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.InvalidAuthToken);
|
||||
@@ -369,7 +384,10 @@ class JellyfinAPI extends ExternalAPI {
|
||||
): Promise<JellyfinLibraryItem[]> {
|
||||
try {
|
||||
const episodeResponse = await this.get<any>(
|
||||
`/Shows/${seriesID}/Episodes?seasonId=${seasonID}`
|
||||
`/Shows/${seriesID}/Episodes`,
|
||||
{
|
||||
seasonId: seasonID,
|
||||
}
|
||||
);
|
||||
|
||||
return episodeResponse.Items.filter(
|
||||
@@ -377,8 +395,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
);
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting the list of episodes from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while getting the list of episodes from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.cause?.status, ApiErrorCode.InvalidAuthToken);
|
||||
@@ -394,8 +412,8 @@ class JellyfinAPI extends ExternalAPI {
|
||||
).AccessToken;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while creating an API key from the Jellyfin server: ${e.message}`,
|
||||
{ label: 'Jellyfin API' }
|
||||
'Something went wrong while creating an API key from the Jellyfin server',
|
||||
{ label: 'Jellyfin API', error: e.cause.message ?? e.cause.statusText }
|
||||
);
|
||||
|
||||
throw new ApiError(e.response?.status, ApiErrorCode.InvalidAuthToken);
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import ExternalAPI from '@server/api/externalapi';
|
||||
import type { PlexDevice } from '@server/interfaces/api/plexInterfaces';
|
||||
import cacheManager from '@server/lib/cache';
|
||||
import { getSettings } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import xml2js from 'xml2js';
|
||||
import ExternalAPI from './externalapi';
|
||||
|
||||
interface PlexAccountResponse {
|
||||
user: PlexUser;
|
||||
@@ -127,6 +128,11 @@ export interface PlexWatchlistItem {
|
||||
title: string;
|
||||
}
|
||||
|
||||
export interface PlexWatchlistCache {
|
||||
etag: string;
|
||||
response: WatchlistResponse;
|
||||
}
|
||||
|
||||
class PlexTvAPI extends ExternalAPI {
|
||||
private authToken: string;
|
||||
|
||||
@@ -137,8 +143,6 @@ class PlexTvAPI extends ExternalAPI {
|
||||
{
|
||||
headers: {
|
||||
'X-Plex-Token': authToken,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
nodeCache: cacheManager.getCache('plextv').data,
|
||||
}
|
||||
@@ -149,15 +153,11 @@ class PlexTvAPI extends ExternalAPI {
|
||||
|
||||
public async getDevices(): Promise<PlexDevice[]> {
|
||||
try {
|
||||
const devicesResp = await this.axios.get(
|
||||
'/api/resources?includeHttps=1',
|
||||
{
|
||||
transformResponse: [],
|
||||
responseType: 'text',
|
||||
}
|
||||
);
|
||||
const devicesResp = await this.get('/api/resources', {
|
||||
includeHttps: '1',
|
||||
});
|
||||
const parsedXml = await xml2js.parseStringPromise(
|
||||
devicesResp.data as DeviceResponse
|
||||
devicesResp as DeviceResponse
|
||||
);
|
||||
return parsedXml?.MediaContainer?.Device?.map((pxml: DeviceResponse) => ({
|
||||
name: pxml.$.name,
|
||||
@@ -205,11 +205,11 @@ class PlexTvAPI extends ExternalAPI {
|
||||
|
||||
public async getUser(): Promise<PlexUser> {
|
||||
try {
|
||||
const account = await this.axios.get<PlexAccountResponse>(
|
||||
const account = await this.get<PlexAccountResponse>(
|
||||
'/users/account.json'
|
||||
);
|
||||
|
||||
return account.data.user;
|
||||
return account.user;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
`Something went wrong while getting the account from plex.tv: ${e.message}`,
|
||||
@@ -249,13 +249,10 @@ class PlexTvAPI extends ExternalAPI {
|
||||
}
|
||||
|
||||
public async getUsers(): Promise<UsersResponse> {
|
||||
const response = await this.axios.get('/api/users', {
|
||||
transformResponse: [],
|
||||
responseType: 'text',
|
||||
});
|
||||
const data = await this.get('/api/users');
|
||||
|
||||
const parsedXml = (await xml2js.parseStringPromise(
|
||||
response.data
|
||||
data as string
|
||||
)) as UsersResponse;
|
||||
return parsedXml;
|
||||
}
|
||||
@@ -270,25 +267,50 @@ class PlexTvAPI extends ExternalAPI {
|
||||
items: PlexWatchlistItem[];
|
||||
}> {
|
||||
try {
|
||||
const response = await this.axios.get<WatchlistResponse>(
|
||||
'/library/sections/watchlist/all',
|
||||
{
|
||||
params: {
|
||||
'X-Plex-Container-Start': offset,
|
||||
'X-Plex-Container-Size': size,
|
||||
},
|
||||
baseURL: 'https://metadata.provider.plex.tv',
|
||||
}
|
||||
const watchlistCache = cacheManager.getCache('plexwatchlist');
|
||||
let cachedWatchlist = watchlistCache.data.get<PlexWatchlistCache>(
|
||||
this.authToken
|
||||
);
|
||||
|
||||
const params = new URLSearchParams({
|
||||
'X-Plex-Container-Start': offset.toString(),
|
||||
'X-Plex-Container-Size': size.toString(),
|
||||
});
|
||||
const response = await this.fetch(
|
||||
`https://metadata.provider.plex.tv/library/sections/watchlist/all?${params.toString()}`,
|
||||
{
|
||||
headers: {
|
||||
...this.defaultHeaders,
|
||||
...(cachedWatchlist?.etag
|
||||
? { 'If-None-Match': cachedWatchlist.etag }
|
||||
: {}),
|
||||
},
|
||||
}
|
||||
);
|
||||
const data = (await response.json()) as WatchlistResponse;
|
||||
|
||||
// If we don't receive HTTP 304, the watchlist has been updated and we need to update the cache.

|
||||
if (response.status >= 200 && response.status <= 299) {
|
||||
cachedWatchlist = {
|
||||
etag: response.headers.get('etag') ?? '',
|
||||
response: data,
|
||||
};
|
||||
|
||||
watchlistCache.data.set<PlexWatchlistCache>(
|
||||
this.authToken,
|
||||
cachedWatchlist
|
||||
);
|
||||
}
|
||||
|
||||
const watchlistDetails = await Promise.all(
|
||||
(response.data.MediaContainer.Metadata ?? []).map(
|
||||
(cachedWatchlist?.response.MediaContainer.Metadata ?? []).map(
|
||||
async (watchlistItem) => {
|
||||
const detailedResponse = await this.getRolling<MetadataResponse>(
|
||||
`/library/metadata/${watchlistItem.ratingKey}`,
|
||||
{
|
||||
baseURL: 'https://metadata.provider.plex.tv',
|
||||
}
|
||||
{},
|
||||
undefined,
|
||||
{},
|
||||
'https://metadata.provider.plex.tv'
|
||||
);
|
||||
|
||||
const metadata = detailedResponse.MediaContainer.Metadata[0];
|
||||
@@ -320,7 +342,7 @@ class PlexTvAPI extends ExternalAPI {
|
||||
return {
|
||||
offset,
|
||||
size,
|
||||
totalSize: response.data.MediaContainer.totalSize,
|
||||
totalSize: cachedWatchlist?.response.MediaContainer.totalSize ?? 0,
|
||||
items: filteredList,
|
||||
};
|
||||
} catch (e) {
|
||||
@@ -336,6 +358,29 @@ class PlexTvAPI extends ExternalAPI {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public async pingToken() {
|
||||
try {
|
||||
const data: { pong: unknown } = await this.get(
|
||||
'/api/v2/ping',
|
||||
{},
|
||||
undefined,
|
||||
{
|
||||
headers: {
|
||||
'X-Plex-Client-Identifier': randomUUID(),
|
||||
},
|
||||
}
|
||||
);
|
||||
if (!data?.pong) {
|
||||
throw new Error('No pong response');
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Failed to ping token', {
|
||||
label: 'Plex Refresh Token',
|
||||
errorMessage: e.message,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default PlexTvAPI;
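
The watchlist change above relies on HTTP conditional requests: the cached ETag is sent back as `If-None-Match`, and a `304 Not Modified` response means the cached body can be reused. A standalone sketch of that pattern follows; the cache shape and function name are placeholders:

```ts
interface EtagCacheEntry<T> {
  etag: string;
  body: T;
}

// Placeholder in-memory cache keyed by URL.
const etagCache = new Map<string, EtagCacheEntry<unknown>>();

async function fetchWithEtag<T>(
  url: string,
  headers: Record<string, string>
): Promise<T> {
  const cached = etagCache.get(url) as EtagCacheEntry<T> | undefined;

  const response = await fetch(url, {
    headers: {
      ...headers,
      ...(cached ? { 'If-None-Match': cached.etag } : {}),
    },
  });

  // 304: the server confirms our cached copy is still current.
  if (response.status === 304 && cached) {
    return cached.body;
  }
  if (!response.ok) {
    throw new Error(`${response.status} ${response.statusText}`);
  }

  const body = (await response.json()) as T;
  etagCache.set(url, { etag: response.headers.get('etag') ?? '', body });
  return body;
}
```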
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import ExternalAPI from './externalapi';
|
||||
import ExternalAPI from '@server/api/externalapi';
|
||||
|
||||
interface PushoverSoundsResponse {
|
||||
sounds: {
|
||||
@@ -26,24 +26,13 @@ export const mapSounds = (sounds: {
|
||||
|
||||
class PushoverAPI extends ExternalAPI {
|
||||
constructor() {
|
||||
super(
|
||||
'https://api.pushover.net/1',
|
||||
{},
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
}
|
||||
);
|
||||
super('https://api.pushover.net/1');
|
||||
}
|
||||
|
||||
public async getSounds(appToken: string): Promise<PushoverSound[]> {
|
||||
try {
|
||||
const data = await this.get<PushoverSoundsResponse>('/sounds.json', {
|
||||
params: {
|
||||
token: appToken,
|
||||
},
|
||||
token: appToken,
|
||||
});
|
||||
|
||||
return mapSounds(data.sounds);
|
||||
|
||||
@@ -155,13 +155,13 @@ export interface IMDBRating {
|
||||
*/
|
||||
class IMDBRadarrProxy extends ExternalAPI {
|
||||
constructor() {
|
||||
super('https://api.radarr.video/v1', {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
nodeCache: cacheManager.getCache('imdb').data,
|
||||
});
|
||||
super(
|
||||
'https://api.radarr.video/v1',
|
||||
{},
|
||||
{
|
||||
nodeCache: cacheManager.getCache('imdb').data,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -63,15 +63,12 @@ class RottenTomatoes extends ExternalAPI {
|
||||
super(
|
||||
'https://79frdp12pn-dsn.algolia.net/1/indexes/*',
|
||||
{
|
||||
'x-algolia-agent':
|
||||
'Algolia%20for%20JavaScript%20(4.14.3)%3B%20Browser%20(lite)',
|
||||
'x-algolia-agent': 'Algolia for JavaScript (4.14.3); Browser (lite)',
|
||||
'x-algolia-api-key': '175588f6e5f8319b27702e4cc4013561',
|
||||
'x-algolia-application-id': '79FRDP12PN',
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
'x-algolia-usertoken': settings.clientId,
|
||||
},
|
||||
nodeCache: cacheManager.getCache('rt').data,
|
||||
|
||||
@@ -113,9 +113,9 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {

public getSystemStatus = async (): Promise<SystemStatus> => {
try {
const response = await this.axios.get<SystemStatus>('/system/status');
const data = await this.get<SystemStatus>('/system/status');

return response.data;
return data;
} catch (e) {
throw new Error(
`[${this.apiName}] Failed to retrieve system status: ${e.message}`
@@ -157,16 +157,15 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {

public getQueue = async (): Promise<(QueueItem & QueueItemAppendT)[]> => {
try {
const response = await this.axios.get<QueueResponse<QueueItemAppendT>>(
const data = await this.get<QueueResponse<QueueItemAppendT>>(
`/queue`,
{
params: {
includeEpisode: true,
},
}
includeEpisode: 'true',
},
0
);

return response.data.records;
return data.records;
} catch (e) {
throw new Error(
`[${this.apiName}] Failed to retrieve queue: ${e.message}`
@@ -176,9 +175,9 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {

public getTags = async (): Promise<Tag[]> => {
try {
const response = await this.axios.get<Tag[]>(`/tag`);
const data = await this.get<Tag[]>(`/tag`);

return response.data;
return data;
} catch (e) {
throw new Error(
`[${this.apiName}] Failed to retrieve tags: ${e.message}`
@@ -188,11 +187,11 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {

public createTag = async ({ label }: { label: string }): Promise<Tag> => {
try {
const response = await this.axios.post<Tag>(`/tag`, {
const data = await this.post<Tag>(`/tag`, {
label,
});

return response.data;
return data;
} catch (e) {
throw new Error(`[${this.apiName}] Failed to create tag: ${e.message}`);
}
@@ -207,10 +206,15 @@ class ServarrBase<QueueItemAppendT> extends ExternalAPI {
options: Record<string, unknown>
): Promise<void> {
try {
await this.axios.post(`/command`, {
name: commandName,
...options,
});
await this.post(
`/command`,
{
name: commandName,
...options,
},
{},
0
);
} catch (e) {
throw new Error(`[${this.apiName}] Failed to run command: ${e.message}`);
}
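Worth calling out in the ServarrBase changes above: query parameter values are now passed as strings (e.g. includeEpisode: 'true', and deleteFiles: 'true' further down), presumably because the new wrapper serializes them straight into the URL, and the trailing numeric argument reads like a cache TTL in seconds, with 0 disabling caching. A hedged usage sketch under those assumptions:

// Illustrative only; the exact ExternalAPI signatures are inferred from the
// calls above, not from the class itself.
import ExternalAPI from '@server/api/externalapi';

class QueueClient extends ExternalAPI {
  constructor(baseUrl: string) {
    super(baseUrl, {}, {});
  }

  public getQueuePage = async (page: number) => {
    // numbers/booleans are stringified by the caller; the final 0 skips the cache
    return this.get<{ records: unknown[] }>(
      '/queue',
      { page: page.toString(), includeEpisode: 'true' },
      0
    );
  };
}

export default QueueClient;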
|
||||
|
||||
@@ -37,9 +37,9 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
|
||||
public getMovies = async (): Promise<RadarrMovie[]> => {
|
||||
try {
|
||||
const response = await this.axios.get<RadarrMovie[]>('/movie');
|
||||
const data = await this.get<RadarrMovie[]>('/movie');
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
} catch (e) {
|
||||
throw new Error(`[Radarr] Failed to retrieve movies: ${e.message}`);
|
||||
}
|
||||
@@ -47,9 +47,9 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
|
||||
public getMovie = async ({ id }: { id: number }): Promise<RadarrMovie> => {
|
||||
try {
|
||||
const response = await this.axios.get<RadarrMovie>(`/movie/${id}`);
|
||||
const data = await this.get<RadarrMovie>(`/movie/${id}`);
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
} catch (e) {
|
||||
throw new Error(`[Radarr] Failed to retrieve movie: ${e.message}`);
|
||||
}
|
||||
@@ -57,17 +57,15 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
|
||||
public async getMovieByTmdbId(id: number): Promise<RadarrMovie> {
|
||||
try {
|
||||
const response = await this.axios.get<RadarrMovie[]>('/movie/lookup', {
|
||||
params: {
|
||||
term: `tmdb:${id}`,
|
||||
},
|
||||
const data = await this.get<RadarrMovie[]>('/movie/lookup', {
|
||||
term: `tmdb:${id}`,
|
||||
});
|
||||
|
||||
if (!response.data[0]) {
|
||||
if (!data[0]) {
|
||||
throw new Error('Movie not found');
|
||||
}
|
||||
|
||||
return response.data[0];
|
||||
return data[0];
|
||||
} catch (e) {
|
||||
logger.error('Error retrieving movie by TMDB ID', {
|
||||
label: 'Radarr API',
|
||||
@@ -97,7 +95,7 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
|
||||
// movie exists in Radarr but is neither downloaded nor monitored
|
||||
if (movie.id && !movie.monitored) {
|
||||
const response = await this.axios.put<RadarrMovie>(`/movie`, {
|
||||
const data = await this.put<RadarrMovie>(`/movie`, {
|
||||
...movie,
|
||||
title: options.title,
|
||||
qualityProfileId: options.qualityProfileId,
|
||||
@@ -114,25 +112,25 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
},
|
||||
});
|
||||
|
||||
if (response.data.monitored) {
|
||||
if (data.monitored) {
|
||||
logger.info(
|
||||
'Found existing title in Radarr and set it to monitored.',
|
||||
{
|
||||
label: 'Radarr',
|
||||
movieId: response.data.id,
|
||||
movieTitle: response.data.title,
|
||||
movieId: data.id,
|
||||
movieTitle: data.title,
|
||||
}
|
||||
);
|
||||
logger.debug('Radarr update details', {
|
||||
label: 'Radarr',
|
||||
movie: response.data,
|
||||
movie: data,
|
||||
});
|
||||
|
||||
if (options.searchNow) {
|
||||
this.searchMovie(response.data.id);
|
||||
this.searchMovie(data.id);
|
||||
}
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
} else {
|
||||
logger.error('Failed to update existing movie in Radarr.', {
|
||||
label: 'Radarr',
|
||||
@@ -150,7 +148,7 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
return movie;
|
||||
}
|
||||
|
||||
const response = await this.axios.post<RadarrMovie>(`/movie`, {
|
||||
const data = await this.post<RadarrMovie>(`/movie`, {
|
||||
title: options.title,
|
||||
qualityProfileId: options.qualityProfileId,
|
||||
profileId: options.profileId,
|
||||
@@ -166,11 +164,11 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
},
|
||||
});
|
||||
|
||||
if (response.data.id) {
|
||||
if (data.id) {
|
||||
logger.info('Radarr accepted request', { label: 'Radarr' });
|
||||
logger.debug('Radarr add details', {
|
||||
label: 'Radarr',
|
||||
movie: response.data,
|
||||
movie: data,
|
||||
});
|
||||
} else {
|
||||
logger.error('Failed to add movie to Radarr', {
|
||||
@@ -179,7 +177,7 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
});
|
||||
throw new Error('Failed to add movie to Radarr');
|
||||
}
|
||||
return response.data;
|
||||
return data;
|
||||
} catch (e) {
|
||||
let errorData;
|
||||
try {
|
||||
@@ -223,11 +221,9 @@ class RadarrAPI extends ServarrBase<{ movieId: number }> {
|
||||
public removeMovie = async (movieId: number): Promise<void> => {
|
||||
try {
|
||||
const { id, title } = await this.getMovieByTmdbId(movieId);
|
||||
await this.axios.delete(`/movie/${id}`, {
|
||||
params: {
|
||||
deleteFiles: true,
|
||||
addImportExclusion: false,
|
||||
},
|
||||
await this.delete(`/movie/${id}`, {
|
||||
deleteFiles: 'true',
|
||||
addImportExclusion: 'false',
|
||||
});
|
||||
logger.info(`[Radarr] Removed movie ${title}`);
|
||||
} catch (e) {
|
||||
|
||||
@@ -117,9 +117,9 @@ class SonarrAPI extends ServarrBase<{
|
||||
|
||||
public async getSeries(): Promise<SonarrSeries[]> {
|
||||
try {
|
||||
const response = await this.axios.get<SonarrSeries[]>('/series');
|
||||
const data = await this.get<SonarrSeries[]>('/series');
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
} catch (e) {
|
||||
throw new Error(`[Sonarr] Failed to retrieve series: ${e.message}`);
|
||||
}
|
||||
@@ -127,9 +127,9 @@ class SonarrAPI extends ServarrBase<{
|
||||
|
||||
public async getSeriesById(id: number): Promise<SonarrSeries> {
|
||||
try {
|
||||
const response = await this.axios.get<SonarrSeries>(`/series/${id}`);
|
||||
const data = await this.get<SonarrSeries>(`/series/${id}`);
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
} catch (e) {
|
||||
throw new Error(`[Sonarr] Failed to retrieve series by ID: ${e.message}`);
|
||||
}
|
||||
@@ -137,17 +137,15 @@ class SonarrAPI extends ServarrBase<{
|
||||
|
||||
public async getSeriesByTitle(title: string): Promise<SonarrSeries[]> {
|
||||
try {
|
||||
const response = await this.axios.get<SonarrSeries[]>('/series/lookup', {
|
||||
params: {
|
||||
term: title,
|
||||
},
|
||||
const data = await this.get<SonarrSeries[]>('/series/lookup', {
|
||||
term: title,
|
||||
});
|
||||
|
||||
if (!response.data[0]) {
|
||||
if (!data[0]) {
|
||||
throw new Error('No series found');
|
||||
}
|
||||
|
||||
return response.data;
|
||||
return data;
|
||||
} catch (e) {
|
||||
logger.error('Error retrieving series by series title', {
|
||||
label: 'Sonarr API',
|
||||
@@ -160,17 +158,15 @@ class SonarrAPI extends ServarrBase<{
|
||||
|
||||
public async getSeriesByTvdbId(id: number): Promise<SonarrSeries> {
|
||||
try {
|
||||
const response = await this.axios.get<SonarrSeries[]>('/series/lookup', {
|
||||
params: {
|
||||
term: `tvdb:${id}`,
|
||||
},
|
||||
const data = await this.get<SonarrSeries[]>('/series/lookup', {
|
||||
term: `tvdb:${id}`,
|
||||
});
|
||||
|
||||
if (!response.data[0]) {
|
||||
if (!data[0]) {
|
||||
throw new Error('Series not found');
|
||||
}
|
||||
|
||||
return response.data[0];
|
||||
return data[0];
|
||||
} catch (e) {
|
||||
logger.error('Error retrieving series by tvdb ID', {
|
||||
label: 'Sonarr API',
|
||||
@@ -191,27 +187,27 @@ class SonarrAPI extends ServarrBase<{
|
||||
series.tags = options.tags ?? series.tags;
|
||||
series.seasons = this.buildSeasonList(options.seasons, series.seasons);
|
||||
|
||||
const newSeriesResponse = await this.axios.put<SonarrSeries>(
|
||||
const newSeriesData = await this.put<SonarrSeries>(
|
||||
'/series',
|
||||
series
|
||||
series as any
|
||||
);
|
||||
|
||||
if (newSeriesResponse.data.id) {
|
||||
if (newSeriesData.id) {
|
||||
logger.info('Updated existing series in Sonarr.', {
|
||||
label: 'Sonarr',
|
||||
seriesId: newSeriesResponse.data.id,
|
||||
seriesTitle: newSeriesResponse.data.title,
|
||||
seriesId: newSeriesData.id,
|
||||
seriesTitle: newSeriesData.title,
|
||||
});
|
||||
logger.debug('Sonarr update details', {
|
||||
label: 'Sonarr',
|
||||
movie: newSeriesResponse.data,
|
||||
movie: newSeriesData,
|
||||
});
|
||||
|
||||
if (options.searchNow) {
|
||||
this.searchSeries(newSeriesResponse.data.id);
|
||||
this.searchSeries(newSeriesData.id);
|
||||
}
|
||||
|
||||
return newSeriesResponse.data;
|
||||
return newSeriesData;
|
||||
} else {
|
||||
logger.error('Failed to update series in Sonarr', {
|
||||
label: 'Sonarr',
|
||||
@@ -221,38 +217,35 @@ class SonarrAPI extends ServarrBase<{
|
||||
}
|
||||
}
|
||||
|
||||
const createdSeriesResponse = await this.axios.post<SonarrSeries>(
|
||||
'/series',
|
||||
{
|
||||
tvdbId: options.tvdbid,
|
||||
title: options.title,
|
||||
qualityProfileId: options.profileId,
|
||||
languageProfileId: options.languageProfileId,
|
||||
seasons: this.buildSeasonList(
|
||||
options.seasons,
|
||||
series.seasons.map((season) => ({
|
||||
seasonNumber: season.seasonNumber,
|
||||
// We force all seasons to false if its the first request
|
||||
monitored: false,
|
||||
}))
|
||||
),
|
||||
tags: options.tags,
|
||||
seasonFolder: options.seasonFolder,
|
||||
monitored: options.monitored,
|
||||
rootFolderPath: options.rootFolderPath,
|
||||
seriesType: options.seriesType,
|
||||
addOptions: {
|
||||
ignoreEpisodesWithFiles: true,
|
||||
searchForMissingEpisodes: options.searchNow,
|
||||
},
|
||||
} as Partial<SonarrSeries>
|
||||
);
|
||||
const createdSeriesData = await this.post<SonarrSeries>('/series', {
|
||||
tvdbId: options.tvdbid,
|
||||
title: options.title,
|
||||
qualityProfileId: options.profileId,
|
||||
languageProfileId: options.languageProfileId,
|
||||
seasons: this.buildSeasonList(
|
||||
options.seasons,
|
||||
series.seasons.map((season) => ({
|
||||
seasonNumber: season.seasonNumber,
|
||||
// We force all seasons to false if its the first request
|
||||
monitored: false,
|
||||
}))
|
||||
),
|
||||
tags: options.tags,
|
||||
seasonFolder: options.seasonFolder,
|
||||
monitored: options.monitored,
|
||||
rootFolderPath: options.rootFolderPath,
|
||||
seriesType: options.seriesType,
|
||||
addOptions: {
|
||||
ignoreEpisodesWithFiles: true,
|
||||
searchForMissingEpisodes: options.searchNow,
|
||||
},
|
||||
} as Partial<SonarrSeries>);
|
||||
|
||||
if (createdSeriesResponse.data.id) {
|
||||
if (createdSeriesData.id) {
|
||||
logger.info('Sonarr accepted request', { label: 'Sonarr' });
|
||||
logger.debug('Sonarr add details', {
|
||||
label: 'Sonarr',
|
||||
movie: createdSeriesResponse.data,
|
||||
movie: createdSeriesData,
|
||||
});
|
||||
} else {
|
||||
logger.error('Failed to add movie to Sonarr', {
|
||||
@@ -262,7 +255,7 @@ class SonarrAPI extends ServarrBase<{
|
||||
throw new Error('Failed to add series to Sonarr');
|
||||
}
|
||||
|
||||
return createdSeriesResponse.data;
|
||||
return createdSeriesData;
|
||||
} catch (e) {
|
||||
let errorData;
|
||||
try {
|
||||
@@ -347,14 +340,13 @@ class SonarrAPI extends ServarrBase<{
|
||||
|
||||
return newSeasons;
|
||||
}
|
||||
|
||||
public removeSerie = async (serieId: number): Promise<void> => {
|
||||
try {
|
||||
const { id, title } = await this.getSeriesByTvdbId(serieId);
|
||||
await this.axios.delete(`/series/${id}`, {
|
||||
params: {
|
||||
deleteFiles: true,
|
||||
addImportExclusion: false,
|
||||
},
|
||||
await this.delete(`/series/${id}`, {
|
||||
deleteFiles: 'true',
|
||||
addImportExclusion: 'false',
|
||||
});
|
||||
logger.info(`[Radarr] Removed serie ${title}`);
|
||||
} catch (e) {
|
||||
|
||||
@@ -1,8 +1,7 @@
import ExternalAPI from '@server/api/externalapi';
import type { User } from '@server/entity/User';
import type { TautulliSettings } from '@server/lib/settings';
import logger from '@server/logger';
import type { AxiosInstance } from 'axios';
import axios from 'axios';
import { uniqWith } from 'lodash';

export interface TautulliHistoryRecord {
|
||||
@@ -113,25 +112,25 @@ interface TautulliInfoResponse {
};
}

class TautulliAPI {
private axios: AxiosInstance;

class TautulliAPI extends ExternalAPI {
constructor(settings: TautulliSettings) {
this.axios = axios.create({
baseURL: `${settings.useSsl ? 'https' : 'http'}://${settings.hostname}:${
super(
`${settings.useSsl ? 'https' : 'http'}://${settings.hostname}:${
settings.port
}${settings.urlBase ?? ''}`,
params: { apikey: settings.apiKey },
});
{
apikey: settings.apiKey || '',
}
);
}

public async getInfo(): Promise<TautulliInfo> {
try {
return (
await this.axios.get<TautulliInfoResponse>('/api/v2', {
params: { cmd: 'get_tautulli_info' },
await this.get<TautulliInfoResponse>('/api/v2', {
cmd: 'get_tautulli_info',
})
).data.response.data;
).response.data;
} catch (e) {
logger.error('Something went wrong fetching Tautulli server info', {
label: 'Tautulli API',
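As the rewritten constructor shows, every Tautulli call goes to a single /api/v2 endpoint, the API key rides along as a default query parameter, the command is selected with cmd, and the payload sits under response.data. A small illustrative helper following that shape (the typings are simplified stand-ins, not the project's real interfaces):

// Bare-bones sketch of the same URL and parameter conventions, without the
// project's base class.
interface TautulliEnvelope<T> {
  response: { data: T };
}

async function tautulliCommand<T>(
  baseUrl: string,
  apiKey: string,
  cmd: string,
  params: Record<string, string> = {}
): Promise<T> {
  const query = new URLSearchParams({ apikey: apiKey, cmd, ...params });
  const res = await fetch(`${baseUrl}/api/v2?${query.toString()}`);
  if (!res.ok) {
    throw new Error(`Tautulli request failed: ${res.statusText}`);
  }
  const body = (await res.json()) as TautulliEnvelope<T>;
  return body.response.data;
}

// e.g. tautulliCommand(baseUrl, apiKey, 'get_tautulli_info')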
|
||||
@@ -148,14 +147,12 @@ class TautulliAPI {
|
||||
): Promise<TautulliWatchStats[]> {
|
||||
try {
|
||||
return (
|
||||
await this.axios.get<TautulliWatchStatsResponse>('/api/v2', {
|
||||
params: {
|
||||
cmd: 'get_item_watch_time_stats',
|
||||
rating_key: ratingKey,
|
||||
grouping: 1,
|
||||
},
|
||||
await this.get<TautulliWatchStatsResponse>('/api/v2', {
|
||||
cmd: 'get_item_watch_time_stats',
|
||||
rating_key: ratingKey,
|
||||
grouping: '1',
|
||||
})
|
||||
).data.response.data;
|
||||
).response.data;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
'Something went wrong fetching media watch stats from Tautulli',
|
||||
@@ -176,14 +173,12 @@ class TautulliAPI {
|
||||
): Promise<TautulliWatchUser[]> {
|
||||
try {
|
||||
return (
|
||||
await this.axios.get<TautulliWatchUsersResponse>('/api/v2', {
|
||||
params: {
|
||||
cmd: 'get_item_user_stats',
|
||||
rating_key: ratingKey,
|
||||
grouping: 1,
|
||||
},
|
||||
await this.get<TautulliWatchUsersResponse>('/api/v2', {
|
||||
cmd: 'get_item_user_stats',
|
||||
rating_key: ratingKey,
|
||||
grouping: '1',
|
||||
})
|
||||
).data.response.data;
|
||||
).response.data;
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
'Something went wrong fetching media watch users from Tautulli',
|
||||
@@ -206,15 +201,13 @@ class TautulliAPI {
|
||||
}
|
||||
|
||||
return (
|
||||
await this.axios.get<TautulliWatchStatsResponse>('/api/v2', {
|
||||
params: {
|
||||
cmd: 'get_user_watch_time_stats',
|
||||
user_id: user.plexId,
|
||||
query_days: 0,
|
||||
grouping: 1,
|
||||
},
|
||||
await this.get<TautulliWatchStatsResponse>('/api/v2', {
|
||||
cmd: 'get_user_watch_time_stats',
|
||||
user_id: user.plexId.toString(),
|
||||
query_days: '0',
|
||||
grouping: '1',
|
||||
})
|
||||
).data.response.data[0];
|
||||
).response.data[0];
|
||||
} catch (e) {
|
||||
logger.error(
|
||||
'Something went wrong fetching user watch stats from Tautulli',
|
||||
@@ -245,19 +238,17 @@ class TautulliAPI {
|
||||
|
||||
while (results.length < 20) {
|
||||
const tautulliData = (
|
||||
await this.axios.get<TautulliHistoryResponse>('/api/v2', {
|
||||
params: {
|
||||
cmd: 'get_history',
|
||||
grouping: 1,
|
||||
order_column: 'date',
|
||||
order_dir: 'desc',
|
||||
user_id: user.plexId,
|
||||
media_type: 'movie,episode',
|
||||
length: take,
|
||||
start,
|
||||
},
|
||||
await this.get<TautulliHistoryResponse>('/api/v2', {
|
||||
cmd: 'get_history',
|
||||
grouping: '1',
|
||||
order_column: 'date',
|
||||
order_dir: 'desc',
|
||||
user_id: user.plexId.toString(),
|
||||
media_type: 'movie,episode',
|
||||
length: take.toString(),
|
||||
start: start.toString(),
|
||||
})
|
||||
).data.response.data.data;
|
||||
).response.data.data;
|
||||
|
||||
if (!tautulliData.length) {
|
||||
return results;
|
||||
|
||||
@@ -113,8 +113,8 @@ class TheMovieDb extends ExternalAPI {
{
nodeCache: cacheManager.getCache('tmdb').data,
rateLimit: {
maxRequests: 20,
maxRPS: 50,
id: 'tmdb',
},
}
);
@@ -130,7 +130,10 @@ class TheMovieDb extends ExternalAPI {
}: SearchOptions): Promise<TmdbSearchMultiResponse> => {
try {
const data = await this.get<TmdbSearchMultiResponse>('/search/multi', {
params: { query, page, include_adult: includeAdult, language },
query,
page: page.toString(),
include_adult: includeAdult ? 'true' : 'false',
language,
});

return data;
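The TMDB constructor above swaps the old maxRequests-style throttle for a maxRPS option, and later in this diff imageproxy.ts wraps the global fetch with rateLimit from @server/utils/rateLimit. Assuming that helper simply returns a throttled fetch-compatible function (option names taken from the hunks in this diff, behaviour assumed), usage would look roughly like this:

// Hypothetical: a fetch limited to ~50 requests per second, shared under the
// 'tmdb' id, mirroring the options passed to the TheMovieDb constructor above.
import rateLimit from '@server/utils/rateLimit';

const limitedFetch = rateLimit(fetch, { maxRPS: 50, id: 'tmdb' });

export async function fetchConfiguration(apiKey: string) {
  const res = await limitedFetch(
    `https://api.themoviedb.org/3/configuration?api_key=${apiKey}`
  );
  return res.json();
}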
|
||||
@@ -153,13 +156,11 @@ class TheMovieDb extends ExternalAPI {
|
||||
}: SingleSearchOptions): Promise<TmdbSearchMovieResponse> => {
|
||||
try {
|
||||
const data = await this.get<TmdbSearchMovieResponse>('/search/movie', {
|
||||
params: {
|
||||
query,
|
||||
page,
|
||||
include_adult: includeAdult,
|
||||
language,
|
||||
primary_release_year: year,
|
||||
},
|
||||
query,
|
||||
page: page.toString(),
|
||||
include_adult: includeAdult ? 'true' : 'false',
|
||||
language,
|
||||
primary_release_year: year?.toString() || '',
|
||||
});
|
||||
|
||||
return data;
|
||||
@@ -182,13 +183,11 @@ class TheMovieDb extends ExternalAPI {
|
||||
}: SingleSearchOptions): Promise<TmdbSearchTvResponse> => {
|
||||
try {
|
||||
const data = await this.get<TmdbSearchTvResponse>('/search/tv', {
|
||||
params: {
|
||||
query,
|
||||
page,
|
||||
include_adult: includeAdult,
|
||||
language,
|
||||
first_air_date_year: year,
|
||||
},
|
||||
query,
|
||||
page: page.toString(),
|
||||
include_adult: includeAdult ? 'true' : 'false',
|
||||
language,
|
||||
first_air_date_year: year?.toString() || '',
|
||||
});
|
||||
|
||||
return data;
|
||||
@@ -211,7 +210,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
}): Promise<TmdbPersonDetails> => {
|
||||
try {
|
||||
const data = await this.get<TmdbPersonDetails>(`/person/${personId}`, {
|
||||
params: { language },
|
||||
language,
|
||||
});
|
||||
|
||||
return data;
|
||||
@@ -231,7 +230,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbPersonCombinedCredits>(
|
||||
`/person/${personId}/combined_credits`,
|
||||
{
|
||||
params: { language },
|
||||
language,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -254,11 +253,9 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbMovieDetails>(
|
||||
`/movie/${movieId}`,
|
||||
{
|
||||
params: {
|
||||
language,
|
||||
append_to_response:
|
||||
'credits,external_ids,videos,keywords,release_dates,watch/providers',
|
||||
},
|
||||
language,
|
||||
append_to_response:
|
||||
'credits,external_ids,videos,keywords,release_dates,watch/providers',
|
||||
},
|
||||
43200
|
||||
);
|
||||
@@ -280,11 +277,9 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbTvDetails>(
|
||||
`/tv/${tvId}`,
|
||||
{
|
||||
params: {
|
||||
language,
|
||||
append_to_response:
|
||||
'aggregate_credits,credits,external_ids,keywords,videos,content_ratings,watch/providers',
|
||||
},
|
||||
language,
|
||||
append_to_response:
|
||||
'aggregate_credits,credits,external_ids,keywords,videos,content_ratings,watch/providers',
|
||||
},
|
||||
43200
|
||||
);
|
||||
@@ -308,10 +303,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSeasonWithEpisodes>(
|
||||
`/tv/${tvId}/season/${seasonNumber}`,
|
||||
{
|
||||
params: {
|
||||
language,
|
||||
append_to_response: 'external_ids',
|
||||
},
|
||||
language: language || '',
|
||||
append_to_response: 'external_ids',
|
||||
}
|
||||
);
|
||||
|
||||
@@ -334,10 +327,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSearchMovieResponse>(
|
||||
`/movie/${movieId}/recommendations`,
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
language,
|
||||
},
|
||||
page: page.toString(),
|
||||
language,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -360,10 +351,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSearchMovieResponse>(
|
||||
`/movie/${movieId}/similar`,
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
language,
|
||||
},
|
||||
page: page.toString(),
|
||||
language,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -386,10 +375,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSearchMovieResponse>(
|
||||
`/keyword/${keywordId}/movies`,
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
language,
|
||||
},
|
||||
page: page.toString(),
|
||||
language,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -412,10 +399,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSearchTvResponse>(
|
||||
`/tv/${tvId}/recommendations`,
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
language,
|
||||
},
|
||||
page: page.toString(),
|
||||
language,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -438,10 +423,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
}): Promise<TmdbSearchTvResponse> {
|
||||
try {
|
||||
const data = await this.get<TmdbSearchTvResponse>(`/tv/${tvId}/similar`, {
|
||||
params: {
|
||||
page,
|
||||
language,
|
||||
},
|
||||
page: page.toString(),
|
||||
language,
|
||||
});
|
||||
|
||||
return data;
|
||||
@@ -482,40 +465,38 @@ class TheMovieDb extends ExternalAPI {
|
||||
.split('T')[0];
|
||||
|
||||
const data = await this.get<TmdbSearchMovieResponse>('/discover/movie', {
|
||||
params: {
|
||||
sort_by: sortBy,
|
||||
page,
|
||||
include_adult: includeAdult,
|
||||
language,
|
||||
region: this.region,
|
||||
with_original_language:
|
||||
originalLanguage && originalLanguage !== 'all'
|
||||
? originalLanguage
|
||||
: originalLanguage === 'all'
|
||||
? undefined
|
||||
: this.originalLanguage,
|
||||
// Set our release date values, but check if one is set and not the other,
|
||||
// so we can force a past date or a future date. TMDB Requires both values if one is set!
|
||||
'primary_release_date.gte':
|
||||
!primaryReleaseDateGte && primaryReleaseDateLte
|
||||
? defaultPastDate
|
||||
: primaryReleaseDateGte,
|
||||
'primary_release_date.lte':
|
||||
!primaryReleaseDateLte && primaryReleaseDateGte
|
||||
? defaultFutureDate
|
||||
: primaryReleaseDateLte,
|
||||
with_genres: genre,
|
||||
with_companies: studio,
|
||||
with_keywords: keywords,
|
||||
'with_runtime.gte': withRuntimeGte,
|
||||
'with_runtime.lte': withRuntimeLte,
|
||||
'vote_average.gte': voteAverageGte,
|
||||
'vote_average.lte': voteAverageLte,
|
||||
'vote_count.gte': voteCountGte,
|
||||
'vote_count.lte': voteCountLte,
|
||||
watch_region: watchRegion,
|
||||
with_watch_providers: watchProviders,
|
||||
},
|
||||
sort_by: sortBy,
|
||||
page: page.toString(),
|
||||
include_adult: includeAdult ? 'true' : 'false',
|
||||
language,
|
||||
region: this.region || '',
|
||||
with_original_language:
|
||||
originalLanguage && originalLanguage !== 'all'
|
||||
? originalLanguage
|
||||
: originalLanguage === 'all'
|
||||
? ''
|
||||
: this.originalLanguage || '',
|
||||
// Set our release date values, but check if one is set and not the other,
|
||||
// so we can force a past date or a future date. TMDB Requires both values if one is set!
|
||||
'primary_release_date.gte':
|
||||
!primaryReleaseDateGte && primaryReleaseDateLte
|
||||
? defaultPastDate
|
||||
: primaryReleaseDateGte || '',
|
||||
'primary_release_date.lte':
|
||||
!primaryReleaseDateLte && primaryReleaseDateGte
|
||||
? defaultFutureDate
|
||||
: primaryReleaseDateLte || '',
|
||||
with_genres: genre || '',
|
||||
with_companies: studio || '',
|
||||
with_keywords: keywords || '',
|
||||
'with_runtime.gte': withRuntimeGte || '',
|
||||
'with_runtime.lte': withRuntimeLte || '',
|
||||
'vote_average.gte': voteAverageGte || '',
|
||||
'vote_average.lte': voteAverageLte || '',
|
||||
'vote_count.gte': voteCountGte || '',
|
||||
'vote_count.lte': voteCountLte || '',
|
||||
watch_region: watchRegion || '',
|
||||
with_watch_providers: watchProviders || '',
|
||||
});
|
||||
|
||||
return data;
|
||||
@@ -557,43 +538,41 @@ class TheMovieDb extends ExternalAPI {
|
||||
.split('T')[0];
|
||||
|
||||
const data = await this.get<TmdbSearchTvResponse>('/discover/tv', {
|
||||
params: {
|
||||
sort_by: sortBy,
|
||||
page: page.toString(),
|
||||
language,
|
||||
region: this.region || '',
|
||||
// Set our release date values, but check if one is set and not the other,
|
||||
// so we can force a past date or a future date. TMDB Requires both values if one is set!
|
||||
'first_air_date.gte':
|
||||
!firstAirDateGte && firstAirDateLte
|
||||
? defaultPastDate
|
||||
: firstAirDateGte || '',
|
||||
'first_air_date.lte':
|
||||
!firstAirDateLte && firstAirDateGte
|
||||
? defaultFutureDate
|
||||
: firstAirDateLte || '',
|
||||
with_original_language:
|
||||
originalLanguage && originalLanguage !== 'all'
|
||||
? originalLanguage
|
||||
: originalLanguage === 'all'
|
||||
? ''
|
||||
: this.originalLanguage || '',
|
||||
include_null_first_air_dates: includeEmptyReleaseDate
|
||||
? 'true'
|
||||
: 'false',
|
||||
with_genres: genre || '',
|
||||
with_networks: network?.toString() || '',
|
||||
with_keywords: keywords || '',
|
||||
'with_runtime.gte': withRuntimeGte || '',
|
||||
'with_runtime.lte': withRuntimeLte || '',
|
||||
'vote_average.gte': voteAverageGte || '',
|
||||
'vote_average.lte': voteAverageLte || '',
|
||||
'vote_count.gte': voteCountGte || '',
|
||||
'vote_count.lte': voteCountLte || '',
|
||||
with_watch_providers: watchProviders || '',
|
||||
watch_region: watchRegion || '',
|
||||
with_status: withStatus || '',
|
||||
},
|
||||
sort_by: sortBy,
|
||||
page: page.toString(),
|
||||
language,
|
||||
region: this.region || '',
|
||||
// Set our release date values, but check if one is set and not the other,
|
||||
// so we can force a past date or a future date. TMDB Requires both values if one is set!
|
||||
'first_air_date.gte':
|
||||
!firstAirDateGte && firstAirDateLte
|
||||
? defaultPastDate
|
||||
: firstAirDateGte || '',
|
||||
'first_air_date.lte':
|
||||
!firstAirDateLte && firstAirDateGte
|
||||
? defaultFutureDate
|
||||
: firstAirDateLte || '',
|
||||
with_original_language:
|
||||
originalLanguage && originalLanguage !== 'all'
|
||||
? originalLanguage
|
||||
: originalLanguage === 'all'
|
||||
? ''
|
||||
: this.originalLanguage || '',
|
||||
include_null_first_air_dates: includeEmptyReleaseDate
|
||||
? 'true'
|
||||
: 'false',
|
||||
with_genres: genre || '',
|
||||
with_networks: network?.toString() || '',
|
||||
with_keywords: keywords || '',
|
||||
'with_runtime.gte': withRuntimeGte || '',
|
||||
'with_runtime.lte': withRuntimeLte || '',
|
||||
'vote_average.gte': voteAverageGte || '',
|
||||
'vote_average.lte': voteAverageLte || '',
|
||||
'vote_count.gte': voteCountGte || '',
|
||||
'vote_count.lte': voteCountLte || '',
|
||||
with_watch_providers: watchProviders || '',
|
||||
watch_region: watchRegion || '',
|
||||
with_status: withStatus || '',
|
||||
});
|
||||
|
||||
return data;
|
||||
@@ -613,12 +592,10 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbUpcomingMoviesResponse>(
|
||||
'/movie/upcoming',
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
language,
|
||||
region: this.region,
|
||||
originalLanguage: this.originalLanguage,
|
||||
},
|
||||
page: page.toString(),
|
||||
language,
|
||||
region: this.region || '',
|
||||
originalLanguage: this.originalLanguage || '',
|
||||
}
|
||||
);
|
||||
|
||||
@@ -641,11 +618,9 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSearchMultiResponse>(
|
||||
`/trending/all/${timeWindow}`,
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
language,
|
||||
region: this.region,
|
||||
},
|
||||
page: page.toString(),
|
||||
language,
|
||||
region: this.region || '',
|
||||
}
|
||||
);
|
||||
|
||||
@@ -666,9 +641,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSearchMovieResponse>(
|
||||
`/trending/movie/${timeWindow}`,
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
},
|
||||
page: page.toString(),
|
||||
}
|
||||
);
|
||||
|
||||
@@ -689,9 +662,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbSearchTvResponse>(
|
||||
`/trending/tv/${timeWindow}`,
|
||||
{
|
||||
params: {
|
||||
page,
|
||||
},
|
||||
page: page.toString(),
|
||||
}
|
||||
);
|
||||
|
||||
@@ -720,10 +691,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbExternalIdResponse>(
|
||||
`/find/${externalId}`,
|
||||
{
|
||||
params: {
|
||||
external_source: type === 'imdb' ? 'imdb_id' : 'tvdb_id',
|
||||
language,
|
||||
},
|
||||
external_source: type === 'imdb' ? 'imdb_id' : 'tvdb_id',
|
||||
language,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -813,9 +782,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbCollection>(
|
||||
`/collection/${collectionId}`,
|
||||
{
|
||||
params: {
|
||||
language,
|
||||
},
|
||||
language,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -888,9 +855,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbGenresResult>(
|
||||
'/genre/movie/list',
|
||||
{
|
||||
params: {
|
||||
language,
|
||||
},
|
||||
language,
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -902,9 +867,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const englishData = await this.get<TmdbGenresResult>(
|
||||
'/genre/movie/list',
|
||||
{
|
||||
params: {
|
||||
language: 'en',
|
||||
},
|
||||
language: 'en',
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -939,9 +902,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbGenresResult>(
|
||||
'/genre/tv/list',
|
||||
{
|
||||
params: {
|
||||
language,
|
||||
},
|
||||
language,
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -953,9 +914,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const englishData = await this.get<TmdbGenresResult>(
|
||||
'/genre/tv/list',
|
||||
{
|
||||
params: {
|
||||
language: 'en',
|
||||
},
|
||||
language: 'en',
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -1010,10 +969,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbKeywordSearchResponse>(
|
||||
'/search/keyword',
|
||||
{
|
||||
params: {
|
||||
query,
|
||||
page,
|
||||
},
|
||||
query,
|
||||
page: page.toString(),
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -1035,10 +992,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<TmdbCompanySearchResponse>(
|
||||
'/search/company',
|
||||
{
|
||||
params: {
|
||||
query,
|
||||
page,
|
||||
},
|
||||
query,
|
||||
page: page.toString(),
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -1058,9 +1013,7 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<{ results: TmdbWatchProviderRegion[] }>(
|
||||
'/watch/providers/regions',
|
||||
{
|
||||
params: {
|
||||
language: language ?? this.originalLanguage,
|
||||
},
|
||||
language: language ? this.originalLanguage || '' : '',
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -1084,10 +1037,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<{ results: TmdbWatchProviderDetails[] }>(
|
||||
'/watch/providers/movie',
|
||||
{
|
||||
params: {
|
||||
language: language ?? this.originalLanguage,
|
||||
watch_region: watchRegion,
|
||||
},
|
||||
language: language ? this.originalLanguage || '' : '',
|
||||
watch_region: watchRegion,
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
@@ -1111,10 +1062,8 @@ class TheMovieDb extends ExternalAPI {
|
||||
const data = await this.get<{ results: TmdbWatchProviderDetails[] }>(
|
||||
'/watch/providers/tv',
|
||||
{
|
||||
params: {
|
||||
language: language ?? this.originalLanguage,
|
||||
watch_region: watchRegion,
|
||||
},
|
||||
language: language ? this.originalLanguage || '' : '',
|
||||
watch_region: watchRegion,
|
||||
},
|
||||
86400 // 24 hours
|
||||
);
|
||||
|
||||
@@ -1,7 +1,43 @@
import 'reflect-metadata';
import fs from 'fs';
import type { TlsOptions } from 'tls';
import type { DataSourceOptions, EntityTarget, Repository } from 'typeorm';
import { DataSource } from 'typeorm';

const DB_SSL_PREFIX = 'DB_SSL_';

function boolFromEnv(envVar: string, defaultVal = false) {
if (process.env[envVar]) {
return process.env[envVar]?.toLowerCase() === 'true';
}
return defaultVal;
}

function stringOrReadFileFromEnv(envVar: string): Buffer | string | undefined {
if (process.env[envVar]) {
return process.env[envVar];
}
const filePath = process.env[`${envVar}_FILE`];
if (filePath) {
return fs.readFileSync(filePath);
}
return undefined;
}

function buildSslConfig(): TlsOptions | undefined {
if (process.env.DB_USE_SSL?.toLowerCase() !== 'true') {
return undefined;
}
return {
rejectUnauthorized: boolFromEnv(
`${DB_SSL_PREFIX}REJECT_UNAUTHORIZED`,
true
),
ca: stringOrReadFileFromEnv(`${DB_SSL_PREFIX}CA`),
key: stringOrReadFileFromEnv(`${DB_SSL_PREFIX}KEY`),
cert: stringOrReadFileFromEnv(`${DB_SSL_PREFIX}CERT`),
};
}

const devConfig: DataSourceOptions = {
type: 'sqlite',
database: process.env.CONFIG_DIRECTORY
@@ -9,10 +45,10 @@ const devConfig: DataSourceOptions = {
: 'config/db/db.sqlite3',
synchronize: true,
migrationsRun: false,
logging: false,
logging: boolFromEnv('DB_LOG_QUERIES'),
enableWAL: true,
entities: ['server/entity/**/*.ts'],
migrations: ['server/migration/**/*.ts'],
migrations: ['server/migration/sqlite/**/*.ts'],
subscribers: ['server/subscriber/**/*.ts'],
};

@@ -23,16 +59,56 @@ const prodConfig: DataSourceOptions = {
: 'config/db/db.sqlite3',
synchronize: false,
migrationsRun: false,
logging: false,
logging: boolFromEnv('DB_LOG_QUERIES'),
enableWAL: true,
entities: ['dist/entity/**/*.js'],
migrations: ['dist/migration/**/*.js'],
migrations: ['dist/migration/sqlite/**/*.js'],
subscribers: ['dist/subscriber/**/*.js'],
};

const dataSource = new DataSource(
process.env.NODE_ENV !== 'production' ? devConfig : prodConfig
);
const postgresDevConfig: DataSourceOptions = {
type: 'postgres',
host: process.env.DB_HOST,
port: parseInt(process.env.DB_PORT ?? '5432'),
username: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_NAME ?? 'jellyseerr',
ssl: buildSslConfig(),
synchronize: false,
migrationsRun: true,
logging: boolFromEnv('DB_LOG_QUERIES'),
entities: ['server/entity/**/*.ts'],
migrations: ['server/migration/postgres/**/*.ts'],
subscribers: ['server/subscriber/**/*.ts'],
};

const postgresProdConfig: DataSourceOptions = {
type: 'postgres',
host: process.env.DB_HOST,
port: parseInt(process.env.DB_PORT ?? '5432'),
username: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_NAME ?? 'jellyseerr',
ssl: buildSslConfig(),
synchronize: false,
migrationsRun: false,
logging: boolFromEnv('DB_LOG_QUERIES'),
entities: ['dist/entity/**/*.js'],
migrations: ['dist/migration/postgres/**/*.js'],
subscribers: ['dist/subscriber/**/*.js'],
};

export const isPgsql = process.env.DB_TYPE === 'postgres';

function getDataSource(): DataSourceOptions {
if (process.env.NODE_ENV === 'production') {
return isPgsql ? postgresProdConfig : prodConfig;
} else {
return isPgsql ? postgresDevConfig : devConfig;
}
}

const dataSource = new DataSource(getDataSource());

export const getRepository = <Entity extends object>(
target: EntityTarget<Entity>
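For reference, the environment knobs introduced above (variable names are taken from this file; the values below are placeholders): DB_TYPE selects postgres, DB_HOST/DB_PORT/DB_USER/DB_PASS/DB_NAME point at the server, DB_USE_SSL plus the DB_SSL_* family configure TLS (each *_FILE variant reads the value from a file instead), and DB_LOG_QUERIES toggles query logging. An illustrative setup:

// Placeholder values only; the variable names come from the config above.
const exampleEnv: Record<string, string> = {
  DB_TYPE: 'postgres',
  DB_HOST: 'db.example.internal',
  DB_PORT: '5432',
  DB_USER: 'jellyseerr',
  DB_PASS: 'change-me',
  DB_NAME: 'jellyseerr',
  DB_USE_SSL: 'true',
  DB_SSL_REJECT_UNAUTHORIZED: 'true',
  DB_SSL_CA_FILE: '/run/secrets/db-ca.pem',
  DB_LOG_QUERIES: 'false',
};
Object.assign(process.env, exampleEnv);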
|
||||
|
||||
@@ -80,12 +80,12 @@ export class Blacklist implements BlacklistItem {
status: MediaStatus.BLACKLISTED,
status4k: MediaStatus.BLACKLISTED,
mediaType: blacklistRequest.mediaType,
blacklist: blacklist,
blacklist: Promise.resolve(blacklist),
});

await mediaRepository.save(media);
} else {
media.blacklist = blacklist;
media.blacklist = Promise.resolve(blacklist);
media.status = MediaStatus.BLACKLISTED;
media.status4k = MediaStatus.BLACKLISTED;
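The blacklist relation is being switched from an eager column to a lazy one (see the Media entity hunk below), which is why it is now assigned as Promise.resolve(blacklist). With TypeORM lazy relations the property itself is a Promise, so readers have to await it; a hedged sketch (the Media import path and default export are assumed for illustration):

// Reading a lazy relation: the field resolves on first access.
import type Media from '@server/entity/Media';

export async function isBlacklisted(media: Media): Promise<boolean> {
  const entry = await media.blacklist; // lazy-loaded here
  return !!entry;
}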
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ import type { DownloadingItem } from '@server/lib/downloadtracker';
|
||||
import downloadTracker from '@server/lib/downloadtracker';
|
||||
import { getSettings } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import { DbAwareColumn } from '@server/utils/DbColumnHelper';
|
||||
import { getHostname } from '@server/utils/getHostname';
|
||||
import {
|
||||
AfterLoad,
|
||||
@@ -42,6 +43,10 @@ class Media {
|
||||
finalIds = tmdbIds;
|
||||
}
|
||||
|
||||
if (finalIds.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const media = await mediaRepository
|
||||
.createQueryBuilder('media')
|
||||
.leftJoinAndSelect(
|
||||
@@ -118,10 +123,8 @@ class Media {
@OneToMany(() => Issue, (issue) => issue.media, { cascade: true })
public issues: Issue[];

@OneToOne(() => Blacklist, (blacklist) => blacklist.media, {
eager: true,
})
public blacklist: Blacklist;
@OneToOne(() => Blacklist, (blacklist) => blacklist.media)
public blacklist: Promise<Blacklist>;

@CreateDateColumn()
public createdAt: Date;
@@ -129,10 +132,23 @@ class Media {
@UpdateDateColumn()
public updatedAt: Date;

@Column({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
/**
* The `lastSeasonChange` column stores the date and time when the media was added to the library.
* It needs to be database-aware because SQLite supports `datetime` while PostgreSQL supports `timestamp with timezone (timestampz)`.
*/
@DbAwareColumn({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
public lastSeasonChange: Date;

@Column({ type: 'datetime', nullable: true })
/**
* The `mediaAddedAt` column stores the date and time when the media was added to the library.
* It needs to be database-aware because SQLite supports `datetime` while PostgreSQL supports `timestamp with timezone (timestampz)`.
* This column is nullable because it can be null when the media is not yet synced to the library.
*/
@DbAwareColumn({
type: 'datetime',
default: () => 'CURRENT_TIMESTAMP',
nullable: true,
})
public mediaAddedAt: Date;

@Column({ nullable: true, type: 'int' })
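The new DbAwareColumn decorator (imported from @server/utils/DbColumnHelper above) exists so the same entity can map `datetime` on SQLite and a timezone-aware timestamp on PostgreSQL. The real helper is not shown in this diff; the following is only a minimal sketch of the idea, with the database detection mechanism assumed:

// Sketch only: swap SQLite's 'datetime' for a Postgres timestamp type when
// the configured database is Postgres.
import { Column, type ColumnOptions } from 'typeorm';

export function DbAwareColumnSketch(options: ColumnOptions = {}): PropertyDecorator {
  const isPostgres = process.env.DB_TYPE === 'postgres'; // assumed detection
  const resolved: ColumnOptions =
    isPostgres && options.type === 'datetime'
      ? { ...options, type: 'timestamp with time zone' }
      : options;
  return Column(resolved);
}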
|
||||
|
||||
@@ -257,9 +257,7 @@ export class MediaRequest {
|
||||
>;
|
||||
const requestedSeasons =
|
||||
requestBody.seasons === 'all'
|
||||
? tmdbMediaShow.seasons
|
||||
.map((season) => season.season_number)
|
||||
.filter((sn) => sn > 0)
|
||||
? tmdbMediaShow.seasons.map((season) => season.season_number)
|
||||
: (requestBody.seasons as number[]);
|
||||
let existingSeasons: number[] = [];
|
||||
|
||||
@@ -387,6 +385,7 @@ export class MediaRequest {
|
||||
@ManyToOne(() => Media, (media) => media.requests, {
|
||||
eager: true,
|
||||
onDelete: 'CASCADE',
|
||||
nullable: false,
|
||||
})
|
||||
public media: Media;
|
||||
|
||||
@@ -859,7 +858,7 @@ export class MediaRequest {
|
||||
const requestRepository = getRepository(MediaRequest);
|
||||
|
||||
this.status = MediaRequestStatus.FAILED;
|
||||
requestRepository.save(this);
|
||||
await requestRepository.save(this);
|
||||
|
||||
logger.warn(
|
||||
'Something went wrong sending movie request to Radarr, marking status as FAILED',
|
||||
@@ -1134,13 +1133,14 @@ export class MediaRequest {
|
||||
media[this.is4k ? 'externalServiceSlug4k' : 'externalServiceSlug'] =
|
||||
sonarrSeries.titleSlug;
|
||||
media[this.is4k ? 'serviceId4k' : 'serviceId'] = sonarrSettings?.id;
|
||||
|
||||
await mediaRepository.save(media);
|
||||
})
|
||||
.catch(async () => {
|
||||
const requestRepository = getRepository(MediaRequest);
|
||||
|
||||
this.status = MediaRequestStatus.FAILED;
|
||||
requestRepository.save(this);
|
||||
await requestRepository.save(this);
|
||||
|
||||
logger.warn(
|
||||
'Something went wrong sending series request to Sonarr, marking status as FAILED',
|
||||
|
||||
@@ -23,7 +23,10 @@ class Season {
|
||||
@Column({ type: 'int', default: MediaStatus.UNKNOWN })
|
||||
public status4k: MediaStatus;
|
||||
|
||||
@ManyToOne(() => Media, (media) => media.seasons, { onDelete: 'CASCADE' })
|
||||
@ManyToOne(() => Media, (media) => media.seasons, {
|
||||
onDelete: 'CASCADE',
|
||||
nullable: false,
|
||||
})
|
||||
public media: Promise<Media>;
|
||||
|
||||
@CreateDateColumn()
|
||||
|
||||
@@ -53,6 +53,7 @@ export class Watchlist implements WatchlistItem {
|
||||
@ManyToOne(() => Media, (media) => media.watchlists, {
|
||||
eager: true,
|
||||
onDelete: 'CASCADE',
|
||||
nullable: false,
|
||||
})
|
||||
public media: Media;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
import PlexAPI from '@server/api/plexapi';
import dataSource, { getRepository } from '@server/datasource';
import dataSource, { getRepository, isPgsql } from '@server/datasource';
import DiscoverSlider from '@server/entity/DiscoverSlider';
import { Session } from '@server/entity/Session';
import { User } from '@server/entity/User';
@@ -35,10 +35,17 @@ import * as OpenApiValidator from 'express-openapi-validator';
import type { Store } from 'express-session';
import session from 'express-session';
import next from 'next';
import dns from 'node:dns';
import net from 'node:net';
import path from 'path';
import swaggerUi from 'swagger-ui-express';
import YAML from 'yamljs';

if (process.env.forceIpv4First === 'true') {
dns.setDefaultResultOrder('ipv4first');
net.setDefaultAutoSelectFamily(false);
}

const API_SPEC_PATH = path.join(__dirname, '../overseerr-api.yml');

logger.info(`Starting Overseerr version ${getAppVersion()}`);
@@ -59,9 +66,13 @@ app

// Run migrations in production
if (process.env.NODE_ENV === 'production') {
await dbConnection.query('PRAGMA foreign_keys=OFF');
await dbConnection.runMigrations();
await dbConnection.query('PRAGMA foreign_keys=ON');
if (isPgsql) {
await dbConnection.runMigrations();
} else {
await dbConnection.query('PRAGMA foreign_keys=OFF');
await dbConnection.runMigrations();
await dbConnection.query('PRAGMA foreign_keys=ON');
}
}

// Load Settings
|
||||
|
||||
@@ -2,6 +2,7 @@ import { MediaServerType } from '@server/constants/server';
|
||||
import availabilitySync from '@server/lib/availabilitySync';
|
||||
import downloadTracker from '@server/lib/downloadtracker';
|
||||
import ImageProxy from '@server/lib/imageproxy';
|
||||
import refreshToken from '@server/lib/refreshToken';
|
||||
import {
|
||||
jellyfinFullScanner,
|
||||
jellyfinRecentScanner,
|
||||
@@ -13,7 +14,6 @@ import type { JobId } from '@server/lib/settings';
|
||||
import { getSettings } from '@server/lib/settings';
|
||||
import watchlistSync from '@server/lib/watchlistsync';
|
||||
import logger from '@server/logger';
|
||||
import random from 'lodash/random';
|
||||
import schedule from 'node-schedule';
|
||||
|
||||
interface ScheduledJob {
|
||||
@@ -113,30 +113,20 @@ export const startJobs = (): void => {
|
||||
}
|
||||
|
||||
// Watchlist Sync
|
||||
const watchlistSyncJob: ScheduledJob = {
|
||||
scheduledJobs.push({
|
||||
id: 'plex-watchlist-sync',
|
||||
name: 'Plex Watchlist Sync',
|
||||
type: 'process',
|
||||
interval: 'fixed',
|
||||
interval: 'seconds',
|
||||
cronSchedule: jobs['plex-watchlist-sync'].schedule,
|
||||
job: schedule.scheduleJob(new Date(Date.now() + 1000 * 60 * 20), () => {
|
||||
job: schedule.scheduleJob(jobs['plex-watchlist-sync'].schedule, () => {
|
||||
logger.info('Starting scheduled job: Plex Watchlist Sync', {
|
||||
label: 'Jobs',
|
||||
});
|
||||
watchlistSync.syncWatchlist();
|
||||
}),
|
||||
};
|
||||
|
||||
// To help alleviate load on Plex's servers, we will add some fuzziness to the next schedule
|
||||
// after each run
|
||||
watchlistSyncJob.job.on('run', () => {
|
||||
watchlistSyncJob.job.schedule(
|
||||
new Date(Math.floor(Date.now() + 1000 * 60 * random(14, 24, true)))
|
||||
);
|
||||
});
|
||||
|
||||
scheduledJobs.push(watchlistSyncJob);
|
||||
|
||||
// Run full radarr scan every 24 hours
|
||||
scheduledJobs.push({
|
||||
id: 'radarr-scan',
|
||||
@@ -233,5 +223,19 @@ export const startJobs = (): void => {
|
||||
}),
|
||||
});
|
||||
|
||||
scheduledJobs.push({
|
||||
id: 'plex-refresh-token',
|
||||
name: 'Plex Refresh Token',
|
||||
type: 'process',
|
||||
interval: 'fixed',
|
||||
cronSchedule: jobs['plex-refresh-token'].schedule,
|
||||
job: schedule.scheduleJob(jobs['plex-refresh-token'].schedule, () => {
|
||||
logger.info('Starting scheduled job: Plex Refresh Token', {
|
||||
label: 'Jobs',
|
||||
});
|
||||
refreshToken.run();
|
||||
}),
|
||||
});
|
||||
|
||||
logger.info('Scheduled jobs loaded', { label: 'Jobs' });
|
||||
};
|
||||
|
||||
@@ -8,7 +8,8 @@ export type AvailableCacheIds =
| 'imdb'
| 'github'
| 'plexguid'
| 'plextv';
| 'plextv'
| 'plexwatchlist';

const DEFAULT_TTL = 300;
const DEFAULT_CHECK_PERIOD = 120;
@@ -68,6 +69,7 @@ class CacheManager {
stdTtl: 86400 * 7, // 1 week cache
checkPeriod: 60,
}),
plexwatchlist: new Cache('plexwatchlist', 'Plex Watchlist'),
};

public getCache(id: AvailableCacheIds): Cache {
|
||||
|
||||
@@ -106,7 +106,7 @@ class DownloadTracker {
{ label: 'Download Tracker' }
);
}
} catch (e) {
} catch {
logger.error(
`Unable to get queue from Radarr server: ${server.name}`,
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import logger from '@server/logger';
|
||||
import axios from 'axios';
|
||||
import rateLimit, { type rateLimitOptions } from 'axios-rate-limit';
|
||||
import type { RateLimitOptions } from '@server/utils/rateLimit';
|
||||
import rateLimit from '@server/utils/rateLimit';
|
||||
import { createHash } from 'crypto';
|
||||
import { promises } from 'fs';
|
||||
import mime from 'mime/lite';
|
||||
@@ -131,29 +131,33 @@ class ImageProxy {
|
||||
return 0;
|
||||
}
|
||||
|
||||
private axios;
|
||||
private fetch: typeof fetch;
|
||||
private cacheVersion;
|
||||
private key;
|
||||
private baseUrl;
|
||||
private headers: HeadersInit | null = null;
|
||||
|
||||
constructor(
|
||||
key: string,
|
||||
baseUrl: string,
|
||||
options: {
|
||||
cacheVersion?: number;
|
||||
rateLimitOptions?: rateLimitOptions;
|
||||
headers?: Record<string, unknown>;
|
||||
rateLimitOptions?: RateLimitOptions;
|
||||
headers?: HeadersInit;
|
||||
} = {}
|
||||
) {
|
||||
this.cacheVersion = options.cacheVersion ?? 1;
|
||||
this.baseUrl = baseUrl;
|
||||
this.key = key;
|
||||
this.axios = axios.create({
|
||||
baseURL: baseUrl,
|
||||
headers: options.headers,
|
||||
});
|
||||
|
||||
if (options.rateLimitOptions) {
|
||||
this.axios = rateLimit(this.axios, options.rateLimitOptions);
|
||||
this.fetch = rateLimit(fetch, {
|
||||
...options.rateLimitOptions,
|
||||
});
|
||||
} else {
|
||||
this.fetch = fetch;
|
||||
}
|
||||
this.headers = options.headers || null;
|
||||
}
|
||||
|
||||
public async getImage(
|
||||
@@ -245,23 +249,34 @@ class ImageProxy {
|
||||
): Promise<ImageResponse | null> {
|
||||
try {
|
||||
const directory = join(this.getCacheDirectory(), cacheKey);
|
||||
const response = await this.axios.get(path, {
|
||||
responseType: 'arraybuffer',
|
||||
const href =
|
||||
this.baseUrl +
|
||||
(this.baseUrl.length > 0
|
||||
? this.baseUrl.endsWith('/')
|
||||
? ''
|
||||
: '/'
|
||||
: '') +
|
||||
(path.startsWith('/') ? path.slice(1) : path);
|
||||
const response = await this.fetch(href, {
|
||||
headers: this.headers || undefined,
|
||||
});
|
||||
|
||||
const buffer = Buffer.from(response.data, 'binary');
|
||||
if (!response.ok) {
|
||||
return null;
|
||||
}
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
|
||||
const extension = mime.getExtension(
|
||||
response.headers['Content-Type']?.toString() ?? ''
|
||||
response.headers.get('content-type') ?? ''
|
||||
);
|
||||
|
||||
let maxAge = Number(
|
||||
(response.headers['Cache-Control']?.toString() ?? '0').split('=')[1]
|
||||
(response.headers.get('cache-control') ?? '0').split('=')[1]
|
||||
);
|
||||
|
||||
if (!maxAge) maxAge = 86400;
|
||||
const expireAt = Date.now() + maxAge * 1000;
|
||||
const etag = (response.headers.etag ?? '').replace(/"/g, '');
|
||||
const etag = (response.headers.get('etag') ?? '').replace(/"/g, '');
|
||||
|
||||
await this.writeToCacheDir(
|
||||
directory,
|
||||
|
||||
@@ -4,7 +4,6 @@ import { User } from '@server/entity/User';
import type { NotificationAgentDiscord } from '@server/lib/settings';
import { getSettings, NotificationAgentKey } from '@server/lib/settings';
import logger from '@server/logger';
import axios from 'axios';
import {
hasNotificationType,
Notification,
@@ -292,14 +291,27 @@ class DiscordAgent
}
}

await axios.post(settings.options.webhookUrl, {
username: settings.options.botUsername
? settings.options.botUsername
: getSettings().main.applicationTitle,
avatar_url: settings.options.botAvatarUrl,
embeds: [this.buildEmbed(type, payload)],
content: userMentions.join(' '),
} as DiscordWebhookPayload);
if (settings.options.webhookRoleId) {
userMentions.push(`<@&${settings.options.webhookRoleId}>`);
}

const response = await fetch(settings.options.webhookUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
username: settings.options.botUsername
? settings.options.botUsername
: getSettings().main.applicationTitle,
avatar_url: settings.options.botAvatarUrl,
embeds: [this.buildEmbed(type, payload)],
content: userMentions.join(' '),
} as DiscordWebhookPayload),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}

return true;
} catch (e) {
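One detail of the fetch migration above: failures are rethrown as new Error(response.statusText, { cause: response }). The cause option is standard ES2022 (Node 16.9+), so callers can get back at the original Response if they need to; an illustrative example of the same pattern:

// Illustrative only: posting to a placeholder webhook and inspecting the cause.
async function postWebhook(url: string, body: unknown): Promise<void> {
  const response = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  if (!response.ok) {
    throw new Error(response.statusText, { cause: response });
  }
}

postWebhook('https://example.invalid/hook', { hello: 'world' }).catch((e) => {
  if (e instanceof Error && e.cause instanceof Response) {
    console.error('webhook rejected with status', e.cause.status);
  }
});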
|
||||
|
||||
@@ -2,7 +2,6 @@ import { IssueStatus, IssueTypeName } from '@server/constants/issue';
|
||||
import type { NotificationAgentGotify } from '@server/lib/settings';
|
||||
import { getSettings } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import axios from 'axios';
|
||||
import { hasNotificationType, Notification } from '..';
|
||||
import type { NotificationAgent, NotificationPayload } from './agent';
|
||||
import { BaseAgent } from './agent';
|
||||
@@ -133,7 +132,16 @@ class GotifyAgent
|
||||
const endpoint = `${settings.options.url}/message?token=${settings.options.token}`;
|
||||
const notificationPayload = this.getNotificationPayload(type, payload);
|
||||
|
||||
await axios.post(endpoint, notificationPayload);
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(notificationPayload),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(response.statusText, { cause: response });
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
|
||||
@@ -3,7 +3,6 @@ import { MediaStatus } from '@server/constants/media';
|
||||
import type { NotificationAgentLunaSea } from '@server/lib/settings';
|
||||
import { getSettings } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import axios from 'axios';
|
||||
import { hasNotificationType, Notification } from '..';
|
||||
import type { NotificationAgent, NotificationPayload } from './agent';
|
||||
import { BaseAgent } from './agent';
|
||||
@@ -101,19 +100,23 @@ class LunaSeaAgent
|
||||
});
|
||||
|
||||
try {
|
||||
await axios.post(
|
||||
settings.options.webhookUrl,
|
||||
this.buildPayload(type, payload),
|
||||
settings.options.profileName
|
||||
const response = await fetch(settings.options.webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: settings.options.profileName
|
||||
? {
|
||||
headers: {
|
||||
Authorization: `Basic ${Buffer.from(
|
||||
`${settings.options.profileName}:`
|
||||
).toString('base64')}`,
|
||||
},
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
: undefined
|
||||
);
|
||||
: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Basic ${Buffer.from(
|
||||
`${settings.options.profileName}:`
|
||||
).toString('base64')}`,
|
||||
},
|
||||
body: JSON.stringify(this.buildPayload(type, payload)),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(response.statusText, { cause: response });
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
|
||||
@@ -5,7 +5,6 @@ import { User } from '@server/entity/User';
|
||||
import type { NotificationAgentPushbullet } from '@server/lib/settings';
|
||||
import { getSettings, NotificationAgentKey } from '@server/lib/settings';
|
||||
import logger from '@server/logger';
|
||||
import axios from 'axios';
|
||||
import {
|
||||
hasNotificationType,
|
||||
Notification,
|
||||
@@ -123,15 +122,20 @@ class PushbulletAgent
|
||||
});
|
||||
|
||||
try {
|
||||
await axios.post(
|
||||
endpoint,
|
||||
{ ...notificationPayload, channel_tag: settings.options.channelTag },
|
||||
{
|
||||
headers: {
|
||||
'Access-Token': settings.options.accessToken,
|
||||
},
|
||||
}
|
||||
);
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Access-Token': settings.options.accessToken,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
...notificationPayload,
|
||||
channel_tag: settings.options.channelTag,
|
||||
}),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(response.statusText, { cause: response });
|
||||
}
|
||||
} catch (e) {
|
||||
let errorData;
|
||||
try {
|
||||
@@ -170,11 +174,17 @@ class PushbulletAgent
|
||||
});
|
||||
|
||||
try {
|
||||
await axios.post(endpoint, notificationPayload, {
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Access-Token': payload.notifyUser.settings.pushbulletAccessToken,
|
||||
},
|
||||
body: JSON.stringify(notificationPayload),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(response.statusText, { cause: response });
|
||||
}
|
||||
} catch (e) {
|
||||
let errorData;
|
||||
try {
|
||||
@@ -225,11 +235,17 @@ class PushbulletAgent
|
||||
});
|
||||
|
||||
try {
|
||||
await axios.post(endpoint, notificationPayload, {
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Access-Token': user.settings.pushbulletAccessToken,
|
||||
},
|
||||
body: JSON.stringify(notificationPayload),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(response.statusText, { cause: response });
|
||||
}
|
||||
} catch (e) {
|
||||
let errorData;
|
||||
try {
|
||||
|
||||
@@ -5,7 +5,6 @@ import { User } from '@server/entity/User';
import type { NotificationAgentPushover } from '@server/lib/settings';
import { getSettings, NotificationAgentKey } from '@server/lib/settings';
import logger from '@server/logger';
import axios from 'axios';
import {
hasNotificationType,
Notification,
@@ -52,12 +51,15 @@ class PushoverAgent
imageUrl: string
): Promise<Partial<PushoverImagePayload>> {
try {
const response = await axios.get(imageUrl, {
responseType: 'arraybuffer',
});
const base64 = Buffer.from(response.data, 'binary').toString('base64');
const response = await fetch(imageUrl);
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}
const arrayBuffer = await response.arrayBuffer();
const base64 = Buffer.from(arrayBuffer).toString('base64');
const contentType = (
response.headers['Content-Type'] || response.headers['content-type']
response.headers.get('Content-Type') ||
response.headers.get('content-type')
)?.toString();

return {
@@ -208,12 +210,21 @@ class PushoverAgent
});

try {
await axios.post(endpoint, {
...notificationPayload,
token: settings.options.accessToken,
user: settings.options.userToken,
sound: settings.options.sound,
} as PushoverPayload);
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...notificationPayload,
token: settings.options.accessToken,
user: settings.options.userToken,
sound: settings.options.sound,
} as PushoverPayload),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}
} catch (e) {
let errorData;
try {
@@ -255,12 +266,21 @@ class PushoverAgent
});

try {
await axios.post(endpoint, {
...notificationPayload,
token: payload.notifyUser.settings.pushoverApplicationToken,
user: payload.notifyUser.settings.pushoverUserKey,
sound: payload.notifyUser.settings.pushoverSound,
} as PushoverPayload);
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...notificationPayload,
token: payload.notifyUser.settings.pushoverApplicationToken,
user: payload.notifyUser.settings.pushoverUserKey,
sound: payload.notifyUser.settings.pushoverSound,
} as PushoverPayload),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}
} catch (e) {
let errorData;
try {
@@ -312,11 +332,20 @@ class PushoverAgent
});

try {
await axios.post(endpoint, {
...notificationPayload,
token: user.settings.pushoverApplicationToken,
user: user.settings.pushoverUserKey,
} as PushoverPayload);
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...notificationPayload,
token: user.settings.pushoverApplicationToken,
user: user.settings.pushoverUserKey,
} as PushoverPayload),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}
} catch (e) {
let errorData;
try {

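Note on the Pushover image hunk above: the axios `responseType: 'arraybuffer'` call is replaced by the Fetch API's `arrayBuffer()` plus `headers.get()`. A minimal standalone sketch of that pattern, assuming Node 18+ for the global fetch (the function name here is illustrative, not part of the codebase):

// Sketch: download an image and return it as base64 along with its content type.
async function fetchImageAsBase64(
  imageUrl: string
): Promise<{ base64: string; contentType?: string }> {
  const response = await fetch(imageUrl);
  if (!response.ok) {
    // Same convention as the diff: attach the failed Response as the error cause.
    throw new Error(response.statusText, { cause: response });
  }
  const arrayBuffer = await response.arrayBuffer();
  return {
    base64: Buffer.from(arrayBuffer).toString('base64'),
    contentType: response.headers.get('content-type') ?? undefined,
  };
}
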
@@ -2,7 +2,6 @@ import { IssueStatus, IssueTypeName } from '@server/constants/issue';
import type { NotificationAgentSlack } from '@server/lib/settings';
import { getSettings } from '@server/lib/settings';
import logger from '@server/logger';
import axios from 'axios';
import { hasNotificationType, Notification } from '..';
import type { NotificationAgent, NotificationPayload } from './agent';
import { BaseAgent } from './agent';
@@ -238,10 +237,16 @@ class SlackAgent
subject: payload.subject,
});
try {
await axios.post(
settings.options.webhookUrl,
this.buildEmbed(type, payload)
);
const response = await fetch(settings.options.webhookUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(this.buildEmbed(type, payload)),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}

return true;
} catch (e) {

@@ -5,7 +5,6 @@ import { User } from '@server/entity/User';
import type { NotificationAgentTelegram } from '@server/lib/settings';
import { getSettings, NotificationAgentKey } from '@server/lib/settings';
import logger from '@server/logger';
import axios from 'axios';
import {
hasNotificationType,
Notification,
@@ -175,11 +174,20 @@ class TelegramAgent
});

try {
await axios.post(endpoint, {
...notificationPayload,
chat_id: settings.options.chatId,
disable_notification: !!settings.options.sendSilently,
} as TelegramMessagePayload | TelegramPhotoPayload);
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...notificationPayload,
chat_id: settings.options.chatId,
disable_notification: !!settings.options.sendSilently,
} as TelegramMessagePayload | TelegramPhotoPayload),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}
} catch (e) {
let errorData;
try {
@@ -217,12 +225,21 @@ class TelegramAgent
});

try {
await axios.post(endpoint, {
...notificationPayload,
chat_id: payload.notifyUser.settings.telegramChatId,
disable_notification:
!!payload.notifyUser.settings.telegramSendSilently,
} as TelegramMessagePayload | TelegramPhotoPayload);
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...notificationPayload,
chat_id: payload.notifyUser.settings.telegramChatId,
disable_notification:
!!payload.notifyUser.settings.telegramSendSilently,
} as TelegramMessagePayload | TelegramPhotoPayload),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}
} catch (e) {
let errorData;
try {
@@ -271,11 +288,20 @@ class TelegramAgent
});

try {
await axios.post(endpoint, {
...notificationPayload,
chat_id: user.settings.telegramChatId,
disable_notification: !!user.settings?.telegramSendSilently,
} as TelegramMessagePayload | TelegramPhotoPayload);
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...notificationPayload,
chat_id: user.settings.telegramChatId,
disable_notification: !!user.settings?.telegramSendSilently,
} as TelegramMessagePayload | TelegramPhotoPayload),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}
} catch (e) {
let errorData;
try {

@@ -3,7 +3,6 @@ import { MediaStatus } from '@server/constants/media';
import type { NotificationAgentWebhook } from '@server/lib/settings';
import { getSettings } from '@server/lib/settings';
import logger from '@server/logger';
import axios from 'axios';
import { get } from 'lodash';
import { hasNotificationType, Notification } from '..';
import type { NotificationAgent, NotificationPayload } from './agent';
@@ -178,17 +177,19 @@ class WebhookAgent
});

try {
await axios.post(
settings.options.webhookUrl,
this.buildPayload(type, payload),
settings.options.authHeader
? {
headers: {
Authorization: settings.options.authHeader,
},
}
: undefined
);
const response = await fetch(settings.options.webhookUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(settings.options.authHeader
? { Authorization: settings.options.authHeader }
: {}),
},
body: JSON.stringify(this.buildPayload(type, payload)),
});
if (!response.ok) {
throw new Error(response.statusText, { cause: response });
}

return true;
} catch (e) {

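The Pushbullet, Pushover, Slack, Telegram and webhook hunks above all converge on the same fetch-based POST shape. A hedged sketch of that shared pattern (the helper name and signature are illustrative only; each agent inlines this logic rather than calling a shared function):

// Sketch: JSON POST via fetch with optional extra headers; non-2xx responses are
// surfaced as an Error with the Response attached as its cause for later inspection.
async function postJson(
  url: string,
  payload: unknown,
  extraHeaders: Record<string, string> = {}
): Promise<void> {
  const response = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', ...extraHeaders },
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error(response.statusText, { cause: response });
  }
}
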
server/lib/refreshToken.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import PlexTvAPI from '@server/api/plextv';
import { getRepository } from '@server/datasource';
import { User } from '@server/entity/User';
import logger from '@server/logger';

class RefreshToken {
public async run() {
const userRepository = getRepository(User);

const users = await userRepository
.createQueryBuilder('user')
.addSelect('user.plexToken')
.where("user.plexToken != ''")
.getMany();

for (const user of users) {
await this.refreshUserToken(user);
}
}

private async refreshUserToken(user: User) {
if (!user.plexToken) {
logger.warn('Skipping user refresh token for user without plex token', {
label: 'Plex Refresh Token',
user: user.displayName,
});
return;
}

const plexTvApi = new PlexTvAPI(user.plexToken);
plexTvApi.pingToken();
}
}

const refreshToken = new RefreshToken();

export default refreshToken;

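The new refreshToken job iterates every user with a stored Plex token and pings plex.tv so the token does not lapse; the settings hunk further down registers it as the 'plex-refresh-token' job on a '0 0 5 * * *' schedule (daily at 05:00). A rough sketch of wiring it to a scheduler, using node-cron purely as a stand-in for the project's own job runner:

import cron from 'node-cron'; // stand-in scheduler, not necessarily what the project uses
import refreshToken from '@server/lib/refreshToken';

// Six-field expression: second 0, minute 0, hour 5, every day.
cron.schedule('0 0 5 * * *', () => {
  void refreshToken.run();
});
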
@@ -278,9 +278,7 @@ class PlexScanner
const seasons = tvShow.seasons;
const processableSeasons: ProcessableSeason[] = [];

const filteredSeasons = seasons.filter((sn) => sn.season_number !== 0);

for (const season of filteredSeasons) {
for (const season of seasons) {
const matchedPlexSeason = metadata.Children?.Metadata.find(
(md) => Number(md.index) === season.season_number
);

@@ -103,10 +103,8 @@ class SonarrScanner

const tmdbId = tvShow.id;

const filteredSeasons = sonarrSeries.seasons.filter(
(sn) =>
sn.seasonNumber !== 0 &&
tvShow.seasons.find((s) => s.season_number === sn.seasonNumber)
const filteredSeasons = sonarrSeries.seasons.filter((sn) =>
tvShow.seasons.find((s) => s.season_number === sn.seasonNumber)
);

for (const season of filteredSeasons) {

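A plain-language reading of the two scanner hunks above: both scanners previously dropped specials up front, and after this change season 0 is no longer filtered out by season number alone (the Sonarr scanner still keeps only seasons that TMDB also reports). Illustrative contrast, with hypothetical data:

// Before: specials were excluded before processing.
const before = seasons.filter((sn) => sn.season_number !== 0);
// After (Plex scanner): every season returned by TMDB is considered, including season 0.
const after = seasons;
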
@@ -170,6 +170,7 @@ export interface NotificationAgentDiscord extends NotificationAgentConfig {
botUsername?: string;
botAvatarUrl?: string;
webhookUrl: string;
webhookRoleId?: string;
enableMentions: boolean;
};
}
@@ -281,6 +282,7 @@ export type JobId =
| 'plex-recently-added-scan'
| 'plex-full-scan'
| 'plex-watchlist-sync'
| 'plex-refresh-token'
| 'radarr-scan'
| 'sonarr-scan'
| 'download-sync'
@@ -394,6 +396,7 @@ class Settings {
types: 0,
options: {
webhookUrl: '',
webhookRoleId: '',
enableMentions: true,
},
},
@@ -467,7 +470,10 @@ class Settings {
schedule: '0 0 3 * * *',
},
'plex-watchlist-sync': {
schedule: '0 */10 * * * *',
schedule: '0 */3 * * * *',
},
'plex-refresh-token': {
schedule: '0 0 5 * * *',
},
'radarr-scan': {
schedule: '0 0 4 * * *',

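For readers unfamiliar with the schedule strings above: they are six-field cron expressions with a leading seconds field. A hedged annotation of the changed and added entries:

// Six-field cron: second minute hour day-of-month month day-of-week
const schedules = {
  'plex-watchlist-sync': '0 */3 * * * *', // every 3 minutes (previously every 10)
  'plex-refresh-token': '0 0 5 * * *', // daily at 05:00
};
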
@@ -62,7 +62,7 @@ class WatchlistSync

const plexTvApi = new PlexTvAPI(user.plexToken);

const response = await plexTvApi.getWatchlist({ size: 200 });
const response = await plexTvApi.getWatchlist({ size: 20 });

const mediaItems = await Media.getRelatedMedia(
user,

server/migration/postgres/1705599190375-InitialMigration.ts (new file, 304 lines)
@@ -0,0 +1,304 @@
import type { MigrationInterface, QueryRunner } from 'typeorm';

export class InitialMigration1705599190375 implements MigrationInterface {
name = 'InitialMigration1705599190375';

public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`create table if not exists session
(
"expiredAt" bigint,
id text,
json text
);`
);
await queryRunner.query(
`create index if not exists "idx_194703_IDX_28c5d1d16da7908c97c9bc2f74"
on session ("expiredAt");`
);
await queryRunner.query(
`create unique index if not exists idx_194703_sqlite_autoindex_session_1
on session (id);`
);
await queryRunner.query(
`create table if not exists media
(
id serial,
"mediaType" text,
"tmdbId" int,
"tvdbId" int,
"imdbId" text,
status int default '1'::int,
status4k int default '1'::int,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
"lastSeasonChange" timestamp with time zone default CURRENT_TIMESTAMP,
"mediaAddedAt" timestamp with time zone,
"serviceId" int,
"serviceId4k" int,
"externalServiceId" int,
"externalServiceId4k" int,
"externalServiceSlug" text,
"externalServiceSlug4k" text,
"ratingKey" text,
"ratingKey4k" text,
"jellyfinMediaId" text,
"jellyfinMediaId4k" text,
constraint idx_194722_media_pkey
primary key (id)
);`
);
await queryRunner.query(
`create table if not exists season
(
id serial,
"seasonNumber" int,
status int default '1'::int,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
"mediaId" int not null,
status4k int default '1'::int,
constraint idx_194715_season_pkey
primary key (id),
foreign key ("mediaId") references media
on delete cascade
);`
);
await queryRunner.query(
`create index if not exists "idx_194722_IDX_7ff2d11f6a83cb52386eaebe74"
on media ("imdbId");`
);
await queryRunner.query(
`create index if not exists "idx_194722_IDX_41a289eb1fa489c1bc6f38d9c3"
on media ("tvdbId");`
);
await queryRunner.query(
`create index if not exists "idx_194722_IDX_7157aad07c73f6a6ae3bbd5ef5"
on media ("tmdbId");`
);
await queryRunner.query(
`create unique index if not exists idx_194722_sqlite_autoindex_media_1
on media ("tvdbId");`
);
await queryRunner.query(
`create table if not exists "user"
(
id serial,
email text,
username text,
"plexId" int,
"plexToken" text,
permissions int default '0'::int,
avatar text,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
password text,
"userType" int default '1'::int,
"plexUsername" text,
"resetPasswordGuid" text,
"recoveryLinkExpirationDate" date,
"movieQuotaLimit" int,
"movieQuotaDays" int,
"tvQuotaLimit" int,
"tvQuotaDays" int,
"jellyfinUsername" text,
"jellyfinAuthToken" text,
"jellyfinUserId" text,
"jellyfinDeviceId" text,
constraint idx_194731_user_pkey
primary key (id)
);`
);
await queryRunner.query(
`create unique index if not exists idx_194731_sqlite_autoindex_user_1
on "user" (email);`
);
await queryRunner.query(
`create table if not exists user_push_subscription
(
id serial,
endpoint text,
p256dh text,
auth text,
"userId" int,
constraint idx_194740_user_push_subscription_pkey
primary key (id),
foreign key ("userId") references "user"
on delete cascade
);`
);
await queryRunner.query(
`create unique index if not exists idx_194740_sqlite_autoindex_user_push_subscription_1
on user_push_subscription (auth);`
);
await queryRunner.query(
`create table if not exists issue
(
id serial,
"issueType" int,
status int default '1'::int,
"problemSeason" int default '0'::int,
"problemEpisode" int default '0'::int,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
"mediaId" int not null,
"createdById" int,
"modifiedById" int,
constraint idx_194747_issue_pkey
primary key (id),
foreign key ("modifiedById") references "user"
on delete cascade,
foreign key ("createdById") references "user"
on delete cascade,
foreign key ("mediaId") references media
on delete cascade
);`
);
await queryRunner.query(
`create table if not exists issue_comment
(
id serial,
message text,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
"userId" int,
"issueId" int,
constraint idx_194755_issue_comment_pkey
primary key (id),
foreign key ("issueId") references issue
on delete cascade,
foreign key ("userId") references "user"
on delete cascade
);`
);
await queryRunner.query(
`create table if not exists user_settings
(
id serial,
"notificationTypes" text,
"discordId" text,
"userId" int,
region text,
"originalLanguage" text,
"telegramChatId" text,
"telegramSendSilently" boolean,
"pgpKey" text,
locale text default ''::text,
"pushbulletAccessToken" text,
"pushoverApplicationToken" text,
"pushoverUserKey" text,
"watchlistSyncMovies" boolean,
"watchlistSyncTv" boolean,
"pushoverSound" varchar,
constraint idx_194762_user_settings_pkey
primary key (id),
foreign key ("userId") references "user"
on delete cascade
);`
);
await queryRunner.query(
`create unique index if not exists idx_194762_sqlite_autoindex_user_settings_1
on user_settings ("userId");`
);
await queryRunner.query(
`create table if not exists media_request
(
id serial,
status int,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
type text,
"mediaId" int not null,
"requestedById" int,
"modifiedById" int,
is4k boolean default false,
"serverId" int,
"profileId" int,
"rootFolder" text,
"languageProfileId" int,
tags text,
"isAutoRequest" boolean default false,
constraint idx_194770_media_request_pkey
primary key (id),
foreign key ("modifiedById") references "user"
on delete set null,
foreign key ("requestedById") references "user"
on delete cascade,
foreign key ("mediaId") references media
on delete cascade
);`
);
await queryRunner.query(
`create table if not exists season_request
(
id serial NOT NULL,
"seasonNumber" int,
status int default '1'::int,
"createdAt" timestamp with time zone default now(),
"updatedAt" timestamp with time zone default now(),
"requestId" int,
constraint idx_194709_season_request_pkey
primary key (id),
foreign key ("requestId") references media_request
on delete cascade
);`
);
await queryRunner.query(
`create table if not exists discover_slider
(
id serial,
type integer,
"order" integer,
"isBuiltIn" boolean default false,
enabled boolean default true,
title text,
data text,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
constraint idx_194779_discover_slider_pkey
primary key (id)
);`
);
await queryRunner.query(
`create table if not exists watchlist
(
id serial,
"ratingKey" text,
"mediaType" text,
title text,
"tmdbId" int,
"createdAt" timestamp with time zone default CURRENT_TIMESTAMP,
"updatedAt" timestamp with time zone default CURRENT_TIMESTAMP,
"requestedById" int,
"mediaId" int not null,
constraint idx_194788_watchlist_pkey
primary key (id)
);`
);
await queryRunner.query(
`create index if not exists "idx_194788_IDX_939f205946256cc0d2a1ac51a8"
on watchlist ("tmdbId");`
);
await queryRunner.query(
`create unique index if not exists idx_194788_sqlite_autoindex_watchlist_1
on watchlist ("tmdbId", "requestedById");`
);
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`drop table if exists session cascade`);
await queryRunner.query(`drop table if exists season_request cascade`);
await queryRunner.query(`drop table if exists season cascade`);
await queryRunner.query(
`drop table if exists user_push_subscription cascade`
);
await queryRunner.query(`drop table if exists issue_comment cascade`);
await queryRunner.query(`drop table if exists issue cascade`);
await queryRunner.query(`drop table if exists user_settings cascade`);
await queryRunner.query(`drop table if exists media_request cascade`);
await queryRunner.query(`drop table if exists media cascade`);
await queryRunner.query(`drop table if exists "user" cascade`);
await queryRunner.query(`drop table if exists discover_slider cascade`);
await queryRunner.query(`drop table if exists watchlist cascade`);
}
}

server/migration/postgres/1730770837441-AddBlacklist.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
import type { MigrationInterface, QueryRunner } from 'typeorm';

export class AddBlacklist1730770837441 implements MigrationInterface {
name = 'AddBlacklist1730770837441';

public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`CREATE TABLE "blacklist"
(
"id" SERIAL PRIMARY KEY,
"mediaType" VARCHAR NOT NULL,
"title" VARCHAR,
"tmdbId" INTEGER NOT NULL,
"createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
"userId" INTEGER,
"mediaId" INTEGER,
CONSTRAINT "UQ_6bbafa28411e6046421991ea21c" UNIQUE ("tmdbId", "userId")
)`
);

await queryRunner.query(
`CREATE INDEX "IDX_6bbafa28411e6046421991ea21" ON "blacklist" ("tmdbId")`
);
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`DROP INDEX IF EXISTS "IDX_6bbafa28411e6046421991ea21"`
);
await queryRunner.query(`DROP TABLE IF EXISTS "blacklist"`);
}
}

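TypeORM only executes the two Postgres migrations above if they are registered on the data source. A minimal sketch of a Postgres DataSource that would pick them up; the option values and environment variable are assumptions for illustration, not the project's actual datasource module:

import { DataSource } from 'typeorm';

// Sketch only: the real connection options live in the project's datasource module.
export const dataSource = new DataSource({
  type: 'postgres',
  url: process.env.DATABASE_URL, // assumed env var
  entities: ['server/entity/**/*.ts'],
  migrations: ['server/migration/postgres/*.ts'],
  migrationsRun: true, // run pending migrations such as InitialMigration and AddBlacklist on startup
});
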
@@ -2,14 +2,12 @@ import { MediaType } from '@server/constants/media';
import { getRepository } from '@server/datasource';
import { Blacklist } from '@server/entity/Blacklist';
import Media from '@server/entity/Media';
import { NotFoundError } from '@server/entity/Watchlist';
import type { BlacklistResultsResponse } from '@server/interfaces/api/blacklistInterfaces';
import { Permission } from '@server/lib/permissions';
import logger from '@server/logger';
import { isAuthenticated } from '@server/middleware/auth';
import { Router } from 'express';
import rateLimit from 'express-rate-limit';
import { QueryFailedError } from 'typeorm';
import { EntityNotFoundError, QueryFailedError } from 'typeorm';
import { z } from 'zod';

const blacklistRoutes = Router();
@@ -26,7 +24,6 @@ blacklistRoutes.get(
isAuthenticated([Permission.MANAGE_BLACKLIST, Permission.VIEW_BLACKLIST], {
type: 'or',
}),
rateLimit({ windowMs: 60 * 1000, max: 50 }),
async (req, res, next) => {
const pageSize = req.query.take ? Number(req.query.take) : 25;
const skip = req.query.skip ? Number(req.query.skip) : 0;
@@ -71,6 +68,32 @@ blacklistRoutes.get(
}
);

blacklistRoutes.get(
'/:id',
isAuthenticated([Permission.MANAGE_BLACKLIST], {
type: 'or',
}),
async (req, res, next) => {
try {
const blacklisteRepository = getRepository(Blacklist);

const blacklistItem = await blacklisteRepository.findOneOrFail({
where: { tmdbId: Number(req.params.id) },
});

return res.status(200).send(blacklistItem);
} catch (e) {
if (e instanceof EntityNotFoundError) {
return next({
status: 401,
message: e.message,
});
}
return next({ status: 500, message: e.message });
}
}
);

blacklistRoutes.post(
'/',
isAuthenticated([Permission.MANAGE_BLACKLIST], {
@@ -134,7 +157,7 @@ blacklistRoutes.delete(

return res.status(204).send();
} catch (e) {
if (e instanceof NotFoundError) {
if (e instanceof EntityNotFoundError) {
return next({
status: 401,
message: e.message,

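A quick usage sketch for the new GET '/:id' handler above, assuming the router is mounted at /api/v1/blacklist (the mount point is not shown in this diff) and the caller holds the MANAGE_BLACKLIST permission:

// The :id parameter is looked up as a TMDB id (tmdbId), not the row id.
const res = await fetch('/api/v1/blacklist/550');
if (res.ok) {
  const item = await res.json();
  console.log(item.title, item.mediaType);
} else {
  // The route maps EntityNotFoundError to a 401 and anything else to a 500.
  console.error(res.status);
}
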
@@ -5,7 +5,6 @@ import { Router } from 'express';
const router = Router();
const tmdbImageProxy = new ImageProxy('tmdb', 'https://image.tmdb.org', {
rateLimitOptions: {
maxRequests: 20,
maxRPS: 50,
},
});

@@ -113,7 +113,7 @@ requestRoutes.get<Record<string, unknown>, RequestResultsResponse>(
requestStatus: statusFilter,
})
.andWhere(
'((request.is4k = 0 AND media.status IN (:...mediaStatus)) OR (request.is4k = 1 AND media.status4k IN (:...mediaStatus)))',
'((request.is4k = false AND media.status IN (:...mediaStatus)) OR (request.is4k = true AND media.status4k IN (:...mediaStatus)))',
{
mediaStatus: mediaStatusFilter,
}

@@ -45,7 +45,7 @@ router.get('/', async (req, res, next) => {
`CASE WHEN (user.username IS NULL OR user.username = '') THEN (
CASE WHEN (user.plexUsername IS NULL OR user.plexUsername = '') THEN (
CASE WHEN (user.jellyfinUsername IS NULL OR user.jellyfinUsername = '') THEN
user.email
"user"."email"
ELSE
LOWER(user.jellyfinUsername)
END)

@@ -1,4 +1,5 @@
import { ApiErrorCode } from '@server/constants/error';
import { UserType } from '@server/constants/user';
import { getRepository } from '@server/datasource';
import { User } from '@server/entity/User';
import { UserSettings } from '@server/entity/UserSettings';
@@ -99,11 +100,29 @@ userSettingsRoutes.post<
});
}

user.username = req.body.username;
const oldEmail = user.email;
const oldUsername = user.username;
user.username = req.body.username;
if (user.jellyfinUsername) {
user.email = req.body.email || user.jellyfinUsername || user.email;
}
// Edge case for local users, because they have no Jellyfin username to fall back on
// if the email is not provided
if (user.userType === UserType.LOCAL) {
if (req.body.email) {
user.email = req.body.email;
if (
!user.username &&
user.email !== oldEmail &&
!oldEmail.includes('@')
) {
user.username = oldEmail;
}
} else if (req.body.username) {
user.email = oldUsername || user.email;
user.username = req.body.username;
}
}

const existingUser = await userRepository.findOne({
where: { email: user.email },

server/utils/DbColumnHelper.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { isPgsql } from '@server/datasource';
import type { ColumnOptions, ColumnType } from 'typeorm';
import { Column } from 'typeorm';
const pgTypeMapping: { [key: string]: ColumnType } = {
datetime: 'timestamp with time zone',
};

export function resolveDbType(pgType: ColumnType): ColumnType {
if (isPgsql && pgType.toString() in pgTypeMapping) {
return pgTypeMapping[pgType.toString()];
}
return pgType;
}

export function DbAwareColumn(columnOptions: ColumnOptions) {
if (columnOptions.type) {
columnOptions.type = resolveDbType(columnOptions.type);
}
return Column(columnOptions);
}

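A short usage sketch for the new helper: the entity below is hypothetical, but it shows how DbAwareColumn lets an entity declare a SQLite-style 'datetime' column that resolves to 'timestamp with time zone' when isPgsql is true.

import { Entity, PrimaryGeneratedColumn } from 'typeorm';
import { DbAwareColumn } from '@server/utils/DbColumnHelper';

@Entity()
class ExampleEvent {
  @PrimaryGeneratedColumn()
  id: number;

  // 'datetime' on SQLite, resolved to 'timestamp with time zone' on Postgres.
  @DbAwareColumn({ type: 'datetime', nullable: true })
  occurredAt?: Date;
}
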
Some files were not shown because too many files have changed in this diff.