Fix inconsistent config schema

This commit is contained in:
Xoconoch
2025-08-11 09:30:28 -06:00
parent fae24489ef
commit de1e44e944
9 changed files with 356 additions and 328 deletions

View File

@@ -1,7 +1,7 @@
fastapi==0.116.1
uvicorn[standard]==0.35.0
celery==5.5.3
deezspot-spotizerr==2.2.6
deezspot-spotizerr==2.3.0
httpx==0.28.1
bcrypt==4.2.1
PyJWT==2.10.1

View File

@@ -1,11 +1,8 @@
from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi.responses import JSONResponse
import json
import logging
import os
from typing import Any, Optional, List
from pathlib import Path
from pydantic import BaseModel
from typing import Any
# Import the centralized config getters that handle file creation and defaults
from routes.utils.celery_config import (
@@ -16,12 +13,11 @@ from routes.utils.celery_config import (
from routes.utils.watch.manager import (
get_watch_config as get_watch_manager_config,
DEFAULT_WATCH_CONFIG,
CONFIG_FILE_PATH as WATCH_CONFIG_FILE_PATH,
MAIN_CONFIG_FILE_PATH as WATCH_MAIN_CONFIG_FILE_PATH,
)
# Import authentication dependencies
from routes.auth.middleware import require_admin_from_state, User
from routes.auth import AUTH_ENABLED, DISABLE_REGISTRATION
# Import credential utilities (DB-backed)
from routes.utils.credentials import list_credentials, _get_global_spotify_api_creds
@@ -75,37 +71,45 @@ def validate_config(config_data: dict, watch_config: dict = None) -> tuple[bool,
# Get current watch config if not provided
if watch_config is None:
watch_config = get_watch_config_http()
# Check if fallback is enabled but missing required accounts
if config_data.get("fallback", False):
has_spotify = has_credentials("spotify")
has_deezer = has_credentials("deezer")
if not has_spotify or not has_deezer:
missing_services = []
if not has_spotify:
missing_services.append("Spotify")
if not has_deezer:
missing_services.append("Deezer")
return False, f"Download Fallback requires accounts to be configured for both services. Missing: {', '.join(missing_services)}. Configure accounts before enabling fallback."
return (
False,
f"Download Fallback requires accounts to be configured for both services. Missing: {', '.join(missing_services)}. Configure accounts before enabling fallback.",
)
# Check if watch is enabled but no download methods are available
if watch_config.get("enabled", False):
real_time = config_data.get("realTime", False)
fallback = config_data.get("fallback", False)
if not real_time and not fallback:
return False, "Watch functionality requires either Real-time downloading or Download Fallback to be enabled."
return (
False,
"Watch functionality requires either Real-time downloading or Download Fallback to be enabled.",
)
return True, ""
except Exception as e:
logger.error(f"Error validating configuration: {e}", exc_info=True)
return False, f"Configuration validation error: {str(e)}"
def validate_watch_config(watch_data: dict, main_config: dict = None) -> tuple[bool, str]:
def validate_watch_config(
watch_data: dict, main_config: dict = None
) -> tuple[bool, str]:
"""
Validate watch configuration for consistency and requirements.
Returns (is_valid, error_message).
@@ -114,31 +118,37 @@ def validate_watch_config(watch_data: dict, main_config: dict = None) -> tuple[b
# Get current main config if not provided
if main_config is None:
main_config = get_config()
# Check if trying to enable watch without download methods
if watch_data.get("enabled", False):
real_time = main_config.get("realTime", False)
fallback = main_config.get("fallback", False)
if not real_time and not fallback:
return False, "Cannot enable watch: either Real-time downloading or Download Fallback must be enabled in download settings."
return (
False,
"Cannot enable watch: either Real-time downloading or Download Fallback must be enabled in download settings.",
)
# If fallback is enabled, check for required accounts
if fallback:
has_spotify = has_credentials("spotify")
has_deezer = has_credentials("deezer")
if not has_spotify or not has_deezer:
missing_services = []
if not has_spotify:
missing_services.append("Spotify")
if not has_deezer:
missing_services.append("Deezer")
return False, f"Cannot enable watch with fallback: missing accounts for {', '.join(missing_services)}. Configure accounts before enabling watch."
return (
False,
f"Cannot enable watch with fallback: missing accounts for {', '.join(missing_services)}. Configure accounts before enabling watch.",
)
return True, ""
except Exception as e:
logger.error(f"Error validating watch configuration: {e}", exc_info=True)
return False, f"Watch configuration validation error: {str(e)}"
@@ -150,7 +160,27 @@ def get_config():
return get_main_config_params()
# Helper to save main config
def _migrate_legacy_keys_inplace(cfg: dict) -> bool:
"""Migrate legacy snake_case keys in the main config to camelCase. Returns True if modified."""
legacy_map = {
"tracknum_padding": "tracknumPadding",
"save_cover": "saveCover",
"retry_delay_increase": "retryDelayIncrease",
"artist_separator": "artistSeparator",
"recursive_quality": "recursiveQuality",
}
modified = False
for legacy, camel in legacy_map.items():
if legacy in cfg and camel not in cfg:
cfg[camel] = cfg.pop(legacy)
modified = True
# Ensure watch block exists and migrate inside watch defaults handled in manager.get_watch_config
if "watch" not in cfg or not isinstance(cfg.get("watch"), dict):
cfg["watch"] = DEFAULT_WATCH_CONFIG.copy()
modified = True
return modified
def save_config(config_data):
"""Saves the main configuration data to main.json."""
try:
@@ -167,6 +197,10 @@ def save_config(config_data):
for key, value in config_data.items():
existing_config[key] = value
# Migration: unify legacy keys to camelCase
if _migrate_legacy_keys_inplace(existing_config):
logger.info("Migrated legacy config keys to camelCase.")
# Ensure all default keys are still there
for default_key, default_value in DEFAULT_MAIN_CONFIG.items():
if default_key not in existing_config:
@@ -181,39 +215,37 @@ def save_config(config_data):
return False, str(e)
# Helper to get watch config (uses the one from watch/manager.py)
def get_watch_config_http(): # Renamed to avoid conflict with the imported get_watch_config
"""Retrieves the watch configuration, creating it with defaults if necessary."""
def get_watch_config_http():
"""Retrieves the watch configuration from main.json watch key."""
return get_watch_manager_config()
# Helper to save watch config
def save_watch_config_http(watch_config_data): # Renamed
"""Saves the watch configuration data to watch.json."""
def save_watch_config_http(watch_config_data):
"""Saves the watch configuration data to the 'watch' key in main.json."""
try:
WATCH_CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
# Similar logic to save_config: merge with defaults/existing
existing_config = {}
if WATCH_CONFIG_FILE_PATH.exists():
with open(WATCH_CONFIG_FILE_PATH, "r") as f_read:
existing_config = json.load(f_read)
else: # Should be rare if get_watch_manager_config was called
existing_config = DEFAULT_WATCH_CONFIG.copy()
for key, value in watch_config_data.items():
existing_config[key] = value
for default_key, default_value in DEFAULT_WATCH_CONFIG.items():
if default_key not in existing_config:
existing_config[default_key] = default_value
with open(WATCH_CONFIG_FILE_PATH, "w") as f:
json.dump(existing_config, f, indent=4)
logger.info(f"Watch configuration saved to {WATCH_CONFIG_FILE_PATH}")
WATCH_MAIN_CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
if WATCH_MAIN_CONFIG_FILE_PATH.exists():
with open(WATCH_MAIN_CONFIG_FILE_PATH, "r") as f:
main_cfg = json.load(f) or {}
else:
main_cfg = DEFAULT_MAIN_CONFIG.copy()
current_watch = (main_cfg.get("watch") or {}).copy()
current_watch.update(watch_config_data or {})
# Ensure defaults
for k, v in DEFAULT_WATCH_CONFIG.items():
if k not in current_watch:
current_watch[k] = v
main_cfg["watch"] = current_watch
# Migrate legacy main keys as well
_migrate_legacy_keys_inplace(main_cfg)
with open(WATCH_MAIN_CONFIG_FILE_PATH, "w") as f:
json.dump(main_cfg, f, indent=4)
logger.info("Watch configuration updated in main.json under 'watch'.")
return True, None
except Exception as e:
logger.error(f"Error saving watch configuration: {e}", exc_info=True)
logger.error(
f"Error saving watch configuration to main.json: {e}", exc_info=True
)
return False, str(e)
@@ -228,7 +260,7 @@ async def handle_config(current_user: User = Depends(require_admin_from_state)):
logger.error(f"Error in GET /config: {e}", exc_info=True)
raise HTTPException(
status_code=500,
detail={"error": "Failed to retrieve configuration", "details": str(e)}
detail={"error": "Failed to retrieve configuration", "details": str(e)},
)
@@ -236,12 +268,16 @@ async def handle_config(current_user: User = Depends(require_admin_from_state)):
@router.put("/")
@router.post("")
@router.put("")
async def update_config(request: Request, current_user: User = Depends(require_admin_from_state)):
async def update_config(
request: Request, current_user: User = Depends(require_admin_from_state)
):
"""Handles POST/PUT requests to update the main configuration."""
try:
new_config = await request.json()
if not isinstance(new_config, dict):
raise HTTPException(status_code=400, detail={"error": "Invalid config format"})
raise HTTPException(
status_code=400, detail={"error": "Invalid config format"}
)
# Preserve the explicitFilter setting from environment
explicit_filter_env = os.environ.get("EXPLICIT_FILTER", "false").lower()
@@ -252,7 +288,10 @@ async def update_config(request: Request, current_user: User = Depends(require_a
if not is_valid:
raise HTTPException(
status_code=400,
detail={"error": "Configuration validation failed", "details": error_message}
detail={
"error": "Configuration validation failed",
"details": error_message,
},
)
success, error_msg = save_config(new_config)
@@ -264,14 +303,17 @@ async def update_config(request: Request, current_user: User = Depends(require_a
# and get_config handles errors by returning a default or None.
raise HTTPException(
status_code=500,
detail={"error": "Failed to retrieve configuration after saving"}
detail={"error": "Failed to retrieve configuration after saving"},
)
return updated_config_values
else:
raise HTTPException(
status_code=500,
detail={"error": "Failed to update configuration", "details": error_msg}
detail={
"error": "Failed to update configuration",
"details": error_msg,
},
)
except json.JSONDecodeError:
raise HTTPException(status_code=400, detail={"error": "Invalid JSON data"})
@@ -281,7 +323,7 @@ async def update_config(request: Request, current_user: User = Depends(require_a
logger.error(f"Error in POST/PUT /config: {e}", exc_info=True)
raise HTTPException(
status_code=500,
detail={"error": "Failed to update configuration", "details": str(e)}
detail={"error": "Failed to update configuration", "details": str(e)},
)
@@ -297,59 +339,70 @@ async def check_config_changes(current_user: User = Depends(require_admin_from_s
logger.error(f"Error in GET /config/check: {e}", exc_info=True)
raise HTTPException(
status_code=500,
detail={"error": "Failed to check configuration", "details": str(e)}
detail={"error": "Failed to check configuration", "details": str(e)},
)
@router.post("/validate")
async def validate_config_endpoint(request: Request, current_user: User = Depends(require_admin_from_state)):
async def validate_config_endpoint(
request: Request, current_user: User = Depends(require_admin_from_state)
):
"""Validate configuration without saving it."""
try:
config_data = await request.json()
if not isinstance(config_data, dict):
raise HTTPException(status_code=400, detail={"error": "Invalid config format"})
raise HTTPException(
status_code=400, detail={"error": "Invalid config format"}
)
is_valid, error_message = validate_config(config_data)
return {
"valid": is_valid,
"message": "Configuration is valid" if is_valid else error_message,
"details": error_message if not is_valid else None
"details": error_message if not is_valid else None,
}
except json.JSONDecodeError:
raise HTTPException(status_code=400, detail={"error": "Invalid JSON data"})
except Exception as e:
logger.error(f"Error in POST /config/validate: {e}", exc_info=True)
raise HTTPException(
status_code=500,
detail={"error": "Failed to validate configuration", "details": str(e)}
detail={"error": "Failed to validate configuration", "details": str(e)},
)
@router.post("/watch/validate")
async def validate_watch_config_endpoint(request: Request, current_user: User = Depends(require_admin_from_state)):
async def validate_watch_config_endpoint(
request: Request, current_user: User = Depends(require_admin_from_state)
):
"""Validate watch configuration without saving it."""
try:
watch_data = await request.json()
if not isinstance(watch_data, dict):
raise HTTPException(status_code=400, detail={"error": "Invalid watch config format"})
raise HTTPException(
status_code=400, detail={"error": "Invalid watch config format"}
)
is_valid, error_message = validate_watch_config(watch_data)
return {
"valid": is_valid,
"message": "Watch configuration is valid" if is_valid else error_message,
"details": error_message if not is_valid else None
"details": error_message if not is_valid else None,
}
except json.JSONDecodeError:
raise HTTPException(status_code=400, detail={"error": "Invalid JSON data"})
except Exception as e:
logger.error(f"Error in POST /config/watch/validate: {e}", exc_info=True)
raise HTTPException(
status_code=500,
detail={"error": "Failed to validate watch configuration", "details": str(e)}
detail={
"error": "Failed to validate watch configuration",
"details": str(e),
},
)
@@ -363,25 +416,35 @@ async def handle_watch_config(current_user: User = Depends(require_admin_from_st
logger.error(f"Error in GET /config/watch: {e}", exc_info=True)
raise HTTPException(
status_code=500,
detail={"error": "Failed to retrieve watch configuration", "details": str(e)}
detail={
"error": "Failed to retrieve watch configuration",
"details": str(e),
},
)
@router.post("/watch")
@router.put("/watch")
async def update_watch_config(request: Request, current_user: User = Depends(require_admin_from_state)):
async def update_watch_config(
request: Request, current_user: User = Depends(require_admin_from_state)
):
"""Handles POST/PUT requests to update the watch configuration."""
try:
new_watch_config = await request.json()
if not isinstance(new_watch_config, dict):
raise HTTPException(status_code=400, detail={"error": "Invalid watch config format"})
raise HTTPException(
status_code=400, detail={"error": "Invalid watch config format"}
)
# Validate watch configuration before saving
is_valid, error_message = validate_watch_config(new_watch_config)
if not is_valid:
raise HTTPException(
status_code=400,
detail={"error": "Watch configuration validation failed", "details": error_message}
detail={
"error": "Watch configuration validation failed",
"details": error_message,
},
)
success, error_msg = save_watch_config_http(new_watch_config)
@@ -390,15 +453,20 @@ async def update_watch_config(request: Request, current_user: User = Depends(req
else:
raise HTTPException(
status_code=500,
detail={"error": "Failed to update watch configuration", "details": error_msg}
detail={
"error": "Failed to update watch configuration",
"details": error_msg,
},
)
except json.JSONDecodeError:
raise HTTPException(status_code=400, detail={"error": "Invalid JSON data for watch config"})
raise HTTPException(
status_code=400, detail={"error": "Invalid JSON data for watch config"}
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error in POST/PUT /config/watch: {e}", exc_info=True)
raise HTTPException(
status_code=500,
detail={"error": "Failed to update watch configuration", "details": str(e)}
detail={"error": "Failed to update watch configuration", "details": str(e)},
)

View File

@@ -36,18 +36,42 @@ DEFAULT_MAIN_CONFIG = {
"realTime": False,
"customDirFormat": "%ar_album%/%album%",
"customTrackFormat": "%tracknum%. %music%",
"tracknum_padding": True,
"save_cover": True,
"tracknumPadding": True,
"saveCover": True,
"maxConcurrentDownloads": 3,
"maxRetries": 3,
"retryDelaySeconds": 5,
"retry_delay_increase": 5,
"retryDelayIncrease": 5,
"convertTo": None,
"bitrate": None,
"artist_separator": "; ",
"artistSeparator": "; ",
"recursiveQuality": False,
"watch": {},
}
def _migrate_legacy_keys(cfg: dict) -> tuple[dict, bool]:
"""Return a new config dict with legacy snake_case keys migrated to camelCase. Also normalizes nested watch if present."""
migrated = False
out = dict(cfg)
legacy_map = {
"tracknum_padding": "tracknumPadding",
"save_cover": "saveCover",
"retry_delay_increase": "retryDelayIncrease",
"artist_separator": "artistSeparator",
"recursive_quality": "recursiveQuality",
}
for legacy, camel in legacy_map.items():
if legacy in out and camel not in out:
out[camel] = out.pop(legacy)
migrated = True
# Ensure watch exists
if "watch" not in out or not isinstance(out.get("watch"), dict):
out["watch"] = {}
migrated = True
return out, migrated
def get_config_params():
"""
Get configuration parameters from the config file.
@@ -68,7 +92,10 @@ def get_config_params():
return DEFAULT_MAIN_CONFIG.copy() # Return a copy of defaults
with open(CONFIG_FILE_PATH, "r") as f:
config = json.load(f)
loaded = json.load(f) or {}
# Migrate legacy keys
config, migrated = _migrate_legacy_keys(loaded)
# Ensure all default keys are present in the loaded config
updated = False
@@ -77,9 +104,9 @@ def get_config_params():
config[key] = value
updated = True
if updated:
if updated or migrated:
logger.info(
f"Configuration at {CONFIG_FILE_PATH} was missing some default keys. Updated with defaults."
f"Configuration at {CONFIG_FILE_PATH} updated (defaults{' and migration' if migrated else ''})."
)
with open(CONFIG_FILE_PATH, "w") as f:
json.dump(config, f, indent=4)
@@ -99,7 +126,7 @@ config_params_values = get_config_params() # Renamed to avoid conflict with mod
MAX_CONCURRENT_DL = config_params_values.get("maxConcurrentDownloads", 3)
MAX_RETRIES = config_params_values.get("maxRetries", 3)
RETRY_DELAY = config_params_values.get("retryDelaySeconds", 5)
RETRY_DELAY_INCREASE = config_params_values.get("retry_delay_increase", 5)
RETRY_DELAY_INCREASE = config_params_values.get("retryDelayIncrease", 5)
# Define task queues
task_queues = {

View File

@@ -53,15 +53,24 @@ def get_config_params():
"realTime": config.get("realTime", False),
"customDirFormat": config.get("customDirFormat", "%ar_album%/%album%"),
"customTrackFormat": config.get("customTrackFormat", "%tracknum%. %music%"),
"tracknum_padding": config.get("tracknum_padding", True),
"save_cover": config.get("save_cover", True),
"tracknumPadding": config.get(
"tracknumPadding", config.get("tracknum_padding", True)
),
"saveCover": config.get("saveCover", config.get("save_cover", True)),
"maxRetries": config.get("maxRetries", 3),
"retryDelaySeconds": config.get("retryDelaySeconds", 5),
"retry_delay_increase": config.get("retry_delay_increase", 5),
"retryDelayIncrease": config.get(
"retryDelayIncrease", config.get("retry_delay_increase", 5)
),
"convertTo": config.get("convertTo", None),
"bitrate": config.get("bitrate", None),
"artist_separator": config.get("artist_separator", "; "),
"recursive_quality": config.get("recursive_quality", False),
"artistSeparator": config.get(
"artistSeparator", config.get("artist_separator", "; ")
),
"recursiveQuality": config.get(
"recursiveQuality", config.get("recursive_quality", False)
),
"watch": config.get("watch", {}),
}
except Exception as e:
logger.error(f"Error reading config for parameters: {e}")
@@ -75,15 +84,16 @@ def get_config_params():
"realTime": False,
"customDirFormat": "%ar_album%/%album%",
"customTrackFormat": "%tracknum%. %music%",
"tracknum_padding": True,
"save_cover": True,
"tracknumPadding": True,
"saveCover": True,
"maxRetries": 3,
"retryDelaySeconds": 5,
"retry_delay_increase": 5,
"retryDelayIncrease": 5,
"convertTo": None, # Default for conversion
"bitrate": None, # Default for bitrate
"artist_separator": "; ",
"recursive_quality": False,
"artistSeparator": "; ",
"recursiveQuality": False,
"watch": {},
}
@@ -381,10 +391,10 @@ class CeleryDownloadQueueManager:
),
"pad_tracks": self._parse_bool_param(
original_request.get("tracknum_padding"),
config_params["tracknum_padding"],
config_params["tracknumPadding"],
),
"save_cover": self._parse_bool_param(
original_request.get("save_cover"), config_params["save_cover"]
original_request.get("save_cover"), config_params["saveCover"]
),
"convertTo": original_request.get(
"convertTo", config_params.get("convertTo")
@@ -393,11 +403,11 @@ class CeleryDownloadQueueManager:
"bitrate", config_params.get("bitrate")
),
"artist_separator": original_request.get(
"artist_separator", config_params.get("artist_separator", "; ")
"artist_separator", config_params.get("artistSeparator", "; ")
),
"recursive_quality": self._parse_bool_param(
original_request.get("recursive_quality"),
config_params.get("recursive_quality", False),
config_params.get("recursiveQuality", False),
),
"retry_count": 0,
"original_request": original_request,

View File

@@ -480,7 +480,7 @@ def retry_task(task_id):
config_params = get_config_params()
max_retries = config_params.get("maxRetries", 3)
initial_retry_delay = config_params.get("retryDelaySeconds", 5)
retry_delay_increase = config_params.get("retry_delay_increase", 5)
retry_delay_increase = config_params.get("retryDelayIncrease", 5)
# Check if we've exceeded max retries
if retry_count >= max_retries:
@@ -1612,16 +1612,16 @@ def download_track(self, **task_data):
config_params.get("customTrackFormat", "%tracknum%. %music%"),
)
pad_tracks = task_data.get(
"pad_tracks", config_params.get("tracknum_padding", True)
"pad_tracks", config_params.get("tracknumPadding", True)
)
save_cover = task_data.get("save_cover", config_params.get("save_cover", True))
save_cover = task_data.get("save_cover", config_params.get("saveCover", True))
convert_to = task_data.get("convertTo", config_params.get("convertTo"))
bitrate = task_data.get("bitrate", config_params.get("bitrate"))
recursive_quality = task_data.get(
"recursive_quality", config_params.get("recursive_quality", False)
"recursive_quality", config_params.get("recursiveQuality", False)
)
artist_separator = task_data.get(
"artist_separator", config_params.get("artist_separator", "; ")
"artist_separator", config_params.get("artistSeparator", "; ")
)
# Execute the download - service is now determined from URL
@@ -1707,16 +1707,16 @@ def download_album(self, **task_data):
config_params.get("customTrackFormat", "%tracknum%. %music%"),
)
pad_tracks = task_data.get(
"pad_tracks", config_params.get("tracknum_padding", True)
"pad_tracks", config_params.get("tracknumPadding", True)
)
save_cover = task_data.get("save_cover", config_params.get("save_cover", True))
save_cover = task_data.get("save_cover", config_params.get("saveCover", True))
convert_to = task_data.get("convertTo", config_params.get("convertTo"))
bitrate = task_data.get("bitrate", config_params.get("bitrate"))
recursive_quality = task_data.get(
"recursive_quality", config_params.get("recursive_quality", False)
"recursive_quality", config_params.get("recursiveQuality", False)
)
artist_separator = task_data.get(
"artist_separator", config_params.get("artist_separator", "; ")
"artist_separator", config_params.get("artistSeparator", "; ")
)
# Execute the download - service is now determined from URL
@@ -1802,16 +1802,16 @@ def download_playlist(self, **task_data):
config_params.get("customTrackFormat", "%tracknum%. %music%"),
)
pad_tracks = task_data.get(
"pad_tracks", config_params.get("tracknum_padding", True)
"pad_tracks", config_params.get("tracknumPadding", True)
)
save_cover = task_data.get("save_cover", config_params.get("save_cover", True))
save_cover = task_data.get("save_cover", config_params.get("saveCover", True))
convert_to = task_data.get("convertTo", config_params.get("convertTo"))
bitrate = task_data.get("bitrate", config_params.get("bitrate"))
recursive_quality = task_data.get(
"recursive_quality", config_params.get("recursive_quality", False)
"recursive_quality", config_params.get("recursiveQuality", False)
)
artist_separator = task_data.get(
"artist_separator", config_params.get("artist_separator", "; ")
"artist_separator", config_params.get("artistSeparator", "; ")
)
# Get retry parameters
@@ -1819,7 +1819,7 @@ def download_playlist(self, **task_data):
"initial_retry_delay", config_params.get("retryDelaySeconds", 5)
)
retry_delay_increase = task_data.get(
"retry_delay_increase", config_params.get("retry_delay_increase", 5)
"retry_delay_increase", config_params.get("retryDelayIncrease", 5)
)
max_retries = task_data.get("max_retries", config_params.get("maxRetries", 3))

View File

@@ -41,6 +41,7 @@ EXPECTED_PLAYLIST_TRACKS_COLUMNS = {
"is_present_in_spotify": "INTEGER DEFAULT 1",
"last_seen_in_spotify": "INTEGER",
"snapshot_id": "TEXT", # Track the snapshot_id when this track was added/updated
"final_path": "TEXT", # Absolute path of the downloaded file from deezspot callback
}
EXPECTED_WATCHED_ARTISTS_COLUMNS = {
@@ -883,8 +884,12 @@ def add_single_track_to_playlist_db(
if not album_artist_names:
album_artist_names = "N/A"
# Extract final_path from status_info if present (new deezspot field)
status_info = callback_data.get("status_info", {}) or {}
final_path = status_info.get("final_path")
logger.debug(
f"Extracted metadata from deezspot callback for '{track_name}': track_number={track_number}"
f"Extracted metadata from deezspot callback for '{track_name}': track_number={track_number}, final_path={final_path}"
)
except Exception as e:
@@ -921,6 +926,7 @@ def add_single_track_to_playlist_db(
1,
current_time,
snapshot_id,
final_path,
)
try:
with _get_playlists_db_connection() as conn: # Use playlists connection
@@ -929,8 +935,8 @@ def add_single_track_to_playlist_db(
cursor.execute(
f"""
INSERT OR REPLACE INTO {table_name}
(spotify_track_id, title, artist_names, album_name, album_artist_names, track_number, album_spotify_id, duration_ms, added_at_playlist, added_to_db, is_present_in_spotify, last_seen_in_spotify, snapshot_id)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
(spotify_track_id, title, artist_names, album_name, album_artist_names, track_number, album_spotify_id, duration_ms, added_at_playlist, added_to_db, is_present_in_spotify, last_seen_in_spotify, snapshot_id, final_path)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
track_data_tuple,
)

View File

@@ -2,7 +2,6 @@ import time
import threading
import logging
import json
import os
import re
from pathlib import Path
from typing import Any, List, Dict
@@ -32,69 +31,122 @@ from routes.utils.get_info import (
from routes.utils.celery_queue_manager import download_queue_manager
logger = logging.getLogger(__name__)
CONFIG_FILE_PATH = Path("./data/config/watch.json")
MAIN_CONFIG_FILE_PATH = Path("./data/config/main.json")
WATCH_OLD_FILE_PATH = Path("./data/config/watch.json")
STOP_EVENT = threading.Event()
# Format mapping for audio file conversions
AUDIO_FORMAT_EXTENSIONS = {
"mp3": ".mp3",
"flac": ".flac",
"m4a": ".m4a",
"aac": ".m4a",
"ogg": ".ogg",
"wav": ".wav",
}
DEFAULT_WATCH_CONFIG = {
"enabled": False,
"watchPollIntervalSeconds": 3600,
"max_tracks_per_run": 50, # For playlists
"watchedArtistAlbumGroup": ["album", "single"], # Default for artists
"delay_between_playlists_seconds": 2,
"delay_between_artists_seconds": 5, # Added for artists
"use_snapshot_id_checking": True, # Enable snapshot_id checking for efficiency
"maxTracksPerRun": 50,
"watchedArtistAlbumGroup": ["album", "single"],
"delayBetweenPlaylistsSeconds": 2,
"delayBetweenArtistsSeconds": 5,
"useSnapshotIdChecking": True,
}
def get_watch_config():
"""Loads the watch configuration from watch.json.
Creates the file with defaults if it doesn't exist.
Ensures all default keys are present in the loaded config.
"""Loads the watch configuration from main.json's 'watch' key (camelCase).
Applies defaults and migrates legacy snake_case keys if found.
"""
try:
# Ensure ./data/config directory exists
CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
if not CONFIG_FILE_PATH.exists():
logger.info(
f"{CONFIG_FILE_PATH} not found. Creating with default watch config."
)
with open(CONFIG_FILE_PATH, "w") as f:
json.dump(DEFAULT_WATCH_CONFIG, f, indent=2)
MAIN_CONFIG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
if not MAIN_CONFIG_FILE_PATH.exists():
# Create main config with default watch block
with open(MAIN_CONFIG_FILE_PATH, "w") as f:
json.dump({"watch": DEFAULT_WATCH_CONFIG}, f, indent=2)
return DEFAULT_WATCH_CONFIG.copy()
with open(CONFIG_FILE_PATH, "r") as f:
config = json.load(f)
with open(MAIN_CONFIG_FILE_PATH, "r") as f:
main_cfg = json.load(f) or {}
updated = False
for key, value in DEFAULT_WATCH_CONFIG.items():
if key not in config:
config[key] = value
updated = True
watch_cfg = main_cfg.get("watch", {}) or {}
if updated:
logger.info(
f"Watch configuration at {CONFIG_FILE_PATH} was missing some default keys. Updated with defaults."
)
with open(CONFIG_FILE_PATH, "w") as f:
json.dump(config, f, indent=2)
return config
# Detect legacy watch.json and migrate it into main.json's watch key
legacy_file_found = False
legacy_migrated_ok = False
if WATCH_OLD_FILE_PATH.exists():
try:
with open(WATCH_OLD_FILE_PATH, "r") as wf:
legacy_watch = json.load(wf) or {}
# Map legacy snake_case keys to camelCase
legacy_to_camel_watch = {
"enabled": "enabled",
"watchPollIntervalSeconds": "watchPollIntervalSeconds",
"watch_poll_interval_seconds": "watchPollIntervalSeconds",
"watchedArtistAlbumGroup": "watchedArtistAlbumGroup",
"watched_artist_album_group": "watchedArtistAlbumGroup",
"delay_between_playlists_seconds": "delayBetweenPlaylistsSeconds",
"delay_between_artists_seconds": "delayBetweenArtistsSeconds",
"use_snapshot_id_checking": "useSnapshotIdChecking",
"max_tracks_per_run": "maxTracksPerRun",
}
migrated_watch = {}
for k, v in legacy_watch.items():
target_key = legacy_to_camel_watch.get(k, k)
migrated_watch[target_key] = v
# Merge with existing watch (legacy overrides existing)
watch_cfg.update(migrated_watch)
migrated = True
legacy_file_found = True
legacy_migrated_ok = True
except Exception as le:
logger.error(
f"Failed to migrate legacy watch.json: {le}", exc_info=True
)
# Migration: map legacy keys inside watch block if present
# Keep camelCase names in memory
legacy_to_camel = {
"watch_poll_interval_seconds": "watchPollIntervalSeconds",
"watched_artist_album_group": "watchedArtistAlbumGroup",
"delay_between_playlists_seconds": "delayBetweenPlaylistsSeconds",
"delay_between_artists_seconds": "delayBetweenArtistsSeconds",
"use_snapshot_id_checking": "useSnapshotIdChecking",
"max_tracks_per_run": "maxTracksPerRun",
}
migrated = False
for legacy_key, camel_key in legacy_to_camel.items():
if legacy_key in watch_cfg and camel_key not in watch_cfg:
watch_cfg[camel_key] = watch_cfg.pop(legacy_key)
migrated = True
# Ensure defaults
for k, v in DEFAULT_WATCH_CONFIG.items():
if k not in watch_cfg:
watch_cfg[k] = v
if migrated or legacy_file_found:
# Persist migration back to main.json
main_cfg["watch"] = watch_cfg
with open(MAIN_CONFIG_FILE_PATH, "w") as f:
json.dump(main_cfg, f, indent=2)
# Rename legacy file to avoid re-migration next start
if legacy_file_found and legacy_migrated_ok:
try:
WATCH_OLD_FILE_PATH.rename(
WATCH_OLD_FILE_PATH.with_suffix(".migrated")
)
logger.info(
f"Legacy watch.json migrated and renamed to {WATCH_OLD_FILE_PATH.with_suffix('.migrated')}"
)
except Exception:
try:
WATCH_OLD_FILE_PATH.unlink()
logger.info("Legacy watch.json migrated and removed.")
except Exception:
pass
return watch_cfg
except Exception as e:
logger.error(
f"Error loading or creating watch config at {CONFIG_FILE_PATH}: {e}",
f"Error loading watch config from {MAIN_CONFIG_FILE_PATH}: {e}",
exc_info=True,
)
return DEFAULT_WATCH_CONFIG.copy() # Fallback
return DEFAULT_WATCH_CONFIG.copy()
def construct_spotify_url(item_id, item_type="track"):
@@ -267,7 +319,7 @@ def check_watched_playlists(specific_playlist_id: str = None):
f"Playlist Watch Manager: Starting check. Specific playlist: {specific_playlist_id or 'All'}"
)
config = get_watch_config()
use_snapshot_checking = config.get("use_snapshot_id_checking", True)
use_snapshot_checking = config.get("useSnapshotIdChecking", True)
if specific_playlist_id:
playlist_obj = get_watched_playlist(specific_playlist_id)
@@ -546,7 +598,7 @@ def check_watched_playlists(specific_playlist_id: str = None):
exc_info=True,
)
time.sleep(max(1, config.get("delay_between_playlists_seconds", 2)))
time.sleep(max(1, config.get("delayBetweenPlaylistsSeconds", 2)))
logger.info("Playlist Watch Manager: Finished checking all watched playlists.")
@@ -766,7 +818,7 @@ def check_watched_artists(specific_artist_id: str = None):
exc_info=True,
)
time.sleep(max(1, config.get("delay_between_artists_seconds", 5)))
time.sleep(max(1, config.get("delayBetweenArtistsSeconds", 5)))
logger.info("Artist Watch Manager: Finished checking all watched artists.")
@@ -920,7 +972,7 @@ def get_playlist_tracks_for_m3u(playlist_spotify_id: str) -> List[Dict[str, Any]
# Get all tracks that are present in Spotify
cursor.execute(f"""
SELECT spotify_track_id, title, artist_names, album_name,
album_artist_names, track_number, duration_ms
album_artist_names, track_number, duration_ms, final_path
FROM {table_name}
WHERE is_present_in_spotify = 1
ORDER BY track_number, title
@@ -938,6 +990,9 @@ def get_playlist_tracks_for_m3u(playlist_spotify_id: str) -> List[Dict[str, Any]
or "Unknown Artist",
"track_number": row["track_number"] or 0,
"duration_ms": row["duration_ms"] or 0,
"final_path": row["final_path"]
if "final_path" in row.keys()
else None,
}
)
@@ -951,136 +1006,6 @@ def get_playlist_tracks_for_m3u(playlist_spotify_id: str) -> List[Dict[str, Any]
return tracks
def generate_track_file_path(
    track: Dict[str, Any],
    custom_dir_format: str,
    custom_track_format: str,
    convert_to: str = None,
) -> str:
    """
    Generate the file path for a track based on custom format strings.

    This mimics the path generation logic used by the deezspot library.

    Args:
        track: Track metadata dictionary. Expected keys: title, artist_names,
            album_name, album_artist_names, track_number, duration_ms.
            Values may be NULL/None when sourced from the DB.
        custom_dir_format: Directory format string (e.g., "%ar_album%/%album%")
        custom_track_format: Track format string (e.g., "%tracknum%. %music% - %artist%")
        convert_to: Target conversion format (e.g., "mp3", "flac", "m4a");
            determines the appended extension when the format string does not
            already end in a known audio extension.

    Returns:
        Generated file path relative to the output directory. On any
        unexpected error a sanitized fallback path is returned instead of
        raising, so callers never have to handle exceptions from here.
    """
    try:
        # Extract metadata. DB rows can contain NULLs with the key present,
        # so `dict.get(key, default)` is not enough — coerce falsy values to
        # safe defaults instead of letting a TypeError divert us into the
        # generic fallback path below.
        artist_names = track.get("artist_names") or "Unknown Artist"
        album_name = track.get("album_name") or "Unknown Album"
        album_artist_names = track.get("album_artist_names") or "Unknown Artist"
        title = track.get("title") or "Unknown Track"

        def to_int(value) -> int:
            # NULL or malformed numeric fields become 0 rather than crashing
            # the ":02d" format / comparisons further down.
            try:
                return int(value)
            except (TypeError, ValueError):
                return 0

        track_number = to_int(track.get("track_number"))
        duration_ms = to_int(track.get("duration_ms"))

        # Use album artist for directory structure, main artist for track name
        main_artist = artist_names.split(", ")[0] if artist_names else "Unknown Artist"
        album_artist = (
            album_artist_names.split(", ")[0] if album_artist_names else main_artist
        )

        def clean_name(name):
            # Remove or replace characters that are problematic in filenames
            name = re.sub(r'[<>:"/\\|?*]', "_", str(name))
            name = re.sub(r"[\x00-\x1f]", "", name)  # Remove control characters
            return name.strip()

        clean_album_artist = clean_name(album_artist)
        clean_album = clean_name(album_name)
        clean_main_artist = clean_name(main_artist)
        clean_title = clean_name(title)

        # Prepare artist and album artist lists for the indexed
        # %artist_N% / %ar_album_N% placeholders.
        artist_list = [
            clean_name(a) for a in re.split(r"\s*,\s*", artist_names or "") if a.strip()
        ] or [clean_main_artist]
        album_artist_list = [
            clean_name(a)
            for a in re.split(r"\s*,\s*", album_artist_names or "")
            if a.strip()
        ] or [clean_album_artist]

        # Prepare placeholder replacements
        replacements = {
            # Common placeholders
            "%music%": clean_title,
            "%artist%": clean_main_artist,
            "%album%": clean_album,
            "%ar_album%": clean_album_artist,
            "%tracknum%": f"{track_number:02d}" if track_number > 0 else "00",
            "%year%": "",  # Not available in current DB schema
            # Additional placeholders (not available in current DB schema, using defaults)
            "%discnum%": "01",  # Default to disc 1
            "%date%": "",  # Not available
            "%genre%": "",  # Not available
            "%isrc%": "",  # Not available
            "%explicit%": "",  # Not available
            "%duration%": str(duration_ms // 1000)
            if duration_ms > 0
            else "0",  # Convert ms to seconds
        }

        # Only materialize the indexed placeholders actually referenced by
        # the format strings; out-of-range indices fall back to the first
        # (primary) artist.
        combined_formats = custom_dir_format + custom_track_format
        artist_indices = {
            int(i) for i in re.findall(r"%artist_(\d+)%", combined_formats)
        }
        ar_album_indices = {
            int(i) for i in re.findall(r"%ar_album_(\d+)%", combined_formats)
        }
        for i in artist_indices:
            idx = i - 1  # placeholders are 1-based
            value = artist_list[idx] if 0 <= idx < len(artist_list) else artist_list[0]
            replacements[f"%artist_{i}%"] = value
        for i in ar_album_indices:
            idx = i - 1
            value = (
                album_artist_list[idx]
                if 0 <= idx < len(album_artist_list)
                else album_artist_list[0]
            )
            replacements[f"%ar_album_{i}%"] = value

        # Apply replacements to both format strings
        dir_path = custom_dir_format
        track_filename = custom_track_format
        for placeholder, value in replacements.items():
            dir_path = dir_path.replace(placeholder, value)
            track_filename = track_filename.replace(placeholder, value)

        # Combine and clean up path
        full_path = os.path.normpath(os.path.join(dir_path, track_filename))

        # Determine file extension based on convert_to setting or default to mp3
        if not any(
            full_path.lower().endswith(ext)
            for ext in [".mp3", ".flac", ".m4a", ".ogg", ".wav"]
        ):
            if convert_to:
                full_path += AUDIO_FORMAT_EXTENSIONS.get(convert_to.lower(), ".mp3")
            else:
                full_path += ".mp3"  # Default fallback

        return full_path

    except Exception as e:
        logger.error(
            f"Error generating file path for track {track.get('title', 'Unknown')}: {e}"
        )
        # Return a fallback path with appropriate extension so callers still
        # get a usable (if generic) location.
        safe_title = re.sub(
            r'[<>:"/\\|?*\x00-\x1f]', "_", str(track.get("title", "Unknown Track"))
        )
        if convert_to:
            extension = AUDIO_FORMAT_EXTENSIONS.get(convert_to.lower(), ".mp3")
        else:
            extension = ".mp3"
        return f"Unknown Artist/Unknown Album/{safe_title}{extension}"
def update_playlist_m3u_file(playlist_spotify_id: str):
"""
Generate/update the m3u file for a watched playlist based on tracks in the database.
@@ -1100,13 +1025,7 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
playlist_name = playlist_info.get("name", "Unknown Playlist")
# Get configuration settings
from routes.utils.celery_config import get_config_params
config = get_config_params()
custom_dir_format = config.get("customDirFormat", "%ar_album%/%album%")
custom_track_format = config.get("customTrackFormat", "%tracknum%. %music%")
convert_to = config.get("convertTo") # Get conversion format setting
output_dir = (
"./downloads" # This matches the output_dir used in download functions
)
@@ -1131,20 +1050,26 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
# Generate m3u content
m3u_lines = ["#EXTM3U"]
included_count = 0
skipped_missing_final_path = 0
for track in tracks:
# Generate file path for this track
track_file_path = generate_track_file_path(
track, custom_dir_format, custom_track_format, convert_to
)
# Create relative path from m3u file location to track file
# M3U file is in ./downloads/playlists/
# Track files are in ./downloads/{custom_dir_format}/
relative_path = os.path.join("..", track_file_path)
relative_path = relative_path.replace(
"\\", "/"
) # Use forward slashes for m3u compatibility
# Use final_path from deezspot summary and convert from ./downloads to ../ relative path
final_path = track.get("final_path")
if not final_path:
skipped_missing_final_path += 1
continue
normalized = str(final_path).replace("\\", "/")
if normalized.startswith("./downloads/"):
relative_path = normalized.replace("./downloads/", "../", 1)
elif "/downloads/" in normalized.lower():
idx = normalized.lower().rfind("/downloads/")
relative_path = "../" + normalized[idx + len("/downloads/") :]
elif normalized.startswith("downloads/"):
relative_path = "../" + normalized[len("downloads/") :]
else:
# As per assumption, everything is under downloads; if not, keep as-is
relative_path = normalized
# Add EXTINF line with track duration and title
duration_seconds = (
@@ -1156,13 +1081,14 @@ def update_playlist_m3u_file(playlist_spotify_id: str):
m3u_lines.append(f"#EXTINF:{duration_seconds},{artist_and_title}")
m3u_lines.append(relative_path)
included_count += 1
# Write m3u file
with open(m3u_file_path, "w", encoding="utf-8") as f:
f.write("\n".join(m3u_lines))
logger.info(
f"Updated m3u file for playlist '{playlist_name}' at {m3u_file_path} with {len(tracks)} tracks{f' (format: {convert_to})' if convert_to else ''}."
f"Updated m3u file for playlist '{playlist_name}' at {m3u_file_path} with {included_count} entries.{f' Skipped {skipped_missing_final_path} without final_path.' if skipped_missing_final_path else ''}"
)
except Exception as e:

View File

@@ -21,7 +21,7 @@ interface DownloadSettings {
hlsThreads: number;
deezerQuality: "MP3_128" | "MP3_320" | "FLAC";
spotifyQuality: "NORMAL" | "HIGH" | "VERY_HIGH";
recursiveQuality?: boolean; // frontend field (mapped to recursive_quality on save)
recursiveQuality: boolean; // frontend field (sent as camelCase to backend)
}
interface WatchConfig {
@@ -50,13 +50,8 @@ const CONVERSION_FORMATS: Record<string, string[]> = {
};
// --- API Functions ---
const saveDownloadConfig = async (data: Partial<DownloadSettings> & { recursive_quality?: boolean }) => {
// Map camelCase to snake_case for backend compatibility
const saveDownloadConfig = async (data: Partial<DownloadSettings>) => {
const payload: any = { ...data };
if (typeof data.recursiveQuality !== "undefined") {
payload.recursive_quality = data.recursiveQuality;
delete payload.recursiveQuality;
}
const { data: response } = await authApiClient.client.post("/config", payload);
return response;
};

View File

@@ -25,10 +25,6 @@ interface FormattingTabProps {
// --- API Functions ---
const saveFormattingConfig = async (data: Partial<FormattingSettings>) => {
const payload: any = { ...data };
if (typeof data.artistSeparator !== "undefined") {
payload.artist_separator = data.artistSeparator;
delete payload.artistSeparator;
}
const { data: response } = await authApiClient.client.post("/config", payload);
return response;
};