From 49abfbc03fdb57bbbf3fba1fd87b809b40aa7787 Mon Sep 17 00:00:00 2001
From: Michael Usher
Date: Mon, 18 Aug 2025 18:10:58 +1000
Subject: [PATCH 01/14] fix: error when playlist images attribute is null

---
 spotizerr-ui/src/routes/playlist.tsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/spotizerr-ui/src/routes/playlist.tsx b/spotizerr-ui/src/routes/playlist.tsx
index e9171d8..342a65a 100644
--- a/spotizerr-ui/src/routes/playlist.tsx
+++ b/spotizerr-ui/src/routes/playlist.tsx
@@ -190,7 +190,7 @@ export const Playlist = () => {
         {playlistMetadata.name}
@@ -255,7 +255,7 @@ export const Playlist = () => {
           {index + 1}
           {track.album.name}

From 34ad69f40d7f37365ac9c9ec8c50555df99fd5fe Mon Sep 17 00:00:00 2001
From: Xoconoch
Date: Mon, 18 Aug 2025 09:21:10 -0600
Subject: [PATCH 02/14] Add contributing guidelines

---
 CONTRIBUTING.md | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 CONTRIBUTING.md

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..a47f964
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,5 @@
+# Contributing guidelines
+
+- All pull requests must be made to the `dev` branch
+
+- When implementing a feature related to downloading, follow the rule of choice: every download must come from an active decision made by the user (e.g. clicking a download button, or explicitly requesting a whole artist's discography). This rules out features such as recommendation algorithms and auto-generated playlists.

From 8619d1d50796b630e51cbae6fc44c88e5d84cc46 Mon Sep 17 00:00:00 2001
From: Xoconoch
Date: Mon, 18 Aug 2025 09:24:50 -0600
Subject: [PATCH 03/14] Add guidelines to readme

---
 README.md | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index bd1653f..5c6ecc6 100644
--- a/README.md
+++ b/README.md
@@ -145,7 +145,7 @@ If you self-host a music server with other users than yourself, you almost certa
 ### Monitor an Artist
 
 1. Search for the artist
-2. Click "Add to Watchlist" 
+2. Click "Add to Watchlist"
 3. Configure which release types to monitor (albums, singles, etc.)
 4. New releases will be automatically downloaded
@@ -244,6 +244,10 @@ This software is for educational purposes and personal use only. Ensure you comp
 - Downloaded files retain original metadata
 - Service limitations apply based on account types
 
+### Contributing
+
+See [CONTRIBUTING.md](./CONTRIBUTING.md)
+
 ## 🙏 Acknowledgements
 
-This project was inspired by the amazing [deezspot library](https://github.com/jakiepari/deezspot). Although their creators are in no way related to Spotizerr, they still deserve credit for their excellent work. 
+This project was inspired by the amazing [deezspot library](https://github.com/jakiepari/deezspot). Although their creators are in no way related to Spotizerr, they still deserve credit for their excellent work.
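The next patch (PATCH 04/14, "fix: #275") threads the authenticated requester's username from the route handler down into every album task it queues. A minimal sketch of that flow, assuming stand-ins for the real queue internals — only the `username` guard and the `task_data` field names mirror what is visible in the diff below; the queue class itself is hypothetical:

```python
import uuid
from typing import Any, Optional


class DownloadQueueManager:
    """Hypothetical stand-in for the project's download_queue_manager."""

    def __init__(self) -> None:
        self.tasks: dict[str, dict[str, Any]] = {}

    def add_task(self, task_data: dict[str, Any]) -> str:
        # Assign an ID and remember the task; the real manager also
        # deduplicates and dispatches work to Celery workers.
        task_id = str(uuid.uuid4())
        self.tasks[task_id] = task_data
        return task_id


download_queue_manager = DownloadQueueManager()


def queue_album(url: str, name: str, artist: str, username: Optional[str] = None) -> str:
    """Queue one album download, tagging it with the requesting user if known."""
    task_data = {
        "download_type": "album",  # assumed field; only "artist"/"orig_request"/"username" appear in the diff
        "url": url,
        "name": name,
        "artist": artist,
    }
    # Same guard the patch adds: attach a username only when one was supplied,
    # so anonymous or legacy callers keep producing untagged tasks.
    if username:
        task_data["username"] = username
    return download_queue_manager.add_task(task_data)


# e.g. from a route handler: queue_album(url, name, artist, current_user.username)
```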
From 015ae024a60592a4755d9d0d33b4a59e53dce5ae Mon Sep 17 00:00:00 2001
From: Xoconoch
Date: Tue, 19 Aug 2025 09:44:58 -0500
Subject: [PATCH 04/14] fix: #275

---
 routes/content/artist.py | 156 +++++++++++++++++++++++++--------------
 routes/utils/artist.py   |  17 ++++-
 2 files changed, 114 insertions(+), 59 deletions(-)

diff --git a/routes/content/artist.py b/routes/content/artist.py
index 33c14db..e967ca8 100644
--- a/routes/content/artist.py
+++ b/routes/content/artist.py
@@ -24,7 +24,7 @@ from routes.utils.watch.manager import check_watched_artists, get_watch_config
 from routes.utils.get_info import get_spotify_info
 
 # Import authentication dependencies
-from routes.auth.middleware import require_auth_from_state, require_admin_from_state, User
+from routes.auth.middleware import require_auth_from_state, User
 
 router = APIRouter()
 
@@ -43,7 +43,11 @@ def log_json(message_dict):
 
 
 @router.get("/download/{artist_id}")
-async def handle_artist_download(artist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
+async def handle_artist_download(
+    artist_id: str,
+    request: Request,
+    current_user: User = Depends(require_auth_from_state),
+):
     """
     Enqueues album download tasks for the given artist.
     Expected query parameters:
@@ -58,8 +62,7 @@ async def handle_artist_download(artist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
     # Validate required parameters
     if not url:  # This check is mostly for safety, as url is constructed
         return JSONResponse(
-            content={"error": "Missing required parameter: url"},
-            status_code=400
+            content={"error": "Missing required parameter: url"}, status_code=400
         )
 
     try:
@@ -68,7 +71,10 @@ async def handle_artist_download(artist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
 
         # Delegate to the download_artist_albums function which will handle album filtering
         successfully_queued_albums, duplicate_albums = download_artist_albums(
-            url=url, album_type=album_type, request_args=dict(request.query_params)
+            url=url,
+            album_type=album_type,
+            request_args=dict(request.query_params),
+            username=current_user.username,
         )
 
         # Return the list of album task IDs.
@@ -85,7 +91,7 @@ async def handle_artist_download(artist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
 
         return JSONResponse(
             content=response_data,
-            status_code=202  # Still 202 Accepted as some operations may have succeeded
+            status_code=202,  # Still 202 Accepted as some operations may have succeeded
         )
     except Exception as e:
         return JSONResponse(
@@ -94,7 +100,7 @@ async def handle_artist_download(artist_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
                 "message": str(e),
                 "traceback": traceback.format_exc(),
             },
-            status_code=500
+            status_code=500,
         )
 
 
@@ -106,12 +112,14 @@ async def cancel_artist_download():
     """
     return JSONResponse(
         content={"error": "Artist download cancellation is not supported."},
-        status_code=400
+        status_code=400,
     )
 
 
 @router.get("/info")
-async def get_artist_info(request: Request, current_user: User = Depends(require_auth_from_state)):
+async def get_artist_info(
+    request: Request, current_user: User = Depends(require_auth_from_state)
+):
     """
     Retrieves Spotify artist metadata given a Spotify artist ID.
     Expects a query parameter 'id' with the Spotify artist ID.
@@ -119,27 +127,25 @@ async def get_artist_info(request: Request, current_user: User = Depends(require_auth_from_state)):
     spotify_id = request.query_params.get("id")
     if not spotify_id:
-        return JSONResponse(
-            content={"error": "Missing parameter: id"},
-            status_code=400
-        )
+        return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400)
 
     try:
         # Get artist metadata first
         artist_metadata = get_spotify_info(spotify_id, "artist")
-        
+
         # Get artist discography for albums
         artist_discography = get_spotify_info(spotify_id, "artist_discography")
-        
+
         # Combine metadata with discography
-        artist_info = {
-            **artist_metadata,
-            "albums": artist_discography
-        }
+        artist_info = {**artist_metadata, "albums": artist_discography}
 
         # If artist_info is successfully fetched and has albums,
         # check if the artist is watched and augment album items with is_locally_known status
-        if artist_info and artist_info.get("albums") and artist_info["albums"].get("items"):
+        if (
+            artist_info
+            and artist_info.get("albums")
+            and artist_info["albums"].get("items")
+        ):
             watched_artist_details = get_watched_artist(
                 spotify_id
             )  # spotify_id is the artist ID
@@ -155,13 +161,11 @@ async def get_artist_info(request: Request, current_user: User = Depends(require_auth_from_state)):
         # If not watched, or no albums, is_locally_known will not be added.
         # Frontend should handle absence of this key as false.
 
-        return JSONResponse(
-            content=artist_info, status_code=200
-        )
+        return JSONResponse(content=artist_info, status_code=200)
     except Exception as e:
         return JSONResponse(
             content={"error": str(e), "traceback": traceback.format_exc()},
-            status_code=500
+            status_code=500,
         )
 
 
@@ -169,11 +173,16 @@ async def get_artist_info(request: Request, current_user: User = Depends(require_auth_from_state)):
 
 
 @router.put("/watch/{artist_spotify_id}")
-async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
+async def add_artist_to_watchlist(
+    artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
+):
     """Adds an artist to the watchlist."""
     watch_config = get_watch_config()
     if not watch_config.get("enabled", False):
-        raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
+        raise HTTPException(
+            status_code=403,
+            detail={"error": "Watch feature is currently disabled globally."},
+        )
 
     logger.info(f"Attempting to add artist {artist_spotify_id} to watchlist.")
     try:
@@ -182,7 +191,7 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
 
         # Get artist metadata directly for name and basic info
         artist_metadata = get_spotify_info(artist_spotify_id, "artist")
-        
+
         # Get artist discography for album count
         artist_album_list_data = get_spotify_info(
             artist_spotify_id, "artist_discography"
         )
@@ -197,7 +206,7 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
                 status_code=404,
                 detail={
                     "error": f"Could not fetch artist metadata for {artist_spotify_id} to initiate watch."
-                }
+                },
             )
 
         # Check if we got album data
@@ -213,7 +222,9 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
             "id": artist_spotify_id,
             "name": artist_metadata.get("name", "Unknown Artist"),
             "albums": {  # Mimic structure if add_artist_db expects it for total_albums
-                "total": artist_album_list_data.get("total", 0) if artist_album_list_data else 0
+                "total": artist_album_list_data.get("total", 0)
+                if artist_album_list_data
+                else 0
             },
             # Add any other fields add_artist_db might expect from a true artist object if necessary
         }
@@ -232,11 +243,16 @@ async def add_artist_to_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
         logger.error(
             f"Error adding artist {artist_spotify_id} to watchlist: {e}", exc_info=True
         )
-        raise HTTPException(status_code=500, detail={"error": f"Could not add artist to watchlist: {str(e)}"})
+        raise HTTPException(
+            status_code=500,
+            detail={"error": f"Could not add artist to watchlist: {str(e)}"},
+        )
 
 
 @router.get("/watch/{artist_spotify_id}/status")
-async def get_artist_watch_status(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
+async def get_artist_watch_status(
+    artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
+):
     """Checks if a specific artist is being watched."""
     logger.info(f"Checking watch status for artist {artist_spotify_id}.")
     try:
@@ -250,22 +266,29 @@ async def get_artist_watch_status(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
             f"Error checking watch status for artist {artist_spotify_id}: {e}",
             exc_info=True,
         )
-        raise HTTPException(status_code=500, detail={"error": f"Could not check watch status: {str(e)}"})
+        raise HTTPException(
+            status_code=500, detail={"error": f"Could not check watch status: {str(e)}"}
+        )
 
 
 @router.delete("/watch/{artist_spotify_id}")
-async def remove_artist_from_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
+async def remove_artist_from_watchlist(
+    artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
+):
     """Removes an artist from the watchlist."""
     watch_config = get_watch_config()
     if not watch_config.get("enabled", False):
-        raise HTTPException(status_code=403, detail={"error": "Watch feature is currently disabled globally."})
+        raise HTTPException(
+            status_code=403,
+            detail={"error": "Watch feature is currently disabled globally."},
+        )
 
     logger.info(f"Attempting to remove artist {artist_spotify_id} from watchlist.")
     try:
         if not get_watched_artist(artist_spotify_id):
             raise HTTPException(
                 status_code=404,
-                detail={"error": f"Artist {artist_spotify_id} not found in watchlist."}
+                detail={"error": f"Artist {artist_spotify_id} not found in watchlist."},
             )
 
         remove_artist_db(artist_spotify_id)
@@ -280,23 +303,30 @@ async def remove_artist_from_watchlist(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
         )
         raise HTTPException(
             status_code=500,
-            detail={"error": f"Could not remove artist from watchlist: {str(e)}"}
+            detail={"error": f"Could not remove artist from watchlist: {str(e)}"},
         )
 
 
 @router.get("/watch/list")
-async def list_watched_artists_endpoint(current_user: User = Depends(require_auth_from_state)):
+async def list_watched_artists_endpoint(
+    current_user: User = Depends(require_auth_from_state),
+):
     """Lists all artists currently in the watchlist."""
     try:
        artists = get_watched_artists()
        return [dict(artist) for artist in artists]
    except Exception as e:
        logger.error(f"Error listing watched artists: {e}", exc_info=True)
-        raise HTTPException(status_code=500, detail={"error": f"Could not list watched artists: {str(e)}"})
+        raise HTTPException(
+            status_code=500,
+            detail={"error": f"Could not list watched artists: {str(e)}"},
+        )
 
 
 @router.post("/watch/trigger_check")
-async def trigger_artist_check_endpoint(current_user: User = Depends(require_auth_from_state)):
+async def trigger_artist_check_endpoint(
+    current_user: User = Depends(require_auth_from_state),
+):
     """Manually triggers the artist checking mechanism for all watched artists."""
     watch_config = get_watch_config()
     if not watch_config.get("enabled", False):
@@ -304,7 +334,7 @@ async def trigger_artist_check_endpoint(current_user: User = Depends(require_auth_from_state)):
             status_code=403,
             detail={
                 "error": "Watch feature is currently disabled globally. Cannot trigger check."
-            }
+            },
         )
 
     logger.info("Manual trigger for artist check received for all artists.")
@@ -320,12 +350,14 @@ async def trigger_artist_check_endpoint(current_user: User = Depends(require_auth_from_state)):
         )
         raise HTTPException(
             status_code=500,
-            detail={"error": f"Could not trigger artist check for all: {str(e)}"}
+            detail={"error": f"Could not trigger artist check for all: {str(e)}"},
         )
 
 
 @router.post("/watch/trigger_check/{artist_spotify_id}")
-async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
+async def trigger_specific_artist_check_endpoint(
+    artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)
+):
     """Manually triggers the artist checking mechanism for a specific artist."""
     watch_config = get_watch_config()
     if not watch_config.get("enabled", False):
@@ -333,7 +365,7 @@ async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
             status_code=403,
             detail={
                 "error": "Watch feature is currently disabled globally. Cannot trigger check."
-            }
+            },
         )
 
     logger.info(
@@ -349,7 +381,7 @@ async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
                 status_code=404,
                 detail={
                     "error": f"Artist {artist_spotify_id} is not in the watchlist. Add it first."
-                }
+                },
             )
 
         thread = threading.Thread(
@@ -373,12 +405,16 @@ async def trigger_specific_artist_check_endpoint(artist_spotify_id: str, current_user: User = Depends(require_auth_from_state)):
             status_code=500,
             detail={
                 "error": f"Could not trigger artist check for {artist_spotify_id}: {str(e)}"
-            }
+            },
         )
 
 
 @router.post("/watch/{artist_spotify_id}/albums")
-async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
+async def mark_albums_as_known_for_artist(
+    artist_spotify_id: str,
+    request: Request,
+    current_user: User = Depends(require_auth_from_state),
+):
     """Fetches details for given album IDs and adds/updates them in the artist's local DB table."""
     watch_config = get_watch_config()
     if not watch_config.get("enabled", False):
@@ -386,7 +422,7 @@ async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
             status_code=403,
             detail={
                 "error": "Watch feature is currently disabled globally. Cannot mark albums."
-            }
+            },
         )
 
     logger.info(f"Attempting to mark albums as known for artist {artist_spotify_id}.")
@@ -399,13 +435,13 @@ async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
                 status_code=400,
                 detail={
                     "error": "Invalid request body. Expecting a JSON array of album Spotify IDs."
-                }
+                },
             )
 
         if not get_watched_artist(artist_spotify_id):
             raise HTTPException(
                 status_code=404,
-                detail={"error": f"Artist {artist_spotify_id} is not being watched."}
+                detail={"error": f"Artist {artist_spotify_id} is not being watched."},
             )
 
         fetched_albums_details = []
@@ -446,11 +482,18 @@ async def mark_albums_as_known_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
             f"Error marking albums as known for artist {artist_spotify_id}: {e}",
             exc_info=True,
         )
-        raise HTTPException(status_code=500, detail={"error": f"Could not mark albums as known: {str(e)}"})
+        raise HTTPException(
+            status_code=500,
+            detail={"error": f"Could not mark albums as known: {str(e)}"},
+        )
 
 
 @router.delete("/watch/{artist_spotify_id}/albums")
-async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
+async def mark_albums_as_missing_locally_for_artist(
+    artist_spotify_id: str,
+    request: Request,
+    current_user: User = Depends(require_auth_from_state),
+):
     """Removes specified albums from the artist's local DB table."""
     watch_config = get_watch_config()
     if not watch_config.get("enabled", False):
@@ -458,7 +501,7 @@ async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
             status_code=403,
             detail={
                 "error": "Watch feature is currently disabled globally. Cannot mark albums."
-            }
+            },
         )
 
     logger.info(
@@ -473,13 +516,13 @@ async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
                 status_code=400,
                 detail={
                     "error": "Invalid request body. Expecting a JSON array of album Spotify IDs."
-                }
+                },
             )
 
         if not get_watched_artist(artist_spotify_id):
             raise HTTPException(
                 status_code=404,
-                detail={"error": f"Artist {artist_spotify_id} is not being watched."}
+                detail={"error": f"Artist {artist_spotify_id} is not being watched."},
             )
 
         deleted_count = remove_specific_albums_from_artist_table(
@@ -498,4 +541,7 @@ async def mark_albums_as_missing_locally_for_artist(artist_spotify_id: str, request: Request, current_user: User = Depends(require_auth_from_state)):
             f"Error marking albums as missing (deleting locally) for artist {artist_spotify_id}: {e}",
             exc_info=True,
         )
-        raise HTTPException(status_code=500, detail={"error": f"Could not mark albums as missing: {str(e)}"})
+        raise HTTPException(
+            status_code=500,
+            detail={"error": f"Could not mark albums as missing: {str(e)}"},
+        )
diff --git a/routes/utils/artist.py b/routes/utils/artist.py
index 166914b..b9d8126 100644
--- a/routes/utils/artist.py
+++ b/routes/utils/artist.py
@@ -87,7 +87,7 @@ def get_artist_discography(
 
 
 def download_artist_albums(
-    url, album_type="album,single,compilation", request_args=None
+    url, album_type="album,single,compilation", request_args=None, username=None
 ):
     """
     Download albums by an artist, filtered by album types.
@@ -97,6 +97,7 @@ def download_artist_albums(
         album_type (str): Comma-separated list of album types to download
                           (album, single, compilation, appears_on)
         request_args (dict): Original request arguments for tracking
+        username (str | None): Username initiating the request, used for per-user separation
 
     Returns:
         tuple: (list of successfully queued albums, list of duplicate albums)
@@ -160,11 +161,15 @@ def download_artist_albums(
         album_name = album.get("name", "Unknown Album")
         album_artists = album.get("artists", [])
         album_artist = (
-            album_artists[0].get("name", "Unknown Artist") if album_artists else "Unknown Artist"
+            album_artists[0].get("name", "Unknown Artist")
+            if album_artists
+            else "Unknown Artist"
         )
 
         if not album_url:
-            logger.warning(f"Skipping album '{album_name}' because it has no Spotify URL.")
+            logger.warning(
+                f"Skipping album '{album_name}' because it has no Spotify URL."
+            )
             continue
 
         task_data = {
@@ -174,6 +179,8 @@ def download_artist_albums(
             "artist": album_artist,
             "orig_request": request_args,
         }
+        if username:
+            task_data["username"] = username
 
         try:
             task_id = download_queue_manager.add_task(task_data)
@@ -199,7 +206,9 @@ def download_artist_albums(
                 }
             )
         except Exception as e:
-            logger.error(f"Failed to queue album {album_name} for an unknown reason: {e}")
+            logger.error(
+                f"Failed to queue album {album_name} for an unknown reason: {e}"
+            )
 
     logger.info(
         f"Artist album processing: {len(successfully_queued_albums)} queued, {len(duplicate_albums)} duplicates found."

From 6538cde022ca95dee670319835fd0a7ef37902ff Mon Sep 17 00:00:00 2001
From: Xoconoch
Date: Tue, 19 Aug 2025 09:36:22 -0600
Subject: [PATCH 05/14] fix: #274

---
 app.py                 | 468 +++++++++++++++++++++++------------------
 routes/core/history.py | 275 ++++++++++++------------
 2 files changed, 393 insertions(+), 350 deletions(-)

diff --git a/app.py b/app.py
index 13c2a88..b520e4a 100755
--- a/app.py
+++ b/app.py
@@ -8,7 +8,6 @@ import logging.handlers
 import time
 from pathlib import Path
 import os
-import atexit
 import sys
 import redis
 import socket
@@ -16,11 +15,16 @@ from urllib.parse import urlparse
 
 # Run DB migrations as early as possible, before importing any routers that may touch DBs
 try:
-    from routes.migrations import run_migrations_if_needed
-    run_migrations_if_needed()
-    logging.getLogger(__name__).info("Database migrations executed (if needed) early in startup.")
+    from routes.migrations import run_migrations_if_needed
+
+    run_migrations_if_needed()
+    logging.getLogger(__name__).info(
+        "Database migrations executed (if needed) early in startup."
+    )
 except Exception as e:
-    logging.getLogger(__name__).error(f"Database migration step failed early in startup: {e}", exc_info=True)
+    logging.getLogger(__name__).error(
+        f"Database migration step failed early in startup: {e}", exc_info=True
+    )
 
 # Import route routers (to be created)
 from routes.auth.credentials import router as credentials_router
@@ -44,251 +48,299 @@ from routes.auth import AUTH_ENABLED
 from routes.auth.middleware import AuthMiddleware
 
 # Import and initialize routes (this will start the watch manager)
-import routes
 
 
 # Configure application-wide logging
 def setup_logging():
-    """Configure application-wide logging with rotation"""
-    # Create logs directory if it doesn't exist
-    logs_dir = Path("logs")
-    logs_dir.mkdir(exist_ok=True)
+    """Configure application-wide logging with rotation"""
+    # Create logs directory if it doesn't exist
+    logs_dir = Path("logs")
+    logs_dir.mkdir(exist_ok=True)
 
-    # Set up log file paths
-    main_log = logs_dir / "spotizerr.log"
+    # Set up log file paths
+    main_log = logs_dir / "spotizerr.log"
 
-    # Configure root logger
-    root_logger = logging.getLogger()
-    root_logger.setLevel(logging.DEBUG)
+    # Configure root logger
+    root_logger = logging.getLogger()
+    root_logger.setLevel(logging.DEBUG)
 
-    # Clear any existing handlers from the root logger
-    if root_logger.hasHandlers():
-        root_logger.handlers.clear()
+    # Clear any existing handlers from the root logger
+    if root_logger.hasHandlers():
+        root_logger.handlers.clear()
 
-    # Log formatting
-    log_format = logging.Formatter(
-        "%(asctime)s [%(levelname)s] %(message)s",
-        datefmt="%Y-%m-%d %H:%M:%S",
-    )
+    # Log formatting
+    log_format = logging.Formatter(
+        "%(asctime)s [%(levelname)s] %(message)s",
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
 
-    # File handler with rotation (10 MB max, keep 5 backups)
-    file_handler = logging.handlers.RotatingFileHandler(
-        main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
-    )
-    file_handler.setFormatter(log_format)
-    file_handler.setLevel(logging.INFO)
+    # File handler with rotation (10 MB max, keep 5 backups)
+    file_handler = logging.handlers.RotatingFileHandler(
+        main_log, maxBytes=10 * 1024 * 1024, backupCount=5, encoding="utf-8"
+    )
+    file_handler.setFormatter(log_format)
+    file_handler.setLevel(logging.INFO)
 
-    # Console handler for stderr
-    console_handler = logging.StreamHandler(sys.stderr)
-    console_handler.setFormatter(log_format)
-    console_handler.setLevel(logging.INFO)
+    # Console handler for stderr
+    console_handler = logging.StreamHandler(sys.stderr)
+    console_handler.setFormatter(log_format)
+    console_handler.setLevel(logging.INFO)
 
-    # Add handlers to root logger
-    root_logger.addHandler(file_handler)
-    root_logger.addHandler(console_handler)
+    # Add handlers to root logger
+    root_logger.addHandler(file_handler)
+    root_logger.addHandler(console_handler)
 
-    # Set up specific loggers
-    for logger_name in [
-        "routes",
-        "routes.utils",
-        "routes.utils.celery_manager",
-        "routes.utils.celery_tasks",
-        "routes.utils.watch",
-    ]:
-        logger = logging.getLogger(logger_name)
-        logger.setLevel(logging.INFO)
-        logger.propagate = True  # Propagate to root logger
+    # Set up specific loggers
+    for logger_name in [
+        "routes",
+        "routes.utils",
+        "routes.utils.celery_manager",
+        "routes.utils.celery_tasks",
+        "routes.utils.watch",
+    ]:
+        logger = logging.getLogger(logger_name)
+        logger.setLevel(logging.INFO)
+        logger.propagate = True  # Propagate to root logger
 
-    logging.info("Logging system initialized")
+    logging.info("Logging system initialized")
 
 
 def check_redis_connection():
-    """Check if Redis is available and accessible"""
-    if not REDIS_URL:
-        logging.error("REDIS_URL is not configured. Please check your environment.")
-        return False
+    """Check if Redis is available and accessible"""
+    if not REDIS_URL:
+        logging.error("REDIS_URL is not configured. Please check your environment.")
+        return False
 
-    try:
-        # Parse Redis URL
-        parsed_url = urlparse(REDIS_URL)
-        host = parsed_url.hostname or "localhost"
-        port = parsed_url.port or 6379
+    try:
+        # Parse Redis URL
+        parsed_url = urlparse(REDIS_URL)
+        host = parsed_url.hostname or "localhost"
+        port = parsed_url.port or 6379
 
-        logging.info(f"Testing Redis connection to {host}:{port}...")
+        logging.info(f"Testing Redis connection to {host}:{port}...")
 
-        # Test socket connection first
-        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        sock.settimeout(5)
-        result = sock.connect_ex((host, port))
-        sock.close()
+        # Test socket connection first
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        sock.settimeout(5)
+        result = sock.connect_ex((host, port))
+        sock.close()
 
-        if result != 0:
-            logging.error(f"Cannot connect to Redis at {host}:{port}")
-            return False
+        if result != 0:
+            logging.error(f"Cannot connect to Redis at {host}:{port}")
+            return False
 
-        # Test Redis client connection
-        r = redis.from_url(REDIS_URL, socket_connect_timeout=5, socket_timeout=5)
-        r.ping()
-        logging.info("Redis connection successful")
-        return True
+        # Test Redis client connection
+        r = redis.from_url(REDIS_URL, socket_connect_timeout=5, socket_timeout=5)
+        r.ping()
+        logging.info("Redis connection successful")
+        return True
 
-    except redis.ConnectionError as e:
-        logging.error(f"Redis connection error: {e}")
-        return False
-    except redis.TimeoutError as e:
-        logging.error(f"Redis timeout error: {e}")
-        return False
-    except Exception as e:
-        logging.error(f"Unexpected error checking Redis connection: {e}")
-        return False
+    except redis.ConnectionError as e:
+        logging.error(f"Redis connection error: {e}")
+        return False
+    except redis.TimeoutError as e:
+        logging.error(f"Redis timeout error: {e}")
+        return False
+    except Exception as e:
+        logging.error(f"Unexpected error checking Redis connection: {e}")
+        return False
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
-    """Handle application startup and shutdown"""
-    # Startup
-    setup_logging()
-
-    # Check Redis connection
-    if not check_redis_connection():
-        logging.error("Failed to connect to Redis. Please ensure Redis is running and accessible.")
-        # Don't exit, but warn - some functionality may not work
-
-    # Start Celery workers
-    try:
-        celery_manager.start()
-        logging.info("Celery workers started successfully")
-    except Exception as e:
-        logging.error(f"Failed to start Celery workers: {e}")
-
-    yield
-
-    # Shutdown
-    try:
-        celery_manager.stop()
-        logging.info("Celery workers stopped")
-    except Exception as e:
-        logging.error(f"Error stopping Celery workers: {e}")
+    """Handle application startup and shutdown"""
+    # Startup
+    setup_logging()
+
+    # Check Redis connection
+    if not check_redis_connection():
+        logging.error(
+            "Failed to connect to Redis. Please ensure Redis is running and accessible."
+        )
+        # Don't exit, but warn - some functionality may not work
+
+    # Start Celery workers
+    try:
+        celery_manager.start()
+        logging.info("Celery workers started successfully")
+    except Exception as e:
+        logging.error(f"Failed to start Celery workers: {e}")
+
+    yield
+
+    # Shutdown
+    try:
+        celery_manager.stop()
+        logging.info("Celery workers stopped")
+    except Exception as e:
+        logging.error(f"Error stopping Celery workers: {e}")
 
 
 def create_app():
-    app = FastAPI(
-        title="Spotizerr API",
-        description="Music download service API",
-        version="3.0.0",
-        lifespan=lifespan,
-        redirect_slashes=True  # Enable automatic trailing slash redirects
-    )
+    app = FastAPI(
+        title="Spotizerr API",
+        description="Music download service API",
+        version="3.0.0",
+        lifespan=lifespan,
+        redirect_slashes=True,  # Enable automatic trailing slash redirects
+    )
 
-    # Set up CORS
-    app.add_middleware(
-        CORSMiddleware,
-        allow_origins=["*"],
-        allow_credentials=True,
-        allow_methods=["*"],
-        allow_headers=["*"],
-    )
+    # Set up CORS
+    app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+    )
 
-    # Add authentication middleware (only if auth is enabled)
-    if AUTH_ENABLED:
-        app.add_middleware(AuthMiddleware)
-        logging.info("Authentication system enabled")
-    else:
-        logging.info("Authentication system disabled")
+    # Add authentication middleware (only if auth is enabled)
+    if AUTH_ENABLED:
+        app.add_middleware(AuthMiddleware)
+        logging.info("Authentication system enabled")
+    else:
+        logging.info("Authentication system disabled")
 
-    # Register routers with URL prefixes
-    app.include_router(auth_router, prefix="/api/auth", tags=["auth"])
-
-    # Include SSO router if available
-    try:
-        from routes.auth.sso import router as sso_router
-        app.include_router(sso_router, prefix="/api/auth", tags=["sso"])
-        logging.info("SSO functionality enabled")
-    except ImportError as e:
-        logging.warning(f"SSO functionality not available: {e}")
-    app.include_router(config_router, prefix="/api/config", tags=["config"])
-    app.include_router(search_router, prefix="/api/search", tags=["search"])
-    app.include_router(credentials_router, prefix="/api/credentials", tags=["credentials"])
-    app.include_router(album_router, prefix="/api/album", tags=["album"])
-    app.include_router(track_router, prefix="/api/track", tags=["track"])
-    app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
-    app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
-    app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
-    app.include_router(history_router, prefix="/api/history", tags=["history"])
+    # Register routers with URL prefixes
+    app.include_router(auth_router, prefix="/api/auth", tags=["auth"])
 
-    # Add request logging middleware
-    @app.middleware("http")
-    async def log_requests(request: Request, call_next):
-        start_time = time.time()
-
-        # Log request
-        logger = logging.getLogger("uvicorn.access")
-        logger.debug(f"Request: {request.method} {request.url.path}")
-
-        try:
-            response = await call_next(request)
-
-            # Log response
-            duration = round((time.time() - start_time) * 1000, 2)
-            logger.debug(f"Response: {response.status_code} | Duration: {duration}ms")
-
-            return response
-        except Exception as e:
-            # Log errors
-            logger.error(f"Server error: {str(e)}", exc_info=True)
-            raise HTTPException(status_code=500, detail="Internal Server Error")
+    # Include SSO router if available
+    try:
+        from routes.auth.sso import router as sso_router
 
-    # Mount static files for React app
-    if os.path.exists("spotizerr-ui/dist"):
-        app.mount("/static", StaticFiles(directory="spotizerr-ui/dist"), name="static")
-
-        # Serve React App - catch-all route for SPA (but not for API routes)
-        @app.get("/{full_path:path}")
-        async def serve_react_app(full_path: str):
-            """Serve React app with fallback to index.html for SPA routing"""
-            static_dir = "spotizerr-ui/dist"
-
-            # Don't serve React app for API routes (more specific check)
-            if full_path.startswith("api") or full_path.startswith("api/"):
-                raise HTTPException(status_code=404, detail="API endpoint not found")
-
-            # If it's a file that exists, serve it
-            if full_path and os.path.exists(os.path.join(static_dir, full_path)):
-                return FileResponse(os.path.join(static_dir, full_path))
-            else:
-                # Fallback to index.html for SPA routing
-                return FileResponse(os.path.join(static_dir, "index.html"))
-    else:
-        logging.warning("React app build directory not found at spotizerr-ui/dist")
+        app.include_router(sso_router, prefix="/api/auth", tags=["sso"])
+        logging.info("SSO functionality enabled")
+    except ImportError as e:
+        logging.warning(f"SSO functionality not available: {e}")
+    app.include_router(config_router, prefix="/api/config", tags=["config"])
+    app.include_router(search_router, prefix="/api/search", tags=["search"])
+    app.include_router(
+        credentials_router, prefix="/api/credentials", tags=["credentials"]
+    )
+    app.include_router(album_router, prefix="/api/album", tags=["album"])
+    app.include_router(track_router, prefix="/api/track", tags=["track"])
+    app.include_router(playlist_router, prefix="/api/playlist", tags=["playlist"])
+    app.include_router(artist_router, prefix="/api/artist", tags=["artist"])
+    app.include_router(prgs_router, prefix="/api/prgs", tags=["progress"])
+    app.include_router(history_router, prefix="/api/history", tags=["history"])
 
-    return app
+    # Add request logging middleware
+    @app.middleware("http")
+    async def log_requests(request: Request, call_next):
+        start_time = time.time()
+
+        # Log request
+        logger = logging.getLogger("uvicorn.access")
+        logger.debug(f"Request: {request.method} {request.url.path}")
+
+        try:
+            response = await call_next(request)
+
+            # Log response
+            duration = round((time.time() - start_time) * 1000, 2)
+            logger.debug(f"Response: {response.status_code} | Duration: {duration}ms")
+
+            return response
+        except Exception as e:
+            # Log errors
+            logger.error(f"Server error: {str(e)}", exc_info=True)
+            raise HTTPException(status_code=500, detail="Internal Server Error")
+
+    # Mount static files for React app
+    if os.path.exists("spotizerr-ui/dist"):
+        app.mount("/static", StaticFiles(directory="spotizerr-ui/dist"), name="static")
+
+        # Serve React App - catch-all route for SPA (but not for API routes)
+        @app.get("/{full_path:path}")
+        async def serve_react_app(full_path: str):
+            """Serve React app with fallback to index.html for SPA routing.
+            Prevent directory traversal."""
+            static_dir = "spotizerr-ui/dist"
+            static_dir_path = Path(static_dir).resolve()
+            index_path = static_dir_path / "index.html"
+            allowed_exts = {
+                ".html",
+                ".js",
+                ".css",
+                ".map",
+                ".png",
+                ".jpg",
+                ".jpeg",
+                ".svg",
+                ".webp",
+                ".ico",
+                ".json",
+                ".txt",
+                ".woff",
+                ".woff2",
+                ".ttf",
+                ".eot",
+                ".mp3",
+                ".ogg",
+                ".mp4",
+                ".webm",
+            }
+
+            # Don't serve React app for API routes (more specific check)
+            if full_path.startswith("api") or full_path.startswith("api/"):
+                raise HTTPException(status_code=404, detail="API endpoint not found")
+
+            # Reject null bytes early
+            if "\x00" in full_path:
+                return FileResponse(str(index_path))
+
+            # Sanitize path: normalize backslashes and strip URL schemes
+            sanitized = full_path.replace("\\", "/").lstrip("/")
+            if sanitized.startswith("http://") or sanitized.startswith("https://"):
+                return FileResponse(str(index_path))
+
+            # Resolve requested path safely and ensure it stays within static_dir
+            try:
+                requested_path = (static_dir_path / sanitized).resolve()
+            except Exception:
+                requested_path = index_path
+
+            # If traversal attempted or non-file within static dir, fall back to index.html for SPA routing
+            if not str(requested_path).startswith(str(static_dir_path)):
+                return FileResponse(str(index_path))
+
+            # Disallow hidden files (starting with dot) and enforce safe extensions
+            if requested_path.is_file():
+                name = requested_path.name
+                if name.startswith("."):
+                    return FileResponse(str(index_path))
+                suffix = requested_path.suffix.lower()
+                if suffix in allowed_exts:
+                    return FileResponse(str(requested_path))
+                # Not an allowed asset; fall back to SPA index
+                return FileResponse(str(index_path))
+            else:
+                # Fallback to index.html for SPA routing
+                return FileResponse(str(index_path))
+    else:
+        logging.warning("React app build directory not found at spotizerr-ui/dist")
+
+    return app
 
 
 def start_celery_workers():
-    """Start Celery workers with dynamic configuration"""
-    # This function is now handled by the lifespan context manager
-    # and the celery_manager.start() call
-    pass
+    """Start Celery workers with dynamic configuration"""
+    # This function is now handled by the lifespan context manager
+    # and the celery_manager.start() call
+    pass
 
 
 if __name__ == "__main__":
-    import uvicorn
+    import uvicorn
 
-    app = create_app()
+    app = create_app()
 
-    # Use HOST environment variable if present, otherwise fall back to IPv4 wildcard
-    host = os.getenv("HOST", "0.0.0.0")
+    # Use HOST environment variable if present, otherwise fall back to IPv4 wildcard
+    host = os.getenv("HOST", "0.0.0.0")
 
-    # Allow overriding port via PORT env var, with default 7171
-    try:
-        port = int(os.getenv("PORT", "7171"))
-    except ValueError:
-        port = 7171
+    # Allow overriding port via PORT env var, with default 7171
+    try:
+        port = int(os.getenv("PORT", "7171"))
+    except ValueError:
+        port = 7171
 
-    uvicorn.run(
-        app,
-        host=host,
-        port=port,
-        log_level="info",
-        access_log=True
-    )
+    uvicorn.run(app, host=host, port=port, log_level="info", access_log=True)
diff --git a/routes/core/history.py b/routes/core/history.py
index e892ac0..d1f1d93 100644
--- a/routes/core/history.py
+++ b/routes/core/history.py
@@ -1,9 +1,8 @@
-from fastapi import APIRouter, HTTPException, Request, Depends
+from fastapi import APIRouter, Request, Depends
 from fastapi.responses import JSONResponse
-import json
-import traceback
 import logging
 from routes.utils.history_manager import history_manager
+from typing import Any, Dict
 
 # Import authentication dependencies
 from routes.auth.middleware import require_auth_from_state, User
 
@@ -15,10 +14,12 @@ router = APIRouter()
 
 @router.get("/")
 @router.get("")
-async def get_history(request: Request, current_user: User = Depends(require_auth_from_state)):
+async def get_history(
+    request: Request, current_user: User = Depends(require_auth_from_state)
+):
     """
     Retrieve download history with optional filtering and pagination.
-    
+
     Query parameters:
     - limit: Maximum number of records (default: 100, max: 500)
     - offset: Number of records to skip (default: 0)
@@ -31,143 +32,144 @@ async def get_history(request: Request, current_user: User = Depends(require_auth_from_state)):
         offset = max(int(request.query_params.get("offset", 0)), 0)
         download_type = request.query_params.get("download_type")
         status = request.query_params.get("status")
-        
+
         # Validate download_type if provided
         valid_types = ["track", "album", "playlist"]
         if download_type and download_type not in valid_types:
             return JSONResponse(
-                content={"error": f"Invalid download_type. Must be one of: {valid_types}"},
-                status_code=400
+                content={
+                    "error": f"Invalid download_type. Must be one of: {valid_types}"
+                },
+                status_code=400,
             )
-        
+
         # Validate status if provided
         valid_statuses = ["completed", "failed", "skipped", "in_progress"]
         if status and status not in valid_statuses:
             return JSONResponse(
                 content={"error": f"Invalid status. Must be one of: {valid_statuses}"},
-                status_code=400
+                status_code=400,
             )
-        
+
         # Get history from manager
         history = history_manager.get_download_history(
-            limit=limit,
-            offset=offset,
-            download_type=download_type,
-            status=status
+            limit=limit, offset=offset, download_type=download_type, status=status
         )
-        
+
         # Add pagination info
-        response_data = {
+        response_data: Dict[str, Any] = {
             "downloads": history,
             "pagination": {
                 "limit": limit,
                 "offset": offset,
-                "returned_count": len(history)
-            }
+                "returned_count": len(history),
+            },
         }
-        
+
+        filters: Dict[str, Any] = {}
         if download_type:
-            response_data["filters"] = {"download_type": download_type}
+            filters["download_type"] = download_type
         if status:
-            if "filters" not in response_data:
-                response_data["filters"] = {}
-            response_data["filters"]["status"] = status
-        
-        return JSONResponse(
-            content=response_data,
-            status_code=200
-        )
-        
+            filters["status"] = status
+        if filters:
+            response_data["filters"] = filters
+
+        return JSONResponse(content=response_data, status_code=200)
+
     except ValueError as e:
         return JSONResponse(
-            content={"error": f"Invalid parameter value: {str(e)}"},
-            status_code=400
+            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
         )
     except Exception as e:
         logger.error(f"Error retrieving download history: {e}", exc_info=True)
         return JSONResponse(
             content={"error": "Failed to retrieve download history", "details": str(e)},
-            status_code=500
+            status_code=500,
         )
 
 
 @router.get("/{task_id}")
-async def get_download_by_task_id(task_id: str, current_user: User = Depends(require_auth_from_state)):
+async def get_download_by_task_id(
+    task_id: str, current_user: User = Depends(require_auth_from_state)
+):
     """
     Retrieve specific download history by task ID.
-    
+
     Args:
-        task_id: Celery task ID
+        task_id: Celery task ID
     """
     try:
         download = history_manager.get_download_by_task_id(task_id)
-        
+
         if not download:
             return JSONResponse(
                 content={"error": f"Download with task ID '{task_id}' not found"},
-                status_code=404
+                status_code=404,
             )
-        
-        return JSONResponse(
-            content=download,
-            status_code=200
-        )
-        
+
+        return JSONResponse(content=download, status_code=200)
+
     except Exception as e:
-        logger.error(f"Error retrieving download for task {task_id}: {e}", exc_info=True)
+        logger.error(
+            f"Error retrieving download for task {task_id}: {e}", exc_info=True
+        )
         return JSONResponse(
             content={"error": "Failed to retrieve download", "details": str(e)},
-            status_code=500
+            status_code=500,
         )
 
 
 @router.get("/{task_id}/children")
-async def get_download_children(task_id: str, current_user: User = Depends(require_auth_from_state)):
+async def get_download_children(
+    task_id: str, current_user: User = Depends(require_auth_from_state)
+):
     """
     Retrieve children tracks for an album or playlist download.
-    
+
     Args:
-        task_id: Celery task ID
+        task_id: Celery task ID
     """
     try:
         # First get the main download to find the children table
         download = history_manager.get_download_by_task_id(task_id)
-        
+
         if not download:
             return JSONResponse(
                 content={"error": f"Download with task ID '{task_id}' not found"},
-                status_code=404
+                status_code=404,
             )
-        
+
         children_table = download.get("children_table")
         if not children_table:
             return JSONResponse(
                 content={"error": f"Download '{task_id}' has no children tracks"},
-                status_code=404
+                status_code=404,
             )
-        
+
         # Get children tracks
         children = history_manager.get_children_history(children_table)
-        
+
         response_data = {
             "task_id": task_id,
             "download_type": download.get("download_type"),
             "title": download.get("title"),
             "children_table": children_table,
             "tracks": children,
-            "track_count": len(children)
+            "track_count": len(children),
         }
-        
-        return JSONResponse(
-            content=response_data,
-            status_code=200
-        )
-        
+
+        return JSONResponse(content=response_data, status_code=200)
+
     except Exception as e:
-        logger.error(f"Error retrieving children for task {task_id}: {e}", exc_info=True)
+        logger.error(
+            f"Error retrieving children for task {task_id}: {e}", exc_info=True
+        )
         return JSONResponse(
-            content={"error": "Failed to retrieve download children", "details": str(e)},
-            status_code=500
+            content={
+                "error": "Failed to retrieve download children",
+                "details": str(e),
+            },
+            status_code=500,
         )
 
 
@@ -178,25 +180,27 @@ async def get_download_stats(current_user: User = Depends(require_auth_from_state)):
     """
     try:
         stats = history_manager.get_download_stats()
-        
-        return JSONResponse(
-            content=stats,
-            status_code=200
-        )
-        
+
+        return JSONResponse(content=stats, status_code=200)
+
     except Exception as e:
         logger.error(f"Error retrieving download stats: {e}", exc_info=True)
         return JSONResponse(
-            content={"error": "Failed to retrieve download statistics", "details": str(e)},
-            status_code=500
+            content={
+                "error": "Failed to retrieve download statistics",
+                "details": str(e),
+            },
+            status_code=500,
         )
 
 
 @router.get("/search")
-async def search_history(request: Request, current_user: User = Depends(require_auth_from_state)):
+async def search_history(
+    request: Request, current_user: User = Depends(require_auth_from_state)
+):
     """
     Search download history by title or artist.
-    
+
     Query parameters:
     - q: Search query (required)
     - limit: Maximum number of results (default: 50, max: 200)
     """
     try:
         query = request.query_params.get("q", "").strip()
@@ -206,147 +210,134 @@ async def search_history(request: Request, current_user: User = Depends(require_auth_from_state)):
         if not query:
             return JSONResponse(
                 content={"error": "Missing required parameter: q (search query)"},
-                status_code=400
+                status_code=400,
             )
-        
+
         limit = min(int(request.query_params.get("limit", 50)), 200)  # Cap at 200
-        
+
         # Search history
         results = history_manager.search_history(query, limit)
-        
+
         response_data = {
             "query": query,
             "results": results,
             "result_count": len(results),
-            "limit": limit
+            "limit": limit,
         }
-        
-        return JSONResponse(
-            content=response_data,
-            status_code=200
-        )
-        
+
+        return JSONResponse(content=response_data, status_code=200)
+
     except ValueError as e:
         return JSONResponse(
-            content={"error": f"Invalid parameter value: {str(e)}"},
-            status_code=400
+            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
        )
     except Exception as e:
         logger.error(f"Error searching download history: {e}", exc_info=True)
         return JSONResponse(
             content={"error": "Failed to search download history", "details": str(e)},
-            status_code=500
+            status_code=500,
         )
 
 
 @router.get("/recent")
-async def get_recent_downloads(request: Request, current_user: User = Depends(require_auth_from_state)):
+async def get_recent_downloads(
+    request: Request, current_user: User = Depends(require_auth_from_state)
+):
     """
     Get most recent downloads.
-    
+
     Query parameters:
     - limit: Maximum number of results (default: 20, max: 100)
     """
     try:
         limit = min(int(request.query_params.get("limit", 20)), 100)  # Cap at 100
-        
+
         recent = history_manager.get_recent_downloads(limit)
-        
-        response_data = {
-            "downloads": recent,
-            "count": len(recent),
-            "limit": limit
-        }
-        
-        return JSONResponse(
-            content=response_data,
-            status_code=200
-        )
-        
+
+        response_data = {"downloads": recent, "count": len(recent), "limit": limit}
+
+        return JSONResponse(content=response_data, status_code=200)
+
     except ValueError as e:
         return JSONResponse(
-            content={"error": f"Invalid parameter value: {str(e)}"},
-            status_code=400
+            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
         )
     except Exception as e:
         logger.error(f"Error retrieving recent downloads: {e}", exc_info=True)
         return JSONResponse(
             content={"error": "Failed to retrieve recent downloads", "details": str(e)},
-            status_code=500
+            status_code=500,
         )
 
 
 @router.get("/failed")
-async def get_failed_downloads(request: Request, current_user: User = Depends(require_auth_from_state)):
+async def get_failed_downloads(
+    request: Request, current_user: User = Depends(require_auth_from_state)
+):
     """
     Get failed downloads.
-    
+
     Query parameters:
     - limit: Maximum number of results (default: 50, max: 200)
     """
     try:
         limit = min(int(request.query_params.get("limit", 50)), 200)  # Cap at 200
-        
+
         failed = history_manager.get_failed_downloads(limit)
-        
-        response_data = {
-            "downloads": failed,
-            "count": len(failed),
-            "limit": limit
-        }
-        
-        return JSONResponse(
-            content=response_data,
-            status_code=200
-        )
-        
+
+        response_data = {"downloads": failed, "count": len(failed), "limit": limit}
+
+        return JSONResponse(content=response_data, status_code=200)
+
     except ValueError as e:
         return JSONResponse(
-            content={"error": f"Invalid parameter value: {str(e)}"},
-            status_code=400
+            content={"error": f"Invalid parameter value: {str(e)}"}, status_code=400
         )
     except Exception as e:
         logger.error(f"Error retrieving failed downloads: {e}", exc_info=True)
         return JSONResponse(
             content={"error": "Failed to retrieve failed downloads", "details": str(e)},
-            status_code=500
+            status_code=500,
         )
 
 
 @router.post("/cleanup")
-async def cleanup_old_history(request: Request, current_user: User = Depends(require_auth_from_state)):
+async def cleanup_old_history(
+    request: Request, current_user: User = Depends(require_auth_from_state)
+):
     """
     Clean up old download history.
-    
+
     JSON body:
     - days_old: Number of days old to keep (default: 30)
     """
     try:
-        data = await request.json() if request.headers.get("content-type") == "application/json" else {}
+        data = (
+            await request.json()
+            if request.headers.get("content-type") == "application/json"
+            else {}
+        )
         days_old = data.get("days_old", 30)
-        
+
         if not isinstance(days_old, int) or days_old <= 0:
             return JSONResponse(
                 content={"error": "days_old must be a positive integer"},
-                status_code=400
+                status_code=400,
             )
-        
+
         deleted_count = history_manager.clear_old_history(days_old)
-        
+
         response_data = {
-            "message": f"Successfully cleaned up old download history",
+            "message": "Successfully cleaned up old download history",
             "deleted_records": deleted_count,
-            "days_old": days_old
+            "days_old": days_old,
         }
-        
-        return JSONResponse(
-            content=response_data,
-            status_code=200
-        )
-        
+
+        return JSONResponse(content=response_data, status_code=200)
+
     except Exception as e:
         logger.error(f"Error cleaning up old history: {e}", exc_info=True)
         return JSONResponse(
             content={"error": "Failed to cleanup old history", "details": str(e)},
-            status_code=500
-        )
\ No newline at end of file
+            status_code=500,
+        )

From 9830a46ebb7d6ae2deb205a7122c011b91d41664 Mon Sep 17 00:00:00 2001
From: Xoconoch
Date: Tue, 19 Aug 2025 09:40:14 -0600
Subject: [PATCH 06/14] chore(migrations): add v3.1.2 dummy migration (no schema changes)

---
 routes/migrations/runner.py | 415 ++++++++++++++++++++----------------
 routes/migrations/v3_1_2.py |  42 ++++
 2 files changed, 278 insertions(+), 179 deletions(-)
 create mode 100644 routes/migrations/v3_1_2.py

diff --git a/routes/migrations/runner.py b/routes/migrations/runner.py
index 887ddd1..f9117af 100644
--- a/routes/migrations/runner.py
+++ b/routes/migrations/runner.py
@@ -6,6 +6,7 @@ from typing import Optional
 from .v3_0_6 import MigrationV3_0_6
 from .v3_1_0 import MigrationV3_1_0
 from .v3_1_1 import MigrationV3_1_1
+from .v3_1_2 import MigrationV3_1_2
 
 logger = logging.getLogger(__name__)
 
@@ -23,138 +24,142 @@ SEARCH_JSON = CREDS_DIR / "search.json"
 
 # Expected children table columns for history (album_/playlist_)
 CHILDREN_EXPECTED_COLUMNS: dict[str, str] = {
-    "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
-    "title": "TEXT NOT NULL",
-    "artists": "TEXT",
-    "album_title": "TEXT",
-    "duration_ms": "INTEGER",
-    "track_number": "INTEGER",
-    "disc_number": "INTEGER",
-    "explicit": "BOOLEAN",
-    "status": "TEXT NOT NULL",
-    "external_ids": "TEXT",
-    "genres": "TEXT",
-    "isrc": "TEXT",
-    "timestamp": "REAL NOT NULL",
-    "position": "INTEGER",
-    "metadata": "TEXT",
+    "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
+    "title": "TEXT NOT NULL",
+    "artists": "TEXT",
+    "album_title": "TEXT",
+    "duration_ms": "INTEGER",
+    "track_number": "INTEGER",
+    "disc_number": "INTEGER",
+    "explicit": "BOOLEAN",
+    "status": "TEXT NOT NULL",
+    "external_ids": "TEXT",
+    "genres": "TEXT",
+    "isrc": "TEXT",
+    "timestamp": "REAL NOT NULL",
+    "position": "INTEGER",
+    "metadata": "TEXT",
 }
 
 # 3.1.2 expected schemas for Watch DBs (kept here to avoid importing modules with side-effects)
 EXPECTED_WATCHED_PLAYLISTS_COLUMNS: dict[str, str] = {
-    "spotify_id": "TEXT PRIMARY KEY",
-    "name": "TEXT",
-    "owner_id": "TEXT",
-    "owner_name": "TEXT",
-    "total_tracks": "INTEGER",
-    "link": "TEXT",
-    "snapshot_id": "TEXT",
-    "last_checked": "INTEGER",
-    "added_at": "INTEGER",
-    "is_active": "INTEGER DEFAULT 1",
+    "spotify_id": "TEXT PRIMARY KEY",
+    "name": "TEXT",
+    "owner_id": "TEXT",
+    "owner_name": "TEXT",
+    "total_tracks": "INTEGER",
+    "link": "TEXT",
+    "snapshot_id": "TEXT",
+    "last_checked": "INTEGER",
+    "added_at": "INTEGER",
+    "is_active": "INTEGER DEFAULT 1",
 }
 
 EXPECTED_PLAYLIST_TRACKS_COLUMNS: dict[str, str] = {
-    "spotify_track_id": "TEXT PRIMARY KEY",
-    "title": "TEXT",
-    "artist_names": "TEXT",
-    "album_name": "TEXT",
-    "album_artist_names": "TEXT",
-    "track_number": "INTEGER",
-    "album_spotify_id": "TEXT",
-    "duration_ms": "INTEGER",
-    "added_at_playlist": "TEXT",
-    "added_to_db": "INTEGER",
-    "is_present_in_spotify": "INTEGER DEFAULT 1",
-    "last_seen_in_spotify": "INTEGER",
-    "snapshot_id": "TEXT",
-    "final_path": "TEXT",
+    "spotify_track_id": "TEXT PRIMARY KEY",
+    "title": "TEXT",
+    "artist_names": "TEXT",
+    "album_name": "TEXT",
+    "album_artist_names": "TEXT",
+    "track_number": "INTEGER",
+    "album_spotify_id": "TEXT",
+    "duration_ms": "INTEGER",
+    "added_at_playlist": "TEXT",
+    "added_to_db": "INTEGER",
+    "is_present_in_spotify": "INTEGER DEFAULT 1",
+    "last_seen_in_spotify": "INTEGER",
+    "snapshot_id": "TEXT",
+    "final_path": "TEXT",
 }
 
 EXPECTED_WATCHED_ARTISTS_COLUMNS: dict[str, str] = {
-    "spotify_id": "TEXT PRIMARY KEY",
-    "name": "TEXT",
-    "link": "TEXT",
-    "total_albums_on_spotify": "INTEGER",
-    "last_checked": "INTEGER",
-    "added_at": "INTEGER",
-    "is_active": "INTEGER DEFAULT 1",
-    "genres": "TEXT",
-    "popularity": "INTEGER",
-    "image_url": "TEXT",
+    "spotify_id": "TEXT PRIMARY KEY",
+    "name": "TEXT",
+    "link": "TEXT",
+    "total_albums_on_spotify": "INTEGER",
+    "last_checked": "INTEGER",
+    "added_at": "INTEGER",
+    "is_active": "INTEGER DEFAULT 1",
+    "genres": "TEXT",
+    "popularity": "INTEGER",
+    "image_url": "TEXT",
 }
 
 EXPECTED_ARTIST_ALBUMS_COLUMNS: dict[str, str] = {
-    "album_spotify_id": "TEXT PRIMARY KEY",
-    "artist_spotify_id": "TEXT",
-    "name": "TEXT",
-    "album_group": "TEXT",
-    "album_type": "TEXT",
-    "release_date": "TEXT",
-    "release_date_precision": "TEXT",
+    "total_tracks": "INTEGER",
+    "link": "TEXT",
+    "image_url": "TEXT",
+    "added_to_db": "INTEGER",
+    "last_seen_on_spotify": "INTEGER",
+    "download_task_id": "TEXT",
+    "download_status": "INTEGER DEFAULT 0",
+    "is_fully_downloaded_managed_by_app": "INTEGER DEFAULT 0",
 }
 
 m306 = MigrationV3_0_6()
 m310 = MigrationV3_1_0()
 m311 = MigrationV3_1_1()
+m312 = MigrationV3_1_2()
 
 
 def _safe_connect(path: Path) -> Optional[sqlite3.Connection]:
-    try:
-        path.parent.mkdir(parents=True, exist_ok=True)
-        conn = sqlite3.connect(str(path))
-        conn.row_factory = sqlite3.Row
-        return conn
-    except Exception as e:
-        logger.error(f"Failed to open SQLite DB {path}: {e}")
-        return None
+    try:
+        path.parent.mkdir(parents=True, exist_ok=True)
+        conn = sqlite3.connect(str(path))
+        conn.row_factory = sqlite3.Row
+        return conn
+    except Exception as e:
+        logger.error(f"Failed to open SQLite DB {path}: {e}")
+        return None
 
 
 def _ensure_table_schema(
-    conn: sqlite3.Connection,
-    table_name: str,
-    expected_columns: dict[str, str],
-    table_description: str,
+    conn: sqlite3.Connection,
+    table_name: str,
+    expected_columns: dict[str, str],
+    table_description: str,
 ) -> None:
-    try:
-        cur = conn.execute(f"PRAGMA table_info({table_name})")
-        existing_info = cur.fetchall()
-        existing_names = {row[1] for row in existing_info}
-        for col_name, col_type in expected_columns.items():
-            if col_name in existing_names:
-                continue
-            col_type_for_add = (
-                col_type.replace("PRIMARY KEY", "").replace("AUTOINCREMENT", "").replace("NOT NULL", "").strip()
-            )
-            try:
-                conn.execute(
-                    f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}"
-                )
-                logger.info(
-                    f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'."
-                )
-            except sqlite3.OperationalError as e:
-                logger.warning(
-                    f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}"
-                )
-    except Exception as e:
-        logger.error(
-            f"Error ensuring schema for {table_description} table '{table_name}': {e}",
-            exc_info=True,
-        )
+    try:
+        cur = conn.execute(f"PRAGMA table_info({table_name})")
+        existing_info = cur.fetchall()
+        existing_names = {row[1] for row in existing_info}
+        for col_name, col_type in expected_columns.items():
+            if col_name in existing_names:
+                continue
+            col_type_for_add = (
+                col_type.replace("PRIMARY KEY", "")
+                .replace("AUTOINCREMENT", "")
+                .replace("NOT NULL", "")
+                .strip()
+            )
+            try:
+                conn.execute(
+                    f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}"
+                )
+                logger.info(
+                    f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'."
+                )
+            except sqlite3.OperationalError as e:
+                logger.warning(
+                    f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}"
+                )
+    except Exception as e:
+        logger.error(
+            f"Error ensuring schema for {table_description} table '{table_name}': {e}",
+            exc_info=True,
+        )
 
 
 def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str) -> None:
-    conn.execute(
-        f"""
+    conn.execute(
+        f"""
 CREATE TABLE IF NOT EXISTS {table_name} (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     title TEXT NOT NULL,
@@ -173,62 +178,73 @@ def _create_or_update_children_table(conn: sqlite3.Connection, table_name: str)
     metadata TEXT
 )
 """
-    )
-    _ensure_table_schema(conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history")
+    )
+    _ensure_table_schema(
+        conn, table_name, CHILDREN_EXPECTED_COLUMNS, "children history"
+    )
 
 
 def _update_children_tables_for_history(conn: sqlite3.Connection) -> None:
-    try:
-        try:
-            cur = conn.execute(
-                "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
-            )
-            for row in cur.fetchall():
-                table_name = row[0]
-                if not table_name:
-                    continue
-                _create_or_update_children_table(conn, table_name)
-        except sqlite3.Error as e:
-            logger.warning(f"Failed to scan referenced children tables from main history: {e}")
+    try:
+        try:
+            cur = conn.execute(
+                "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''"
+            )
+            for row in cur.fetchall():
+                table_name = row[0]
+                if not table_name:
+                    continue
+                _create_or_update_children_table(conn, table_name)
+        except sqlite3.Error as e:
+            logger.warning(
+                f"Failed to scan referenced children tables from main history: {e}"
+            )
 
-        try:
-            cur = conn.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
-            )
-            for row in cur.fetchall():
-                table_name = row[0]
-                _create_or_update_children_table(conn, table_name)
-        except sqlite3.Error as e:
-            logger.warning(f"Failed to scan legacy children tables in history DB: {e}")
-        logger.info("Children history tables migration ensured")
-    except Exception:
-        logger.error("Failed migrating children history tables", exc_info=True)
+        try:
+            cur = conn.execute(
+                "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'"
+            )
+            for row in cur.fetchall():
+                table_name = row[0]
+                _create_or_update_children_table(conn, table_name)
+        except sqlite3.Error as e:
+            logger.warning(f"Failed to scan legacy children tables in history DB: {e}")
+        logger.info("Children history tables migration ensured")
+    except Exception:
+        logger.error("Failed migrating children history tables", exc_info=True)
 
 
 def _ensure_creds_filesystem() -> None:
-    try:
-        BLOBS_DIR.mkdir(parents=True, exist_ok=True)
-        if not SEARCH_JSON.exists():
-            SEARCH_JSON.write_text('{ "client_id": "", "client_secret": "" }\n', encoding="utf-8")
-            logger.info(f"Created default global Spotify creds file at {SEARCH_JSON}")
-    except Exception:
-        logger.error("Failed to ensure credentials filesystem (blobs/search.json)", exc_info=True)
+    try:
+        BLOBS_DIR.mkdir(parents=True, exist_ok=True)
+        if not SEARCH_JSON.exists():
+            SEARCH_JSON.write_text(
+                '{ "client_id": "", "client_secret": "" }\n', encoding="utf-8"
+            )
+            logger.info(f"Created default global Spotify creds file at {SEARCH_JSON}")
+    except Exception:
+        logger.error(
+            "Failed to ensure credentials filesystem (blobs/search.json)",
+            exc_info=True,
+        )
(blobs/search.json)", exc_info=True + ) -def _apply_versioned_updates(conn: sqlite3.Connection, c_base, u_base, post_update=None) -> None: - if not c_base(conn): - u_base(conn) - if post_update: - post_update(conn) +def _apply_versioned_updates( + conn: sqlite3.Connection, c_base, u_base, post_update=None +) -> None: + if not c_base(conn): + u_base(conn) + if post_update: + post_update(conn) # --- 3.1.2 upgrade helpers for Watch DBs --- + def _update_watch_playlists_db(conn: sqlite3.Connection) -> None: - try: - # Ensure core watched_playlists table exists and has expected schema - conn.execute( - """ + try: + # Ensure core watched_playlists table exists and has expected schema + conn.execute( + """ CREATE TABLE IF NOT EXISTS watched_playlists ( spotify_id TEXT PRIMARY KEY, name TEXT, @@ -242,15 +258,22 @@ def _update_watch_playlists_db(conn: sqlite3.Connection) -> None: is_active INTEGER DEFAULT 1 ) """ - ) - _ensure_table_schema(conn, "watched_playlists", EXPECTED_WATCHED_PLAYLISTS_COLUMNS, "watched playlists") + ) + _ensure_table_schema( + conn, + "watched_playlists", + EXPECTED_WATCHED_PLAYLISTS_COLUMNS, + "watched playlists", + ) - # Upgrade all dynamic playlist_ tables - cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'") - for row in cur.fetchall(): - table_name = row[0] - conn.execute( - f""" + # Upgrade all dynamic playlist_ tables + cur = conn.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'playlist_%'" + ) + for row in cur.fetchall(): + table_name = row[0] + conn.execute( + f""" CREATE TABLE IF NOT EXISTS {table_name} ( spotify_track_id TEXT PRIMARY KEY, title TEXT, @@ -268,18 +291,25 @@ def _update_watch_playlists_db(conn: sqlite3.Connection) -> None: final_path TEXT ) """ - ) - _ensure_table_schema(conn, table_name, EXPECTED_PLAYLIST_TRACKS_COLUMNS, f"playlist tracks ({table_name})") - logger.info("Upgraded watch playlists DB to 3.1.2 schema") - except Exception: - logger.error("Failed to upgrade watch playlists DB to 3.1.2 schema", exc_info=True) + ) + _ensure_table_schema( + conn, + table_name, + EXPECTED_PLAYLIST_TRACKS_COLUMNS, + f"playlist tracks ({table_name})", + ) + logger.info("Upgraded watch playlists DB to 3.1.2 schema") + except Exception: + logger.error( + "Failed to upgrade watch playlists DB to 3.1.2 schema", exc_info=True + ) def _update_watch_artists_db(conn: sqlite3.Connection) -> None: - try: - # Ensure core watched_artists table exists and has expected schema - conn.execute( - """ + try: + # Ensure core watched_artists table exists and has expected schema + conn.execute( + """ CREATE TABLE IF NOT EXISTS watched_artists ( spotify_id TEXT PRIMARY KEY, name TEXT, @@ -293,15 +323,19 @@ def _update_watch_artists_db(conn: sqlite3.Connection) -> None: image_url TEXT ) """ - ) - _ensure_table_schema(conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists") + ) + _ensure_table_schema( + conn, "watched_artists", EXPECTED_WATCHED_ARTISTS_COLUMNS, "watched artists" + ) - # Upgrade all dynamic artist_ tables - cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'") - for row in cur.fetchall(): - table_name = row[0] - conn.execute( - f""" + # Upgrade all dynamic artist_ tables + cur = conn.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'artist_%'" + ) + for row in cur.fetchall(): + table_name = row[0] + conn.execute( + f""" CREATE TABLE IF NOT EXISTS {table_name} ( album_spotify_id TEXT PRIMARY KEY, 
artist_spotify_id TEXT, @@ -320,11 +354,18 @@ def _update_watch_artists_db(conn: sqlite3.Connection) -> None: is_fully_downloaded_managed_by_app INTEGER DEFAULT 0 ) """ - ) - _ensure_table_schema(conn, table_name, EXPECTED_ARTIST_ALBUMS_COLUMNS, f"artist albums ({table_name})") - logger.info("Upgraded watch artists DB to 3.1.2 schema") - except Exception: - logger.error("Failed to upgrade watch artists DB to 3.1.2 schema", exc_info=True) + ) + _ensure_table_schema( + conn, + table_name, + EXPECTED_ARTIST_ALBUMS_COLUMNS, + f"artist albums ({table_name})", + ) + logger.info("Upgraded watch artists DB to 3.1.2 schema") + except Exception: + logger.error( + "Failed to upgrade watch artists DB to 3.1.2 schema", exc_info=True + ) def run_migrations_if_needed(): @@ -343,6 +384,7 @@ def run_migrations_if_needed(): post_update=_update_children_tables_for_history, ) _apply_versioned_updates(conn, m311.check_history, m311.update_history) + _apply_versioned_updates(conn, m312.check_history, m312.update_history) conn.commit() # Watch playlists DB @@ -358,6 +400,11 @@ def run_migrations_if_needed(): m311.check_watch_playlists, m311.update_watch_playlists, ) + _apply_versioned_updates( + conn, + m312.check_watch_playlists, + m312.update_watch_playlists, + ) _update_watch_playlists_db(conn) conn.commit() @@ -374,18 +421,28 @@ def run_migrations_if_needed(): _apply_versioned_updates( conn, m311.check_watch_artists, m311.update_watch_artists ) + _apply_versioned_updates( + conn, m312.check_watch_artists, m312.update_watch_artists + ) _update_watch_artists_db(conn) conn.commit() # Accounts DB with _safe_connect(ACCOUNTS_DB) as conn: if conn: - _apply_versioned_updates(conn, m306.check_accounts, m306.update_accounts) - _apply_versioned_updates(conn, m311.check_accounts, m311.update_accounts) + _apply_versioned_updates( + conn, m306.check_accounts, m306.update_accounts + ) + _apply_versioned_updates( + conn, m311.check_accounts, m311.update_accounts + ) + _apply_versioned_updates( + conn, m312.check_accounts, m312.update_accounts + ) conn.commit() except Exception as e: logger.error("Error during migration: %s", e, exc_info=True) else: _ensure_creds_filesystem() - logger.info("Database migrations check completed") \ No newline at end of file + logger.info("Database migrations check completed") diff --git a/routes/migrations/v3_1_2.py b/routes/migrations/v3_1_2.py new file mode 100644 index 0000000..1e70fc9 --- /dev/null +++ b/routes/migrations/v3_1_2.py @@ -0,0 +1,42 @@ +import sqlite3 + + +class MigrationV3_1_2: + """ + Dummy migration for version 3.1.2. + No database schema changes were made between these versions. + This class serves as a placeholder to ensure the migration runner + is aware of this version and can proceed without errors. + """ + + def check_history(self, conn: sqlite3.Connection) -> bool: + # No changes, so migration is not needed. + return True + + def update_history(self, conn: sqlite3.Connection) -> None: + # No-op + pass + + def check_watch_artists(self, conn: sqlite3.Connection) -> bool: + # No changes, so migration is not needed. + return True + + def update_watch_artists(self, conn: sqlite3.Connection) -> None: + # No-op + pass + + def check_watch_playlists(self, conn: sqlite3.Connection) -> bool: + # No changes, so migration is not needed. + return True + + def update_watch_playlists(self, conn: sqlite3.Connection) -> None: + # No-op + pass + + def check_accounts(self, conn: sqlite3.Connection) -> bool: + # No changes, so migration is not needed. 
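+        # (Returning True tells the migration runner that this version needs
+        # no work, so the matching update_* no-op is never invoked; see
+        # _apply_versioned_updates in routes/migrations.)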
+ return True + + def update_accounts(self, conn: sqlite3.Connection) -> None: + # No-op + pass From 93f8a019ccb2a3508aed314ed764c92bb59a64a4 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Tue, 19 Aug 2025 14:37:14 -0600 Subject: [PATCH 07/14] fix: #273 --- routes/content/artist.py | 8 +- routes/utils/get_info.py | 2 +- spotizerr-ui/package.json | 2 +- spotizerr-ui/src/routes/artist.tsx | 244 ++++++++++++++++++++++------- 4 files changed, 191 insertions(+), 65 deletions(-) diff --git a/routes/content/artist.py b/routes/content/artist.py index e967ca8..b4c5302 100644 --- a/routes/content/artist.py +++ b/routes/content/artist.py @@ -2,7 +2,7 @@ Artist endpoint router. """ -from fastapi import APIRouter, HTTPException, Request, Depends +from fastapi import APIRouter, HTTPException, Request, Depends, Query from fastapi.responses import JSONResponse import json import traceback @@ -118,7 +118,9 @@ async def cancel_artist_download(): @router.get("/info") async def get_artist_info( - request: Request, current_user: User = Depends(require_auth_from_state) + request: Request, current_user: User = Depends(require_auth_from_state), + limit: int = Query(10, ge=1), # default=10, must be >=1 + offset: int = Query(0, ge=0) # default=0, must be >=0 ): """ Retrieves Spotify artist metadata given a Spotify artist ID. @@ -134,7 +136,7 @@ async def get_artist_info( artist_metadata = get_spotify_info(spotify_id, "artist") # Get artist discography for albums - artist_discography = get_spotify_info(spotify_id, "artist_discography") + artist_discography = get_spotify_info(spotify_id, "artist_discography", limit=limit, offset=offset) # Combine metadata with discography artist_info = {**artist_metadata, "albums": artist_discography} diff --git a/routes/utils/get_info.py b/routes/utils/get_info.py index c027fc9..7729b17 100644 --- a/routes/utils/get_info.py +++ b/routes/utils/get_info.py @@ -269,7 +269,7 @@ def get_spotify_info( elif spotify_type == "artist_discography": # Get artist's albums with pagination albums = client.artist_albums( - spotify_id, limit=limit or 20, offset=offset or 0 + spotify_id, limit=limit or 20, offset=offset or 0, include_groups="single,album,appears_on" ) return albums diff --git a/spotizerr-ui/package.json b/spotizerr-ui/package.json index 5e81c13..54ed741 100644 --- a/spotizerr-ui/package.json +++ b/spotizerr-ui/package.json @@ -1,7 +1,7 @@ { "name": "spotizerr-ui", "private": true, - "version": "3.1.2", + "version": "3.2.0", "type": "module", "scripts": { "dev": "vite", diff --git a/spotizerr-ui/src/routes/artist.tsx b/spotizerr-ui/src/routes/artist.tsx index 7664216..7a59725 100644 --- a/spotizerr-ui/src/routes/artist.tsx +++ b/spotizerr-ui/src/routes/artist.tsx @@ -1,5 +1,5 @@ import { Link, useParams } from "@tanstack/react-router"; -import { useEffect, useState, useContext } from "react"; +import { useEffect, useState, useContext, useRef, useCallback } from "react"; import { toast } from "sonner"; import apiClient from "../lib/api-client"; import type { AlbumType, ArtistType, TrackType } from "../types/spotify"; @@ -18,58 +18,170 @@ export const Artist = () => { const context = useContext(QueueContext); const { settings } = useSettings(); + const sentinelRef = useRef(null); + + // Pagination state + const LIMIT = 20; // tune as you like + const [offset, setOffset] = useState(0); + const [loading, setLoading] = useState(false); + const [loadingMore, setLoadingMore] = useState(false); + const [hasMore, setHasMore] = useState(true); // assume more until we learn otherwise + if 
(!context) { throw new Error("useQueue must be used within a QueueProvider"); } const { addItem } = context; + const applyFilters = useCallback( + (items: AlbumType[]) => { + return items.filter((item) => (settings?.explicitFilter ? !item.explicit : true)); + }, + [settings?.explicitFilter] + ); + + // Helper to dedupe albums by id + const dedupeAppendAlbums = (current: AlbumType[], incoming: AlbumType[]) => { + const seen = new Set(current.map((a) => a.id)); + const filtered = incoming.filter((a) => !seen.has(a.id)); + return current.concat(filtered); + }; + + // Fetch artist info & first page of albums useEffect(() => { - const fetchArtistData = async () => { - if (!artistId) return; + if (!artistId) return; + + let cancelled = false; + + const fetchInitial = async () => { + setLoading(true); + setError(null); + setAlbums([]); + setOffset(0); + setHasMore(true); + try { - const response = await apiClient.get(`/artist/info?id=${artistId}`); - const artistData = response.data; + const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=0`); + const data = resp.data; - // Check if we have artist data in the response - if (artistData?.id && artistData?.name) { - // Set artist info directly from the response - setArtist({ - id: artistData.id, - name: artistData.name, - images: artistData.images || [], - external_urls: artistData.external_urls || { spotify: "" }, - followers: artistData.followers || { total: 0 }, - genres: artistData.genres || [], - popularity: artistData.popularity || 0, - type: artistData.type || 'artist', - uri: artistData.uri || '' - }); + if (cancelled) return; - // Check if we have albums data - if (artistData?.albums?.items && artistData.albums.items.length > 0) { - setAlbums(artistData.albums.items); + if (data?.id && data?.name) { + // set artist meta + setArtist({ + id: data.id, + name: data.name, + images: data.images || [], + external_urls: data.external_urls || { spotify: "" }, + followers: data.followers || { total: 0 }, + genres: data.genres || [], + popularity: data.popularity || 0, + type: data.type || "artist", + uri: data.uri || "", + }); + + // top tracks (if provided) + if (Array.isArray(data.top_tracks)) { + setTopTracks(data.top_tracks); } else { - setError("No albums found for this artist."); - return; + setTopTracks([]); + } + + // albums pagination info + const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || []; + const total: number | undefined = data?.albums?.total; + + setAlbums(items); + setOffset(items.length); + if (typeof total === "number") { + setHasMore(items.length < total); + } else { + // If server didn't return total, default behavior: stop when an empty page arrives. 
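+            // (A non-empty first page keeps hasMore true; fetchMoreAlbums then
+            // flips it off once a page comes back shorter than LIMIT.)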
+ setHasMore(items.length > 0); } } else { setError("Could not load artist data."); - return; } - setTopTracks([]); - - const watchStatusResponse = await apiClient.get<{ is_watched: boolean }>(`/artist/watch/${artistId}/status`); - setIsWatched(watchStatusResponse.data.is_watched); + // fetch watch status + try { + const watchStatusResponse = await apiClient.get<{ is_watched: boolean }>(`/artist/watch/${artistId}/status`); + if (!cancelled) setIsWatched(watchStatusResponse.data.is_watched); + } catch (e) { + // ignore watch status errors + console.warn("Failed to load watch status", e); + } } catch (err) { - setError("Failed to load artist page"); - console.error(err); + if (!cancelled) { + console.error(err); + setError("Failed to load artist page"); + } + } finally { + if (!cancelled) setLoading(false); } }; - fetchArtistData(); - }, [artistId]); + fetchInitial(); + return () => { + cancelled = true; + }; + }, [artistId, LIMIT]); + + // Fetch more albums (next page) + const fetchMoreAlbums = useCallback(async () => { + if (!artistId || loadingMore || loading || !hasMore) return; + setLoadingMore(true); + + try { + const resp = await apiClient.get(`/artist/info?id=${artistId}&limit=${LIMIT}&offset=${offset}`); + const data = resp.data; + const items: AlbumType[] = (data?.albums?.items as AlbumType[]) || []; + const total: number | undefined = data?.albums?.total; + + setAlbums((cur) => dedupeAppendAlbums(cur, items)); + setOffset((cur) => cur + items.length); + + if (typeof total === "number") { + setHasMore((prev) => prev && offset + items.length < total); + } else { + // if server doesn't expose total, stop when we get fewer than LIMIT items + setHasMore(items.length === LIMIT); + } + } catch (err) { + console.error("Failed to load more albums", err); + toast.error("Failed to load more albums"); + setHasMore(false); + } finally { + setLoadingMore(false); + } + }, [artistId, offset, LIMIT, loadingMore, loading, hasMore]); + + // IntersectionObserver to trigger fetchMoreAlbums when sentinel is visible + useEffect(() => { + const sentinel = sentinelRef.current; + if (!sentinel) return; + if (!hasMore) return; + + const observer = new IntersectionObserver( + (entries) => { + entries.forEach((entry) => { + if (entry.isIntersecting) { + fetchMoreAlbums(); + } + }); + }, + { + root: null, + rootMargin: "400px", // start loading a bit before the sentinel enters viewport + threshold: 0.1, + } + ); + + observer.observe(sentinel); + return () => observer.disconnect(); + }, [fetchMoreAlbums, hasMore]); + + // --- existing handlers (unchanged) --- const handleDownloadTrack = (track: TrackType) => { if (!track.id) return; toast.info(`Adding ${track.name} to queue...`); @@ -83,31 +195,25 @@ export const Artist = () => { const handleDownloadArtist = async () => { if (!artistId || !artist) return; - + try { toast.info(`Downloading ${artist.name} discography...`); - + // Call the artist download endpoint which returns album task IDs const response = await apiClient.get(`/artist/download/${artistId}`); - + if (response.data.queued_albums?.length > 0) { - toast.success( - `${artist.name} discography queued successfully!`, - { - description: `${response.data.queued_albums.length} albums added to queue.`, - } - ); + toast.success(`${artist.name} discography queued successfully!`, { + description: `${response.data.queued_albums.length} albums added to queue.`, + }); } else { toast.info("No new albums to download for this artist."); } } catch (error: any) { console.error("Artist download failed:", error); - 
toast.error(
-        "Failed to download artist",
-        {
-          description: error.response?.data?.error || "An unexpected error occurred.",
-        }
-      );
+      toast.error("Failed to download artist", {
+        description: error.response?.data?.error || "An unexpected error occurred.",
+      });
    }
  };

@@ -132,18 +238,14 @@ export const Artist = () => {
    return <div
{error}
; } - if (!artist) { + if (loading && !artist) { return
Loading...
; } - if (!artist.name) { + if (!artist) { return
Artist data could not be fully loaded. Please try again later.
; } - const applyFilters = (items: AlbumType[]) => { - return items.filter((item) => (settings?.explicitFilter ? !item.explicit : true)); - }; - const artistAlbums = applyFilters(albums.filter((album) => album.album_type === "album")); const artistSingles = applyFilters(albums.filter((album) => album.album_type === "single")); const artistCompilations = applyFilters(albums.filter((album) => album.album_type === "compilation")); @@ -178,11 +280,10 @@ export const Artist = () => {
>
      )}
+      {/* Albums */}
      {artistAlbums.length > 0 && (
        <div

Albums

@@ -234,6 +340,7 @@ export const Artist = () => {
        </div
>
      )}
+      {/* Singles */}
      {artistSingles.length > 0 && (
        <div

Singles

@@ -245,6 +352,7 @@ export const Artist = () => {
        </div
>
      )}
+      {/* Compilations */}
      {artistCompilations.length > 0 && (
        <div

Compilations

@@ -255,6 +363,22 @@ export const Artist = () => {
        </div
>
      )}
+
+      {/* sentinel + loading */}
+      <div
+ {loadingMore &&
Loading more...
>}
+        {!hasMore && !loading && <div
End of discography
>}
+        {/* fallback load more button for browsers that block IntersectionObserver or for manual control */}
+        {hasMore && !loadingMore && (
+          <button
+
); }; From cf6d367915a5c65ec61b08caa7a9c52546327c88 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Tue, 19 Aug 2025 21:26:14 -0600 Subject: [PATCH 08/14] feat: Add real_time_multiplier to backend --- routes/content/album.py | 69 +++++++++++++++-------- routes/system/config.py | 23 ++++++++ routes/utils/album.py | 3 + routes/utils/celery_config.py | 2 + routes/utils/celery_queue_manager.py | 19 +++++-- routes/utils/celery_tasks.py | 12 ++++ routes/utils/get_info.py | 13 ++++- routes/utils/playlist.py | 3 + routes/utils/track.py | 3 + spotizerr-ui/src/routes/album.tsx | 83 +++++++++++++++++++++++++--- 10 files changed, 191 insertions(+), 39 deletions(-) diff --git a/routes/content/album.py b/routes/content/album.py index ba797d2..de98864 100755 --- a/routes/content/album.py +++ b/routes/content/album.py @@ -1,6 +1,5 @@ -from fastapi import APIRouter, HTTPException, Request, Depends +from fastapi import APIRouter, Request, Depends from fastapi.responses import JSONResponse -import json import traceback import uuid import time @@ -21,7 +20,11 @@ def construct_spotify_url(item_id: str, item_type: str = "track") -> str: @router.get("/download/{album_id}") -async def handle_download(album_id: str, request: Request, current_user: User = Depends(require_auth_from_state)): +async def handle_download( + album_id: str, + request: Request, + current_user: User = Depends(require_auth_from_state), +): # Retrieve essential parameters from the request. # name = request.args.get('name') # artist = request.args.get('artist') @@ -38,8 +41,10 @@ async def handle_download(album_id: str, request: Request, current_user: User = or not album_info.get("artists") ): return JSONResponse( - content={"error": f"Could not retrieve metadata for album ID: {album_id}"}, - status_code=404 + content={ + "error": f"Could not retrieve metadata for album ID: {album_id}" + }, + status_code=404, ) name_from_spotify = album_info.get("name") @@ -51,15 +56,16 @@ async def handle_download(album_id: str, request: Request, current_user: User = except Exception as e: return JSONResponse( - content={"error": f"Failed to fetch metadata for album {album_id}: {str(e)}"}, - status_code=500 + content={ + "error": f"Failed to fetch metadata for album {album_id}: {str(e)}" + }, + status_code=500, ) # Validate required parameters if not url: return JSONResponse( - content={"error": "Missing required parameter: url"}, - status_code=400 + content={"error": "Missing required parameter: url"}, status_code=400 ) # Add the task to the queue with only essential parameters @@ -84,7 +90,7 @@ async def handle_download(album_id: str, request: Request, current_user: User = "error": "Duplicate download detected.", "existing_task": e.existing_task, }, - status_code=409 + status_code=409, ) except Exception as e: # Generic error handling for other issues during task submission @@ -116,25 +122,23 @@ async def handle_download(album_id: str, request: Request, current_user: User = "error": f"Failed to queue album download: {str(e)}", "task_id": error_task_id, }, - status_code=500 + status_code=500, ) - return JSONResponse( - content={"task_id": task_id}, - status_code=202 - ) + return JSONResponse(content={"task_id": task_id}, status_code=202) @router.get("/download/cancel") -async def cancel_download(request: Request, current_user: User = Depends(require_auth_from_state)): +async def cancel_download( + request: Request, current_user: User = Depends(require_auth_from_state) +): """ Cancel a running download process by its task id. 
""" task_id = request.query_params.get("task_id") if not task_id: return JSONResponse( - content={"error": "Missing process id (task_id) parameter"}, - status_code=400 + content={"error": "Missing process id (task_id) parameter"}, status_code=400 ) # Use the queue manager's cancellation method. @@ -145,7 +149,9 @@ async def cancel_download(request: Request, current_user: User = Depends(require @router.get("/info") -async def get_album_info(request: Request, current_user: User = Depends(require_auth_from_state)): +async def get_album_info( + request: Request, current_user: User = Depends(require_auth_from_state) +): """ Retrieve Spotify album metadata given a Spotify album ID. Expects a query parameter 'id' that contains the Spotify album ID. @@ -153,15 +159,30 @@ async def get_album_info(request: Request, current_user: User = Depends(require_ spotify_id = request.query_params.get("id") if not spotify_id: - return JSONResponse( - content={"error": "Missing parameter: id"}, - status_code=400 - ) + return JSONResponse(content={"error": "Missing parameter: id"}, status_code=400) try: - # Use the get_spotify_info function (already imported at top) + # Optional pagination params for tracks + limit_param = request.query_params.get("limit") + offset_param = request.query_params.get("offset") + limit = int(limit_param) if limit_param is not None else None + offset = int(offset_param) if offset_param is not None else None + + # Fetch album metadata album_info = get_spotify_info(spotify_id, "album") + # Fetch album tracks with pagination + album_tracks = get_spotify_info( + spotify_id, "album_tracks", limit=limit, offset=offset + ) + + # Merge tracks into album payload in the same shape Spotify returns on album + album_info["tracks"] = album_tracks + return JSONResponse(content=album_info, status_code=200) + except ValueError as ve: + return JSONResponse( + content={"error": f"Invalid limit/offset: {str(ve)}"}, status_code=400 + ) except Exception as e: error_data = {"error": str(e), "traceback": traceback.format_exc()} return JSONResponse(content=error_data, status_code=500) diff --git a/routes/system/config.py b/routes/system/config.py index ecf7f0c..95fead3 100644 --- a/routes/system/config.py +++ b/routes/system/config.py @@ -72,6 +72,28 @@ def validate_config(config_data: dict, watch_config: dict = None) -> tuple[bool, if watch_config is None: watch_config = get_watch_config_http() + # Ensure realTimeMultiplier is a valid integer in range 0..10 if provided + if "realTimeMultiplier" in config_data or "real_time_multiplier" in config_data: + key = ( + "realTimeMultiplier" + if "realTimeMultiplier" in config_data + else "real_time_multiplier" + ) + val = config_data.get(key) + if isinstance(val, bool): + return False, "realTimeMultiplier must be an integer between 0 and 10." + try: + ival = int(val) + except Exception: + return False, "realTimeMultiplier must be an integer between 0 and 10." + if ival < 0 or ival > 10: + return False, "realTimeMultiplier must be between 0 and 10." 
+ # Normalize to camelCase in the working dict so save_config writes it + if key == "real_time_multiplier": + config_data["realTimeMultiplier"] = ival + else: + config_data["realTimeMultiplier"] = ival + # Check if fallback is enabled but missing required accounts if config_data.get("fallback", False): has_spotify = has_credentials("spotify") @@ -169,6 +191,7 @@ def _migrate_legacy_keys_inplace(cfg: dict) -> bool: "artist_separator": "artistSeparator", "recursive_quality": "recursiveQuality", "spotify_metadata": "spotifyMetadata", + "real_time_multiplier": "realTimeMultiplier", } modified = False for legacy, camel in legacy_map.items(): diff --git a/routes/utils/album.py b/routes/utils/album.py index 23c7c06..b81e0fa 100755 --- a/routes/utils/album.py +++ b/routes/utils/album.py @@ -31,6 +31,7 @@ def download_album( recursive_quality=True, spotify_metadata=True, _is_celery_task_execution=False, # Added to skip duplicate check from Celery task + real_time_multiplier=None, ): if not _is_celery_task_execution: existing_task = get_existing_task_id( @@ -173,6 +174,7 @@ def download_album( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + real_time_multiplier=real_time_multiplier, ) print( f"DEBUG: album.py - Spotify direct download (account: {main} for blob) successful." @@ -228,6 +230,7 @@ def download_album( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + real_time_multiplier=real_time_multiplier, ) print( f"DEBUG: album.py - Direct Spotify download (account: {main} for blob) successful." diff --git a/routes/utils/celery_config.py b/routes/utils/celery_config.py index 5eeb64a..f95812e 100644 --- a/routes/utils/celery_config.py +++ b/routes/utils/celery_config.py @@ -49,6 +49,7 @@ DEFAULT_MAIN_CONFIG = { "spotifyMetadata": True, "separateTracksByUser": False, "watch": {}, + "realTimeMultiplier": 0, } @@ -63,6 +64,7 @@ def _migrate_legacy_keys(cfg: dict) -> tuple[dict, bool]: "artist_separator": "artistSeparator", "recursive_quality": "recursiveQuality", "spotify_metadata": "spotifyMetadata", + "real_time_multiplier": "realTimeMultiplier", } for legacy, camel in legacy_map.items(): if legacy in out and camel not in out: diff --git a/routes/utils/celery_queue_manager.py b/routes/utils/celery_queue_manager.py index d067a27..10b47f1 100644 --- a/routes/utils/celery_queue_manager.py +++ b/routes/utils/celery_queue_manager.py @@ -72,6 +72,9 @@ def get_config_params(): ), "separateTracksByUser": config.get("separateTracksByUser", False), "watch": config.get("watch", {}), + "realTimeMultiplier": config.get( + "realTimeMultiplier", config.get("real_time_multiplier", 0) + ), } except Exception as e: logger.error(f"Error reading config for parameters: {e}") @@ -96,6 +99,7 @@ def get_config_params(): "recursiveQuality": False, "separateTracksByUser": False, "watch": {}, + "realTimeMultiplier": 0, } @@ -363,7 +367,7 @@ class CeleryDownloadQueueManager: original_request = task.get( "orig_request", task.get("original_request", {}) ) - + # Get username for user-specific paths username = task.get("username", "") @@ -389,9 +393,11 @@ class CeleryDownloadQueueManager: original_request.get("real_time"), config_params["realTime"] ), "custom_dir_format": self._get_user_specific_dir_format( - original_request.get("custom_dir_format", config_params["customDirFormat"]), + original_request.get( + "custom_dir_format", config_params["customDirFormat"] + ), config_params.get("separateTracksByUser", False), - username + username, ), "custom_track_format": 
original_request.get( "custom_track_format", config_params["customTrackFormat"] @@ -419,6 +425,9 @@ class CeleryDownloadQueueManager: "retry_count": 0, "original_request": original_request, "created_at": time.time(), + "real_time_multiplier": original_request.get( + "real_time_multiplier", config_params.get("realTimeMultiplier", 0) + ), } # If from_watch_job is True, ensure track_details_for_db is passed through @@ -497,12 +506,12 @@ class CeleryDownloadQueueManager: def _get_user_specific_dir_format(self, base_format, separate_by_user, username): """ Modify the directory format to include username if separateTracksByUser is enabled - + Args: base_format (str): The base directory format from config separate_by_user (bool): Whether to separate tracks by user username (str): The username to include in path - + Returns: str: The modified directory format """ diff --git a/routes/utils/celery_tasks.py b/routes/utils/celery_tasks.py index 654d8a7..f1a384d 100644 --- a/routes/utils/celery_tasks.py +++ b/routes/utils/celery_tasks.py @@ -1626,6 +1626,9 @@ def download_track(self, **task_data): spotify_metadata = task_data.get( "spotify_metadata", config_params.get("spotifyMetadata", True) ) + real_time_multiplier = task_data.get( + "real_time_multiplier", config_params.get("realTimeMultiplier", 0) + ) # Execute the download - service is now determined from URL download_track_func( @@ -1646,6 +1649,7 @@ def download_track(self, **task_data): artist_separator=artist_separator, spotify_metadata=spotify_metadata, _is_celery_task_execution=True, # Skip duplicate check inside Celery task (consistency) + real_time_multiplier=real_time_multiplier, ) return {"status": "success", "message": "Track download completed"} @@ -1725,6 +1729,9 @@ def download_album(self, **task_data): spotify_metadata = task_data.get( "spotify_metadata", config_params.get("spotifyMetadata", True) ) + real_time_multiplier = task_data.get( + "real_time_multiplier", config_params.get("realTimeMultiplier", 0) + ) # Execute the download - service is now determined from URL download_album_func( @@ -1745,6 +1752,7 @@ def download_album(self, **task_data): artist_separator=artist_separator, spotify_metadata=spotify_metadata, _is_celery_task_execution=True, # Skip duplicate check inside Celery task + real_time_multiplier=real_time_multiplier, ) return {"status": "success", "message": "Album download completed"} @@ -1833,6 +1841,9 @@ def download_playlist(self, **task_data): "retry_delay_increase", config_params.get("retryDelayIncrease", 5) ) max_retries = task_data.get("max_retries", config_params.get("maxRetries", 3)) + real_time_multiplier = task_data.get( + "real_time_multiplier", config_params.get("realTimeMultiplier", 0) + ) # Execute the download - service is now determined from URL download_playlist_func( @@ -1856,6 +1867,7 @@ def download_playlist(self, **task_data): artist_separator=artist_separator, spotify_metadata=spotify_metadata, _is_celery_task_execution=True, # Skip duplicate check inside Celery task + real_time_multiplier=real_time_multiplier, ) return {"status": "success", "message": "Playlist download completed"} diff --git a/routes/utils/get_info.py b/routes/utils/get_info.py index 7729b17..df1384d 100644 --- a/routes/utils/get_info.py +++ b/routes/utils/get_info.py @@ -239,7 +239,7 @@ def get_spotify_info( Args: spotify_id: The Spotify ID of the entity - spotify_type: The type of entity (track, album, playlist, artist, artist_discography, episode) + spotify_type: The type of entity (track, album, playlist, artist, 
artist_discography, episode, album_tracks) limit (int, optional): The maximum number of items to return. Used for pagination. offset (int, optional): The index of the first item to return. Used for pagination. @@ -255,6 +255,12 @@ def get_spotify_info( elif spotify_type == "album": return client.album(spotify_id) + elif spotify_type == "album_tracks": + # Fetch album's tracks with pagination support + return client.album_tracks( + spotify_id, limit=limit or 20, offset=offset or 0 + ) + elif spotify_type == "playlist": # Use optimized playlist fetching return get_playlist_full(spotify_id) @@ -269,7 +275,10 @@ def get_spotify_info( elif spotify_type == "artist_discography": # Get artist's albums with pagination albums = client.artist_albums( - spotify_id, limit=limit or 20, offset=offset or 0, include_groups="single,album,appears_on" + spotify_id, + limit=limit or 20, + offset=offset or 0, + include_groups="single,album,appears_on", ) return albums diff --git a/routes/utils/playlist.py b/routes/utils/playlist.py index d6105b3..ffd47ed 100755 --- a/routes/utils/playlist.py +++ b/routes/utils/playlist.py @@ -28,6 +28,7 @@ def download_playlist( recursive_quality=True, spotify_metadata=True, _is_celery_task_execution=False, # Added to skip duplicate check from Celery task + real_time_multiplier=None, ): if not _is_celery_task_execution: existing_task = get_existing_task_id( @@ -175,6 +176,7 @@ def download_playlist( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + real_time_multiplier=real_time_multiplier, ) print( f"DEBUG: playlist.py - Spotify direct download (account: {main} for blob) successful." @@ -236,6 +238,7 @@ def download_playlist( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + real_time_multiplier=real_time_multiplier, ) print( f"DEBUG: playlist.py - Direct Spotify download (account: {main} for blob) successful." diff --git a/routes/utils/track.py b/routes/utils/track.py index 61d86af..6af3c7d 100755 --- a/routes/utils/track.py +++ b/routes/utils/track.py @@ -29,6 +29,7 @@ def download_track( recursive_quality=False, spotify_metadata=True, _is_celery_task_execution=False, # Added for consistency, not currently used for duplicate check + real_time_multiplier=None, ): try: # Detect URL source (Spotify or Deezer) from URL @@ -166,6 +167,7 @@ def download_track( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + real_time_multiplier=real_time_multiplier, ) print( f"DEBUG: track.py - Spotify direct download (account: {main} for blob) successful." @@ -222,6 +224,7 @@ def download_track( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + real_time_multiplier=real_time_multiplier, ) print( f"DEBUG: track.py - Direct Spotify download (account: {main} for blob) successful." 
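The frontend changes below consume the paginated /album/info contract introduced above: the endpoint merges the album's tracks into the payload as Spotify's usual items/total paging object. As a minimal sketch of that contract (the httpx client, base URL, and helper name here are illustrative assumptions, not part of this patch), a caller could collect an album's full track list like this:

    import httpx

    def fetch_all_album_tracks(base_url: str, album_id: str, page_size: int = 50) -> list[dict]:
        """Walk the /album/info limit/offset pages until every track is collected."""
        tracks: list[dict] = []
        offset = 0
        while True:
            # (Auth headers omitted for brevity; the route requires a logged-in session.)
            resp = httpx.get(
                f"{base_url}/album/info",
                params={"id": album_id, "limit": page_size, "offset": offset},
            )
            resp.raise_for_status()
            paging = resp.json()["tracks"]
            items = paging.get("items", [])
            tracks.extend(items)
            offset += len(items)
            # Stop on an empty page, or once the reported total (when present) is reached.
            if not items or (paging.get("total") is not None and offset >= paging["total"]):
                break
        return tracks
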
diff --git a/spotizerr-ui/src/routes/album.tsx b/spotizerr-ui/src/routes/album.tsx index 74a7825..502001c 100644 --- a/spotizerr-ui/src/routes/album.tsx +++ b/spotizerr-ui/src/routes/album.tsx @@ -1,5 +1,5 @@ import { Link, useParams } from "@tanstack/react-router"; -import { useEffect, useState, useContext } from "react"; +import { useEffect, useState, useContext, useRef, useCallback } from "react"; import apiClient from "../lib/api-client"; import { QueueContext } from "../contexts/queue-context"; import { useSettings } from "../contexts/settings-context"; @@ -10,31 +10,91 @@ import { FaArrowLeft } from "react-icons/fa"; export const Album = () => { const { albumId } = useParams({ from: "/album/$albumId" }); const [album, setAlbum] = useState(null); + const [tracks, setTracks] = useState([]); + const [offset, setOffset] = useState(0); + const [isLoading, setIsLoading] = useState(false); + const [isLoadingMore, setIsLoadingMore] = useState(false); const [error, setError] = useState(null); const context = useContext(QueueContext); const { settings } = useSettings(); + const loadMoreRef = useRef(null); + + const PAGE_SIZE = 50; if (!context) { throw new Error("useQueue must be used within a QueueProvider"); } const { addItem } = context; + const totalTracks = album?.total_tracks ?? 0; + const hasMore = tracks.length < totalTracks; + + // Initial load useEffect(() => { const fetchAlbum = async () => { + if (!albumId) return; + setIsLoading(true); + setError(null); try { - const response = await apiClient.get(`/album/info?id=${albumId}`); - setAlbum(response.data); + const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=0`); + const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data; + setAlbum(data); + setTracks(data.tracks.items || []); + setOffset((data.tracks.items || []).length); } catch (err) { setError("Failed to load album"); console.error("Error fetching album:", err); + } finally { + setIsLoading(false); } }; + // reset state when albumId changes + setAlbum(null); + setTracks([]); + setOffset(0); if (albumId) { fetchAlbum(); } }, [albumId]); + const loadMore = useCallback(async () => { + if (!albumId || isLoadingMore || !hasMore) return; + setIsLoadingMore(true); + try { + const response = await apiClient.get(`/album/info?id=${albumId}&limit=${PAGE_SIZE}&offset=${offset}`); + const data: AlbumType & { tracks: { items: TrackType[]; total?: number; limit?: number; offset?: number } } = response.data; + const newItems = data.tracks.items || []; + setTracks((prev) => [...prev, ...newItems]); + setOffset((prev) => prev + newItems.length); + } catch (err) { + console.error("Error fetching more tracks:", err); + } finally { + setIsLoadingMore(false); + } + }, [albumId, offset, isLoadingMore, hasMore]); + + // IntersectionObserver to trigger loadMore + useEffect(() => { + if (!loadMoreRef.current) return; + const sentinel = loadMoreRef.current; + const observer = new IntersectionObserver( + (entries) => { + const first = entries[0]; + if (first.isIntersecting) { + loadMore(); + } + }, + { root: null, rootMargin: "200px", threshold: 0.1 } + ); + + observer.observe(sentinel); + return () => { + observer.unobserve(sentinel); + observer.disconnect(); + }; + }, [loadMore]); + const handleDownloadTrack = (track: TrackType) => { if (!track.id) return; toast.info(`Adding ${track.name} to queue...`); @@ -51,7 +111,7 @@ export const Album = () => { return
{error}
; } - if (!album) { + if (!album || isLoading) { return
Loading...
; } @@ -67,7 +127,7 @@ export const Album = () => { ); } - const hasExplicitTrack = album.tracks.items.some((track) => track.explicit); + const hasExplicitTrack = tracks.some((track) => track.explicit); return (
@@ -130,11 +190,11 @@ export const Album = () => {

Tracks

- {album.tracks.items.map((track, index) => { + {tracks.map((track, index) => { if (isExplicitFilterEnabled && track.explicit) { return (
@@ -147,7 +207,7 @@ export const Album = () => { } return (
@@ -188,6 +248,13 @@ export const Album = () => {
); })} +
+ {isLoadingMore && ( +
Loading more...
>
+      )}
+      {!hasMore && tracks.length > 0 && (
+        <div
End of album
+ )}
From e5c014dad5c7710736108ca14c52527c51efc38e Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Tue, 19 Aug 2025 21:59:08 -0600 Subject: [PATCH 09/14] feat: added real time multiplier setting to frontend --- requirements.txt | 2 +- routes/utils/watch/manager.py | 64 ++++++++++++++++++- .../src/components/config/DownloadsTab.tsx | 24 ++++++- .../src/contexts/SettingsProvider.tsx | 3 + spotizerr-ui/src/contexts/settings-context.ts | 1 + 5 files changed, 90 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 74c8a5a..9e7efac 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==2.6.0 +deezspot-spotizerr==2.7.1 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 diff --git a/routes/utils/watch/manager.py b/routes/utils/watch/manager.py index 23d6118..97510b8 100644 --- a/routes/utils/watch/manager.py +++ b/routes/utils/watch/manager.py @@ -30,6 +30,9 @@ from routes.utils.get_info import ( ) # To fetch playlist, track, artist, and album details from routes.utils.celery_queue_manager import download_queue_manager +# Added import to fetch base formatting config +from routes.utils.celery_queue_manager import get_config_params + logger = logging.getLogger(__name__) MAIN_CONFIG_FILE_PATH = Path("./data/config/main.json") WATCH_OLD_FILE_PATH = Path("./data/config/watch.json") @@ -153,6 +156,38 @@ def construct_spotify_url(item_id, item_type="track"): return f"https://open.spotify.com/{item_type}/{item_id}" +# Helper to replace playlist placeholders in custom formats per-track +def _apply_playlist_placeholders( + base_dir_fmt: str, + base_track_fmt: str, + playlist_name: str, + playlist_position_one_based: int, + total_tracks_in_playlist: int, + pad_tracks: bool, +) -> tuple[str, str]: + try: + width = max(2, len(str(total_tracks_in_playlist))) if pad_tracks else 0 + if ( + pad_tracks + and playlist_position_one_based is not None + and playlist_position_one_based > 0 + ): + playlist_num_str = str(playlist_position_one_based).zfill(width) + else: + playlist_num_str = ( + str(playlist_position_one_based) if playlist_position_one_based else "" + ) + + dir_fmt = base_dir_fmt.replace("%playlist%", playlist_name) + track_fmt = base_track_fmt.replace("%playlist%", playlist_name).replace( + "%playlistnum%", playlist_num_str + ) + return dir_fmt, track_fmt + except Exception: + # On any error, return originals + return base_dir_fmt, base_track_fmt + + def has_playlist_changed(playlist_spotify_id: str, current_snapshot_id: str) -> bool: """ Check if a playlist has changed by comparing snapshot_id. @@ -320,6 +355,11 @@ def check_watched_playlists(specific_playlist_id: str = None): ) config = get_watch_config() use_snapshot_checking = config.get("useSnapshotIdChecking", True) + # Fetch base formatting configuration once for this run + formatting_cfg = get_config_params() + base_dir_fmt = formatting_cfg.get("customDirFormat", "%ar_album%/%album%") + base_track_fmt = formatting_cfg.get("customTrackFormat", "%tracknum%. 
%music%") + pad_tracks = formatting_cfg.get("tracknumPadding", True) if specific_playlist_id: playlist_obj = get_watched_playlist(specific_playlist_id) @@ -483,12 +523,17 @@ def check_watched_playlists(specific_playlist_id: str = None): current_api_track_ids = set() api_track_id_to_item_map = {} - for item in all_api_track_items: # Use all_api_track_items + api_track_position_map: dict[str, int] = {} + # Build maps for quick lookup and position within the playlist (1-based) + for idx, item in enumerate( + all_api_track_items, start=1 + ): # Use overall playlist index for numbering track = item.get("track") if track and track.get("id") and not track.get("is_local"): track_id = track["id"] current_api_track_ids.add(track_id) api_track_id_to_item_map[track_id] = item + api_track_position_map[track_id] = idx db_track_ids = get_playlist_track_ids_from_db(playlist_spotify_id) @@ -507,6 +552,19 @@ def check_watched_playlists(specific_playlist_id: str = None): continue track_to_queue = api_item["track"] + # Compute per-track formatting overrides for playlist placeholders + position_in_playlist = api_track_position_map.get(track_id) + custom_dir_format, custom_track_format = ( + _apply_playlist_placeholders( + base_dir_fmt, + base_track_fmt, + playlist_name, + position_in_playlist if position_in_playlist else 0, + api_total_tracks, + pad_tracks, + ) + ) + task_payload = { "download_type": "track", "url": construct_spotify_url(track_id, "track"), @@ -525,7 +583,9 @@ def check_watched_playlists(specific_playlist_id: str = None): "track_spotify_id": track_id, "track_item_for_db": api_item, # Pass full API item for DB update on completion }, - # "track_details_for_db" was old name, using track_item_for_db consistent with celery_tasks + # Override formats so %playlist% and %playlistnum% resolve now per track + "custom_dir_format": custom_dir_format, + "custom_track_format": custom_track_format, } try: task_id_or_none = download_queue_manager.add_task( diff --git a/spotizerr-ui/src/components/config/DownloadsTab.tsx b/spotizerr-ui/src/components/config/DownloadsTab.tsx index e320c39..bb491b8 100644 --- a/spotizerr-ui/src/components/config/DownloadsTab.tsx +++ b/spotizerr-ui/src/components/config/DownloadsTab.tsx @@ -23,6 +23,7 @@ interface DownloadSettings { spotifyQuality: "NORMAL" | "HIGH" | "VERY_HIGH"; recursiveQuality: boolean; // frontend field (sent as camelCase to backend) separateTracksByUser: boolean; + realTimeMultiplier: number; } interface WatchConfig { @@ -150,7 +151,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) { const missingServices: string[] = []; if (!spotifyCredentials?.length) missingServices.push("Spotify"); if (!deezerCredentials?.length) missingServices.push("Deezer"); - const error = `Download Fallback requires accounts to be configured for both services. Missing: ${missingServices.join(", ")}. Configure accounts in the Accounts tab.`; + const error = `Download Fallback requires accounts to be configured for both Spotify and Deezer. Missing: ${missingServices.join(", ")}. Configure accounts in the Accounts tab.`; setValidationError(error); toast.error("Validation failed: " + error); return; @@ -162,6 +163,7 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) { maxRetries: Number(data.maxRetries), retryDelaySeconds: Number(data.retryDelaySeconds), retryDelayIncrease: Number(data.retryDelayIncrease), + realTimeMultiplier: Number(data.realTimeMultiplier ?? 
0), }); }; @@ -188,6 +190,26 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) {
+ {/* Real-time Multiplier (Spotify only) */} +
+
+ + 0–10 +
+ +

+ Controls how fast Spotify real-time downloads go. Only affects Spotify downloads; ignored for Deezer. +

+
diff --git a/spotizerr-ui/src/contexts/SettingsProvider.tsx b/spotizerr-ui/src/contexts/SettingsProvider.tsx index 127f5e8..10b328e 100644 --- a/spotizerr-ui/src/contexts/SettingsProvider.tsx +++ b/spotizerr-ui/src/contexts/SettingsProvider.tsx @@ -62,6 +62,7 @@ export type FlatAppSettings = { compilation: string; artistSeparator: string; spotifyMetadata: boolean; + realTimeMultiplier: number; }; const defaultSettings: FlatAppSettings = { @@ -102,6 +103,7 @@ const defaultSettings: FlatAppSettings = { watch: { enabled: false, }, + realTimeMultiplier: 0, }; interface FetchedCamelCaseSettings { @@ -129,6 +131,7 @@ const fetchSettings = async (): Promise => { ...(camelData as unknown as FlatAppSettings), // Ensure required frontend-only fields exist recursiveQuality: Boolean((camelData as any).recursiveQuality ?? false), + realTimeMultiplier: Number((camelData as any).realTimeMultiplier ?? 0), }; return withDefaults; diff --git a/spotizerr-ui/src/contexts/settings-context.ts b/spotizerr-ui/src/contexts/settings-context.ts index e0c6011..8e9d534 100644 --- a/spotizerr-ui/src/contexts/settings-context.ts +++ b/spotizerr-ui/src/contexts/settings-context.ts @@ -40,6 +40,7 @@ export interface AppSettings { // Add other watch properties from the old type if they still exist in the API response }; // Add other root-level properties from the API if they exist + realTimeMultiplier: number; } export interface SettingsContextType { From 5b261e45f320d83c5d3df187f402f472c0a559f7 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Wed, 20 Aug 2025 09:36:42 -0500 Subject: [PATCH 10/14] fix: #271 --- requirements.txt | 2 +- routes/utils/history_manager.py | 2614 ++++++++++++++++--------------- 2 files changed, 1322 insertions(+), 1294 deletions(-) diff --git a/requirements.txt b/requirements.txt index 9e7efac..1d9e200 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==2.7.1 +deezspot-spotizerr==2.7.2 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 diff --git a/routes/utils/history_manager.py b/routes/utils/history_manager.py index 33a7da1..26b9913 100644 --- a/routes/utils/history_manager.py +++ b/routes/utils/history_manager.py @@ -11,55 +11,55 @@ logger = logging.getLogger(__name__) class HistoryManager: - """ - Manages download history storage using SQLite database. - Stores hierarchical download data from deezspot callback objects. - """ + """ + Manages download history storage using SQLite database. + Stores hierarchical download data from deezspot callback objects. + """ - def __init__(self, db_path: str = "data/history/download_history.db"): - """ - Initialize the history manager with database path. + def __init__(self, db_path: str = "data/history/download_history.db"): + """ + Initialize the history manager with database path. 
- Args: - db_path: Path to SQLite database file - """ - self.db_path = Path(db_path) - self.db_path.parent.mkdir(parents=True, exist_ok=True) - self._ensure_database_exists() + Args: + db_path: Path to SQLite database file + """ + self.db_path = Path(db_path) + self.db_path.parent.mkdir(parents=True, exist_ok=True) + self._ensure_database_exists() - def _ensure_database_exists(self): - """Create database and main table if they don't exist and migrate schema safely.""" - expected_download_history_columns: Dict[str, str] = { - "id": "INTEGER PRIMARY KEY AUTOINCREMENT", - "download_type": "TEXT NOT NULL", - "title": "TEXT NOT NULL", - "artists": "TEXT", - "timestamp": "REAL NOT NULL", - "status": "TEXT NOT NULL", - "service": "TEXT", - "quality_format": "TEXT", - "quality_bitrate": "TEXT", - "total_tracks": "INTEGER", - "successful_tracks": "INTEGER", - "failed_tracks": "INTEGER", - "skipped_tracks": "INTEGER", - "children_table": "TEXT", - "task_id": "TEXT", - "external_ids": "TEXT", - "metadata": "TEXT", - "release_date": "TEXT", - "genres": "TEXT", - "images": "TEXT", - "owner": "TEXT", - "album_type": "TEXT", - "duration_total_ms": "INTEGER", - "explicit": "BOOLEAN", - } + def _ensure_database_exists(self): + """Create database and main table if they don't exist and migrate schema safely.""" + expected_download_history_columns: Dict[str, str] = { + "id": "INTEGER PRIMARY KEY AUTOINCREMENT", + "download_type": "TEXT NOT NULL", + "title": "TEXT NOT NULL", + "artists": "TEXT", + "timestamp": "REAL NOT NULL", + "status": "TEXT NOT NULL", + "service": "TEXT", + "quality_format": "TEXT", + "quality_bitrate": "TEXT", + "total_tracks": "INTEGER", + "successful_tracks": "INTEGER", + "failed_tracks": "INTEGER", + "skipped_tracks": "INTEGER", + "children_table": "TEXT", + "task_id": "TEXT", + "external_ids": "TEXT", + "metadata": "TEXT", + "release_date": "TEXT", + "genres": "TEXT", + "images": "TEXT", + "owner": "TEXT", + "album_type": "TEXT", + "duration_total_ms": "INTEGER", + "explicit": "BOOLEAN", + } - with self._get_connection() as conn: - cursor = conn.cursor() - # 1) Create table if missing with minimal schema - cursor.execute(""" + with self._get_connection() as conn: + cursor = conn.cursor() + # 1) Create table if missing with minimal schema + cursor.execute(""" CREATE TABLE IF NOT EXISTS download_history ( id INTEGER PRIMARY KEY AUTOINCREMENT, download_type TEXT NOT NULL, @@ -67,215 +67,215 @@ class HistoryManager: ) """) - # 2) Ensure/upgrade schema columns idempotently - self._ensure_table_schema( - cursor, - "download_history", - expected_download_history_columns, - "download history", - ) + # 2) Ensure/upgrade schema columns idempotently + self._ensure_table_schema( + cursor, + "download_history", + expected_download_history_columns, + "download history", + ) - # 2.5) Backfill defaults for critical columns to avoid NULLs post-migration - try: - cursor.execute("PRAGMA table_info(download_history)") - cols = {row[1] for row in cursor.fetchall()} - if "title" in cols: - cursor.execute( - """ + # 2.5) Backfill defaults for critical columns to avoid NULLs post-migration + try: + cursor.execute("PRAGMA table_info(download_history)") + cols = {row[1] for row in cursor.fetchall()} + if "title" in cols: + cursor.execute( + """ UPDATE download_history SET title = COALESCE(NULLIF(title, ''), 'Unknown') WHERE title IS NULL OR title = '' """ - ) - if "status" in cols: - cursor.execute( - """ + ) + if "status" in cols: + cursor.execute( + """ UPDATE download_history SET status = 
COALESCE(NULLIF(status, ''), 'unknown') WHERE status IS NULL OR status = '' """ - ) - if "download_type" in cols: - cursor.execute( - """ + ) + if "download_type" in cols: + cursor.execute( + """ UPDATE download_history SET download_type = COALESCE(NULLIF(download_type, ''), 'unknown') WHERE download_type IS NULL OR download_type = '' """ - ) - except Exception as e: - logger.warning( - f"Non-fatal: failed to backfill defaults for download_history: {e}" - ) + ) + except Exception as e: + logger.warning( + f"Non-fatal: failed to backfill defaults for download_history: {e}" + ) - # 3) Migrate legacy columns to new ones (best-effort, non-fatal) - try: - cursor.execute("PRAGMA table_info(download_history)") - cols = {row[1] for row in cursor.fetchall()} + # 3) Migrate legacy columns to new ones (best-effort, non-fatal) + try: + cursor.execute("PRAGMA table_info(download_history)") + cols = {row[1] for row in cursor.fetchall()} - # Legacy timestamp columns → timestamp - if "timestamp" not in cols: - # Add column first - cursor.execute( - "ALTER TABLE download_history ADD COLUMN timestamp REAL" - ) - # Backfill from legacy columns if present - legacy_time_cols = [ - c for c in ["time", "created_at", "date"] if c in cols - ] - if legacy_time_cols: - # Pick the first legacy column to backfill - legacy_col = legacy_time_cols[0] - try: - cursor.execute( - f"UPDATE download_history SET timestamp = CASE WHEN {legacy_col} IS NOT NULL THEN {legacy_col} ELSE strftime('%s','now') END" - ) - except sqlite3.Error: - # Fallback: just set to now - cursor.execute( - "UPDATE download_history SET timestamp = strftime('%s','now')" - ) - else: - # Default all to now if nothing to migrate - cursor.execute( - "UPDATE download_history SET timestamp = strftime('%s','now')" - ) + # Legacy timestamp columns → timestamp + if "timestamp" not in cols: + # Add column first + cursor.execute( + "ALTER TABLE download_history ADD COLUMN timestamp REAL" + ) + # Backfill from legacy columns if present + legacy_time_cols = [ + c for c in ["time", "created_at", "date"] if c in cols + ] + if legacy_time_cols: + # Pick the first legacy column to backfill + legacy_col = legacy_time_cols[0] + try: + cursor.execute( + f"UPDATE download_history SET timestamp = CASE WHEN {legacy_col} IS NOT NULL THEN {legacy_col} ELSE strftime('%s','now') END" + ) + except sqlite3.Error: + # Fallback: just set to now + cursor.execute( + "UPDATE download_history SET timestamp = strftime('%s','now')" + ) + else: + # Default all to now if nothing to migrate + cursor.execute( + "UPDATE download_history SET timestamp = strftime('%s','now')" + ) - # quality → quality_format, bitrate → quality_bitrate - # Handle common legacy pairs non-fataly - cursor.execute("PRAGMA table_info(download_history)") - cols = {row[1] for row in cursor.fetchall()} - if "quality_format" not in cols and "quality" in cols: - cursor.execute( - "ALTER TABLE download_history ADD COLUMN quality_format TEXT" - ) - try: - cursor.execute( - "UPDATE download_history SET quality_format = quality WHERE quality_format IS NULL" - ) - except sqlite3.Error: - pass - if "quality_bitrate" not in cols and "bitrate" in cols: - cursor.execute( - "ALTER TABLE download_history ADD COLUMN quality_bitrate TEXT" - ) - try: - cursor.execute( - "UPDATE download_history SET quality_bitrate = bitrate WHERE quality_bitrate IS NULL" - ) - except sqlite3.Error: - pass - except Exception as e: - logger.warning( - f"Non-fatal: failed legacy column migration for download_history: {e}" - ) + # quality → quality_format, 
bitrate → quality_bitrate
+            # Handle common legacy pairs non-fatally
+            cursor.execute("PRAGMA table_info(download_history)")
+            cols = {row[1] for row in cursor.fetchall()}
+            if "quality_format" not in cols and "quality" in cols:
+                cursor.execute(
+                    "ALTER TABLE download_history ADD COLUMN quality_format TEXT"
+                )
+                try:
+                    cursor.execute(
+                        "UPDATE download_history SET quality_format = quality WHERE quality_format IS NULL"
+                    )
+                except sqlite3.Error:
+                    pass
+            if "quality_bitrate" not in cols and "bitrate" in cols:
+                cursor.execute(
+                    "ALTER TABLE download_history ADD COLUMN quality_bitrate TEXT"
+                )
+                try:
+                    cursor.execute(
+                        "UPDATE download_history SET quality_bitrate = bitrate WHERE quality_bitrate IS NULL"
+                    )
+                except sqlite3.Error:
+                    pass
+        except Exception as e:
+            logger.warning(
+                f"Non-fatal: failed legacy column migration for download_history: {e}"
+            )

-        # 4) Create indexes only if columns exist (avoid startup failures)
-        try:
-            cursor.execute("PRAGMA table_info(download_history)")
-            cols = {row[1] for row in cursor.fetchall()}

-            if "timestamp" in cols:
-                cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_download_history_timestamp
                ON download_history(timestamp)
            """)
-            if {"download_type", "status"}.issubset(cols):
-                cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_download_history_type_status
                ON download_history(download_type, status)
            """)
-            if "task_id" in cols:
-                cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_download_history_task_id
                ON download_history(task_id)
            """)
-            # Preserve uniqueness from previous schema using a unique index (safer than table constraint for migrations)
-            if {"task_id", "download_type", "external_ids"}.issubset(cols):
-                cursor.execute(
-                    """
                CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids
                ON download_history(task_id, download_type, external_ids)
                """
-                )
-        except Exception as e:
-            logger.warning(
-                f"Non-fatal: failed to create indexes for download_history: {e}"
-            )
+        # 4) Create indexes only if columns exist (avoid startup failures)
+        try:
+            cursor.execute("PRAGMA table_info(download_history)")
+            cols = {row[1] for row in cursor.fetchall()}

+            if "timestamp" in cols:
+                cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_download_history_timestamp
                ON download_history(timestamp)
            """)
+            if {"download_type", "status"}.issubset(cols):
+                cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_download_history_type_status
                ON download_history(download_type, status)
            """)
+            if "task_id" in cols:
+                cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_download_history_task_id
                ON download_history(task_id)
            """)
+            # Preserve uniqueness from previous schema using a unique index (safer than table constraint for migrations)
+            if {"task_id", "download_type", "external_ids"}.issubset(cols):
+                cursor.execute(
+                    """
                CREATE UNIQUE INDEX IF NOT EXISTS uq_download_history_task_type_ids
                ON download_history(task_id, download_type, external_ids)
                """
+                )
+        except Exception as e:
+            logger.warning(
+                f"Non-fatal: failed to create indexes for download_history: {e}"
+            )

-        # 5) Best-effort upgrade of existing children tables (album_*, playlist_*)
-        try:
-            self._migrate_existing_children_tables(cursor)
-        except Exception as e:
-            logger.warning(f"Non-fatal: failed to migrate children tables: {e}")
+        # 5) Best-effort upgrade of existing children tables (album_*, playlist_*)
+        try:
+            self._migrate_existing_children_tables(cursor)
+        except Exception as e:
+            logger.warning(f"Non-fatal: failed to migrate children tables: {e}")

-    @contextmanager
-    def _get_connection(self):
-        """Get database connection with proper error handling."""
-        conn = None
-        try:
-            conn = sqlite3.connect(str(self.db_path))
-            conn.row_factory = sqlite3.Row  # Enable dict-like row access
-            yield conn
-            conn.commit()
-        except Exception as e:
-            if conn:
-                conn.rollback()
-            logger.error(f"Database error: {e}")
-            raise
-        finally:
-            if conn:
-                conn.close()
+    @contextmanager
+    def _get_connection(self):
+        """Get database connection 
with proper error handling.""" + conn = None + try: + conn = sqlite3.connect(str(self.db_path)) + conn.row_factory = sqlite3.Row # Enable dict-like row access + yield conn + conn.commit() + except Exception as e: + if conn: + conn.rollback() + logger.error(f"Database error: {e}") + raise + finally: + if conn: + conn.close() - def _ensure_table_schema( - self, - cursor: sqlite3.Cursor, - table_name: str, - expected_columns: Dict[str, str], - table_description: str, - ) -> None: - """Ensure all expected columns exist in the given table, adding any missing columns.""" - try: - cursor.execute(f"PRAGMA table_info({table_name})") - existing_info = cursor.fetchall() - existing_names = {row[1] for row in existing_info} + def _ensure_table_schema( + self, + cursor: sqlite3.Cursor, + table_name: str, + expected_columns: Dict[str, str], + table_description: str, + ) -> None: + """Ensure all expected columns exist in the given table, adding any missing columns.""" + try: + cursor.execute(f"PRAGMA table_info({table_name})") + existing_info = cursor.fetchall() + existing_names = {row[1] for row in existing_info} - for col_name, col_type in expected_columns.items(): - if col_name not in existing_names: - # Avoid adding PRIMARY KEY or NOT NULL on existing tables; strip them for ALTER - col_type_for_add = ( - col_type.replace("PRIMARY KEY", "") - .replace("AUTOINCREMENT", "") - .replace("NOT NULL", "") - .strip() - ) - try: - cursor.execute( - f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}" - ) - logger.info( - f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'." - ) - except sqlite3.Error as e: - logger.warning( - f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}" - ) - except sqlite3.Error as e: - logger.error( - f"Error ensuring schema for {table_description} table '{table_name}': {e}" - ) + for col_name, col_type in expected_columns.items(): + if col_name not in existing_names: + # Avoid adding PRIMARY KEY or NOT NULL on existing tables; strip them for ALTER + col_type_for_add = ( + col_type.replace("PRIMARY KEY", "") + .replace("AUTOINCREMENT", "") + .replace("NOT NULL", "") + .strip() + ) + try: + cursor.execute( + f"ALTER TABLE {table_name} ADD COLUMN {col_name} {col_type_for_add}" + ) + logger.info( + f"Added missing column '{col_name} {col_type_for_add}' to {table_description} table '{table_name}'." + ) + except sqlite3.Error as e: + logger.warning( + f"Could not add column '{col_name}' to {table_description} table '{table_name}': {e}" + ) + except sqlite3.Error as e: + logger.error( + f"Error ensuring schema for {table_description} table '{table_name}': {e}" + ) - def _create_children_table(self, table_name: str): - """ - Create a children table for storing individual tracks of an album/playlist. - Ensures schema upgrades for existing tables. + def _create_children_table(self, table_name: str): + """ + Create a children table for storing individual tracks of an album/playlist. + Ensures schema upgrades for existing tables. 
- Args: - table_name: Name of the children table (e.g., 'album_abc123') - """ - with self._get_connection() as conn: - cursor = conn.cursor() - cursor.execute(f""" + Args: + table_name: Name of the children table (e.g., 'album_abc123') + """ + with self._get_connection() as conn: + cursor = conn.cursor() + cursor.execute(f""" CREATE TABLE IF NOT EXISTS {table_name} ( id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL, @@ -294,184 +294,190 @@ class HistoryManager: metadata TEXT ) """) - expected_children_columns = { - "id": "INTEGER PRIMARY KEY AUTOINCREMENT", - "title": "TEXT NOT NULL", - "artists": "TEXT", - "album_title": "TEXT", - "duration_ms": "INTEGER", - "track_number": "INTEGER", - "disc_number": "INTEGER", - "explicit": "BOOLEAN", - "status": "TEXT NOT NULL", - "external_ids": "TEXT", - "genres": "TEXT", - "isrc": "TEXT", - "timestamp": "REAL NOT NULL", - "position": "INTEGER", - "metadata": "TEXT", - } - self._ensure_table_schema( - cursor, table_name, expected_children_columns, "children history" - ) + expected_children_columns = { + "id": "INTEGER PRIMARY KEY AUTOINCREMENT", + "title": "TEXT NOT NULL", + "artists": "TEXT", + "album_title": "TEXT", + "duration_ms": "INTEGER", + "track_number": "INTEGER", + "disc_number": "INTEGER", + "explicit": "BOOLEAN", + "status": "TEXT NOT NULL", + "external_ids": "TEXT", + "genres": "TEXT", + "isrc": "TEXT", + "timestamp": "REAL NOT NULL", + "position": "INTEGER", + "metadata": "TEXT", + } + self._ensure_table_schema( + cursor, table_name, expected_children_columns, "children history" + ) - def _migrate_existing_children_tables(self, cursor: sqlite3.Cursor) -> None: - """Find album_* and playlist_* children tables and ensure they have the expected schema.""" - cursor.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%')" - ) - tables = [row[0] for row in cursor.fetchall() if row[0] != "download_history"] - for t in tables: - try: - # Ensure existence + schema upgrades - cursor.execute( - f"CREATE TABLE IF NOT EXISTS {t} (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)" - ) - self._create_children_table(t) - except Exception as e: - logger.warning(f"Non-fatal: failed to migrate children table {t}: {e}") + def _migrate_existing_children_tables(self, cursor: sqlite3.Cursor) -> None: + """Find album_* and playlist_* children tables and ensure they have the expected schema.""" + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%')" + ) + tables = [row[0] for row in cursor.fetchall() if row[0] != "download_history"] + for t in tables: + try: + # Ensure existence + schema upgrades + cursor.execute( + f"CREATE TABLE IF NOT EXISTS {t} (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)" + ) + self._create_children_table(t) + except Exception as e: + logger.warning(f"Non-fatal: failed to migrate children table {t}: {e}") - def _extract_artists(self, obj: Dict) -> List[str]: - """Extract artist names from various object types.""" - artists = obj.get("artists", []) - if not artists: - return [] + def _extract_artists(self, obj: Dict) -> List[str]: + """Extract artist names from various object types.""" + artists = obj.get("artists", []) + if not artists: + return [] - artist_names = [] - for artist in artists: - if isinstance(artist, dict): - name = artist.get("name", "") - if name: - artist_names.append(name) - elif isinstance(artist, str): - artist_names.append(artist) + artist_names = [] + for 
artist in artists: + if isinstance(artist, dict): + name = artist.get("name", "") + if name: + artist_names.append(name) + elif isinstance(artist, str): + artist_names.append(artist) - return artist_names + return artist_names - def _extract_external_ids(self, obj: Dict) -> Dict: - """Extract external service IDs from object.""" - return obj.get("ids", {}) + def _extract_external_ids(self, obj: Dict) -> Dict: + """Extract external service IDs from object.""" + return obj.get("ids", {}) - def _extract_images(self, obj: Dict) -> List[Dict]: - """Extract image information from object.""" - return obj.get("images", []) + def _extract_images(self, obj: Dict) -> List[Dict]: + """Extract image information from object.""" + return obj.get("images", []) - def _extract_release_date(self, obj: Dict) -> Dict: - """Extract release date information from object.""" - return obj.get("release_date", {}) + def _extract_release_date(self, obj: Dict) -> Dict: + """Extract release date information from object.""" + return obj.get("release_date", {}) - def _calculate_total_duration(self, tracks: List[Dict]) -> int: - """Calculate total duration from tracks list.""" - total = 0 - for track in tracks: - duration = track.get("duration_ms", 0) - if duration: - total += duration - return total + def _calculate_total_duration(self, tracks: List[Dict]) -> int: + """Calculate total duration from tracks list.""" + total = 0 + for track in tracks: + duration = track.get("duration_ms", 0) + if duration: + total += duration + return total - def _get_primary_service(self, external_ids: Dict) -> str: - """Determine primary service from external IDs.""" - if "spotify" in external_ids: - return "spotify" - elif "deezer" in external_ids: - return "deezer" - else: - return "unknown" + def _get_primary_service(self, external_ids: Dict) -> str: + """Determine primary service from external IDs.""" + if "spotify" in external_ids: + return "spotify" + elif "deezer" in external_ids: + return "deezer" + else: + return "unknown" - def create_children_table_for_album(self, callback_data: Dict, task_id: str) -> str: - """ - Create children table for album download at the start and return table name. + def create_children_table_for_album(self, callback_data: Dict, task_id: str) -> str: + """ + Create children table for album download at the start and return table name. - Args: - callback_data: Album callback object from deezspot - task_id: Celery task ID + Args: + callback_data: Album callback object from deezspot + task_id: Celery task ID - Returns: - Children table name - """ - # Generate children table name - album_uuid = str(uuid.uuid4()).replace("-", "")[:10] - children_table = f"album_{album_uuid}" + Returns: + Children table name + """ + # Generate children table name + album_uuid = str(uuid.uuid4()).replace("-", "")[:10] + children_table = f"album_{album_uuid}" - # Create the children table - self._create_children_table(children_table) + # Create the children table + self._create_children_table(children_table) - logger.info(f"Created album children table {children_table} for task {task_id}") - return children_table + logger.info(f"Created album children table {children_table} for task {task_id}") + return children_table - def create_children_table_for_playlist( - self, callback_data: Dict, task_id: str - ) -> str: - """ - Create children table for playlist download at the start and return table name. 
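
# Naming sketch (standalone, illustrative only): the children-table helpers
# above and below derive the table name from a dash-stripped uuid4 truncated
# to 10 hex characters. `make_children_table_name` is a hypothetical helper,
# not part of this module.
import uuid

def make_children_table_name(prefix: str) -> str:
    # uuid4().hex is already dash-free, so this is equivalent to the
    # str(uuid.uuid4()).replace("-", "")[:10] expression used in the methods here.
    return f"{prefix}_{uuid.uuid4().hex[:10]}"

# e.g. make_children_table_name("playlist") -> "playlist_1a2b3c4d5e"
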
+ def create_children_table_for_playlist( + self, callback_data: Dict, task_id: str + ) -> str: + """ + Create children table for playlist download at the start and return table name. - Args: - callback_data: Playlist callback object from deezspot - task_id: Celery task ID + Args: + callback_data: Playlist callback object from deezspot + task_id: Celery task ID - Returns: - Children table name - """ - # Generate children table name - playlist_uuid = str(uuid.uuid4()).replace("-", "")[:10] - children_table = f"playlist_{playlist_uuid}" + Returns: + Children table name + """ + # Generate children table name + playlist_uuid = str(uuid.uuid4()).replace("-", "")[:10] + children_table = f"playlist_{playlist_uuid}" - # Create the children table - self._create_children_table(children_table) + # Create the children table + self._create_children_table(children_table) - logger.info( - f"Created playlist children table {children_table} for task {task_id}" - ) - return children_table + logger.info( + f"Created playlist children table {children_table} for task {task_id}" + ) + return children_table - def store_track_history( - self, - callback_data: Dict, - task_id: str, - status: str = "completed", - table: str = "download_history", - ): - """ - Store individual track download history. + def store_track_history( + self, + callback_data: Dict, + task_id: str, + status: str = "completed", + table: str = "download_history", + ): + """ + Store individual track download history. - Args: - callback_data: Track callback object from deezspot - task_id: Celery task ID - status: Download status ('completed', 'failed', 'skipped') - table: Target table name (defaults to 'download_history', can be a children table name) - """ - try: - track = callback_data.get("track", {}) - status_info = callback_data.get("status_info", {}) + Args: + callback_data: Track callback object from deezspot + task_id: Celery task ID + status: Download status ('completed', 'failed', 'skipped') + table: Target table name (defaults to 'download_history', can be a children table name) + """ + try: + track = callback_data.get("track", {}) + status_info = callback_data.get("status_info", {}) - if not track: - logger.warning(f"No track data in callback for task {task_id}") - return + if not track: + logger.warning(f"No track data in callback for task {task_id}") + return - artists = self._extract_artists(track) - external_ids = self._extract_external_ids(track) + artists = self._extract_artists(track) + external_ids = self._extract_external_ids(track) - album = track.get("album", {}) - album_title = album.get("title", "") + # Prefer service/quality/bitrate from summary when available + summary = status_info.get("summary") or {} + service = summary.get("service") or self._get_primary_service(external_ids) + quality_format = summary.get("quality") or status_info.get("convert_to") + quality_bitrate = summary.get("bitrate") or status_info.get("bitrate") - # Prepare metadata - metadata = { - "callback_type": "track", - "parent": callback_data.get("parent"), - "current_track": callback_data.get("current_track"), - "total_tracks": callback_data.get("total_tracks"), - "album": album, - "status_info": status_info, - } + album = track.get("album", {}) + album_title = album.get("title", "") - with self._get_connection() as conn: - if table == "download_history": - # Store in main download_history table - logger.info( - f"Storing track '{track.get('title', 'Unknown')}' in MAIN table for task {task_id}" - ) - conn.execute( - """ + # Prepare metadata + 
metadata = { + "callback_type": "track", + "parent": callback_data.get("parent"), + "current_track": callback_data.get("current_track"), + "total_tracks": callback_data.get("total_tracks"), + "album": album, + "status_info": status_info, + } + + with self._get_connection() as conn: + if table == "download_history": + # Store in main download_history table + logger.info( + f"Storing track '{track.get('title', 'Unknown')}' in MAIN table for task {task_id}" + ) + conn.execute( + """ INSERT OR REPLACE INTO download_history ( download_type, title, artists, timestamp, status, service, quality_format, quality_bitrate, task_id, external_ids, @@ -479,186 +485,198 @@ class HistoryManager: duration_total_ms ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, - ( - "track", - track.get("title", "Unknown"), - json.dumps(artists), - callback_data.get("timestamp", time.time()), - status, - self._get_primary_service(external_ids), - status_info.get("convert_to"), - status_info.get("bitrate"), - task_id, - json.dumps(external_ids), - json.dumps(metadata), - json.dumps(self._extract_release_date(album)), - json.dumps(track.get("genres", [])), - track.get("explicit", False), - album.get("album_type"), - track.get("duration_ms", 0), - ), - ) - else: - # Ensure target children table exists before write - self._create_children_table(table) - # Store in children table (for album/playlist tracks) - logger.info( - f"Storing track '{track.get('title', 'Unknown')}' in CHILDREN table '{table}' for task {task_id}" - ) - # Extract ISRC - isrc = external_ids.get("isrc", "") + ( + "track", + track.get("title", "Unknown"), + json.dumps(artists), + callback_data.get("timestamp", time.time()), + status, + service, + quality_format, + quality_bitrate, + task_id, + json.dumps(external_ids), + json.dumps(metadata), + json.dumps(self._extract_release_date(album)), + json.dumps(track.get("genres", [])), + track.get("explicit", False), + album.get("album_type"), + track.get("duration_ms", 0), + ), + ) + else: + # Ensure target children table exists before write + self._create_children_table(table) + # Store in children table (for album/playlist tracks) + logger.info( + f"Storing track '{track.get('title', 'Unknown')}' in CHILDREN table '{table}' for task {task_id}" + ) + # Extract ISRC + isrc = external_ids.get("isrc", "") - # Prepare children table metadata - children_metadata = { - "album": album, - "type": track.get("type", ""), - "callback_type": "track", - "parent": callback_data.get("parent"), - "current_track": callback_data.get("current_track"), - "total_tracks": callback_data.get("total_tracks"), - "status_info": status_info, - } + # Prepare children table metadata + children_metadata = { + "album": album, + "type": track.get("type", ""), + "callback_type": "track", + "parent": callback_data.get("parent"), + "current_track": callback_data.get("current_track"), + "total_tracks": callback_data.get("total_tracks"), + "status_info": status_info, + } - conn.execute( - f""" + conn.execute( + f""" INSERT INTO {table} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, isrc, timestamp, position, metadata ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
""", - ( - track.get("title", "Unknown"), - json.dumps(artists), - album_title, - track.get("duration_ms", 0), - track.get("track_number", 0), - track.get("disc_number", 1), - track.get("explicit", False), - status, - json.dumps(external_ids), - json.dumps(track.get("genres", [])), - isrc, - callback_data.get("timestamp", time.time()), - track.get("position", 0), # For playlist tracks - json.dumps(children_metadata), - ), - ) + ( + track.get("title", "Unknown"), + json.dumps(artists), + album_title, + track.get("duration_ms", 0), + track.get("track_number", 0), + track.get("disc_number", 1), + track.get("explicit", False), + status, + json.dumps(external_ids), + json.dumps(track.get("genres", [])), + isrc, + callback_data.get("timestamp", time.time()), + track.get("position", 0), # For playlist tracks + json.dumps(children_metadata), + ), + ) - logger.info( - f"Successfully stored track '{track.get('title')}' in table '{table}' (task: {task_id})" - ) + logger.info( + f"Successfully stored track '{track.get('title')}' in table '{table}' (task: {task_id})" + ) - except Exception as e: - logger.error(f"Failed to store track history for task {task_id}: {e}") + except Exception as e: + logger.error(f"Failed to store track history for task {task_id}: {e}") - def store_album_history( - self, callback_data: Dict, task_id: str, status: str = "completed" - ): - """ - Store album download history with children table for individual tracks. + def store_album_history( + self, callback_data: Dict, task_id: str, status: str = "completed" + ): + """ + Store album download history with children table for individual tracks. - Args: - callback_data: Album callback object from deezspot - task_id: Celery task ID - status: Download status ('completed', 'failed', 'in_progress') + Args: + callback_data: Album callback object from deezspot + task_id: Celery task ID + status: Download status ('completed', 'failed', 'in_progress') - Returns: - Children table name when status is 'in_progress', None otherwise - """ - try: - album = callback_data.get("album", {}) - status_info = callback_data.get("status_info", {}) + Returns: + Children table name when status is 'in_progress', None otherwise + """ + try: + album = callback_data.get("album", {}) + status_info = callback_data.get("status_info", {}) - if not album: - logger.warning(f"No album data in callback for task {task_id}") - return None + if not album: + logger.warning(f"No album data in callback for task {task_id}") + return None - if status == "in_progress": - # Phase 1: Create children table at start, don't store album entry yet - children_table = self.create_children_table_for_album( - callback_data, task_id - ) - logger.info( - f"Album download started for task {task_id}, children table: {children_table}" - ) - return children_table + if status == "in_progress": + # Phase 1: Create children table at start, don't store album entry yet + children_table = self.create_children_table_for_album( + callback_data, task_id + ) + logger.info( + f"Album download started for task {task_id}, children table: {children_table}" + ) + return children_table - # Phase 2: Store album entry in main table (for completed/failed status) - artists = self._extract_artists(album) - external_ids = self._extract_external_ids(album) + # Phase 2: Store album entry in main table (for completed/failed status) + artists = self._extract_artists(album) + external_ids = self._extract_external_ids(album) - # For completed/failed, we need to find the existing children table - # This should be stored 
in task info by the celery task - from routes.utils.celery_tasks import get_task_info + # For completed/failed, we need to find the existing children table + # This should be stored in task info by the celery task + from routes.utils.celery_tasks import get_task_info - task_info = get_task_info(task_id) - children_table = task_info.get("children_table") + task_info = get_task_info(task_id) + children_table = task_info.get("children_table") - if not children_table: - # Fallback: generate new children table name (shouldn't happen in normal flow) - album_uuid = str(uuid.uuid4()).replace("-", "")[:10] - children_table = f"album_{album_uuid}" - logger.warning( - f"No children table found for album task {task_id}, generating new: {children_table}" - ) + if not children_table: + # Fallback: generate new children table name (shouldn't happen in normal flow) + album_uuid = str(uuid.uuid4()).replace("-", "")[:10] + children_table = f"album_{album_uuid}" + logger.warning( + f"No children table found for album task {task_id}, generating new: {children_table}" + ) - # Extract summary data if available (from 'done' status) - summary = status_info.get("summary", {}) - successful_tracks = summary.get("total_successful", 0) - failed_tracks = summary.get("total_failed", 0) - skipped_tracks = summary.get("total_skipped", 0) - total_tracks = summary.get("total_successful", 0) + summary.get( - "total_skipped", 0 - ) + summary.get("total_failed", 0) or album.get("total_tracks", 0) + # Extract summary data if available (from 'done' status) + summary = status_info.get("summary", {}) + successful_tracks = summary.get("total_successful", 0) + failed_tracks = summary.get("total_failed", 0) + skipped_tracks = summary.get("total_skipped", 0) + total_tracks = summary.get("total_successful", 0) + summary.get( + "total_skipped", 0 + ) + summary.get("total_failed", 0) or album.get("total_tracks", 0) - # Enrich album metadata if missing - try: - album = self._enrich_album_metadata_from_summary(album, summary) - except Exception: - pass + # Enrich album metadata if missing + try: + album = self._enrich_album_metadata_from_summary(album, summary) + except Exception: + pass - # Calculate total duration - tracks = album.get("tracks", []) - total_duration = self._calculate_total_duration(tracks) + # Calculate total duration + tracks = album.get("tracks", []) + total_duration = self._calculate_total_duration(tracks) - # Derive accurate status if we have counters - status_to_store = status - try: - if total_tracks: - if ( - successful_tracks >= total_tracks - and failed_tracks == 0 - and skipped_tracks == 0 - ): - status_to_store = "completed" - elif successful_tracks > 0: - status_to_store = "partial" - else: - # None succeeded but there are failures/skips or unknown issues - status_to_store = "failed" - else: - # Fallback: if any failure recorded, mark failed/partial conservatively - if failed_tracks > 0 and successful_tracks == 0: - status_to_store = "failed" - elif failed_tracks > 0 and successful_tracks > 0: - status_to_store = "partial" - except Exception: - # Keep provided status - pass + # Derive accurate status if we have counters + status_to_store = status + try: + if total_tracks: + if ( + successful_tracks >= total_tracks + and failed_tracks == 0 + and skipped_tracks == 0 + ): + status_to_store = "completed" + elif successful_tracks > 0: + status_to_store = "partial" + else: + # None succeeded but there are failures/skips or unknown issues + status_to_store = "failed" + else: + # Fallback: if any failure recorded, 
mark failed/partial conservatively + if failed_tracks > 0 and successful_tracks == 0: + status_to_store = "failed" + elif failed_tracks > 0 and successful_tracks > 0: + status_to_store = "partial" + except Exception: + # Keep provided status + pass - # Prepare metadata - metadata = { - "callback_type": "album", - "status_info": status_info, - "copyrights": album.get("copyrights", []), - "tracks": tracks, # Store track list in metadata - } + # Prepare metadata + metadata = { + "callback_type": "album", + "status_info": status_info, + "copyrights": album.get("copyrights", []), + "tracks": tracks, # Store track list in metadata + } - with self._get_connection() as conn: - # Store main album entry - conn.execute( - """ + with self._get_connection() as conn: + # Store main album entry + + # Prefer service/quality/bitrate from summary when available + service_to_store = summary.get("service") or self._get_primary_service( + external_ids + ) + quality_format_to_store = summary.get("quality") or status_info.get( + "convert_to" + ) + quality_bitrate_to_store = summary.get("bitrate") or status_info.get( + "bitrate" + ) + + conn.execute( + """ INSERT OR REPLACE INTO download_history ( download_type, title, artists, timestamp, status, service, quality_format, quality_bitrate, total_tracks, successful_tracks, @@ -667,181 +685,193 @@ class HistoryManager: album_type, duration_total_ms ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, - ( - "album", - album.get("title", "Unknown"), - json.dumps(artists), - callback_data.get("timestamp", time.time()), - status_to_store, - self._get_primary_service(external_ids), - status_info.get("convert_to"), - status_info.get("bitrate"), - total_tracks, - successful_tracks, - failed_tracks, - skipped_tracks, - children_table, - task_id, - json.dumps(external_ids), - json.dumps(metadata), - json.dumps(self._extract_release_date(album)), - json.dumps(album.get("genres", [])), - json.dumps(self._extract_images(album)), - album.get("album_type"), - total_duration, - ), - ) + ( + "album", + album.get("title", "Unknown"), + json.dumps(artists), + callback_data.get("timestamp", time.time()), + status_to_store, + service_to_store, + quality_format_to_store, + quality_bitrate_to_store, + total_tracks, + successful_tracks, + failed_tracks, + skipped_tracks, + children_table, + task_id, + json.dumps(external_ids), + json.dumps(metadata), + json.dumps(self._extract_release_date(album)), + json.dumps(album.get("genres", [])), + json.dumps(self._extract_images(album)), + album.get("album_type"), + total_duration, + ), + ) - # If we have a summary (e.g., on cancellation), populate children from it including failed ones - try: - if summary: - self._populate_album_children_table( - children_table, summary, album.get("title", "") - ) - except Exception as e: - logger.warning( - f"Failed to populate children from summary for album {children_table}: {e}" - ) + # If we have a summary (e.g., on cancellation), populate children from it including failed ones + try: + if summary: + self._populate_album_children_table( + children_table, summary, album.get("title", "") + ) + except Exception as e: + logger.warning( + f"Failed to populate children from summary for album {children_table}: {e}" + ) - logger.info( - f"Stored album history for '{album.get('title')}' (task: {task_id}, children: {children_table}, status: {status_to_store})" - ) - return None + logger.info( + f"Stored album history for '{album.get('title')}' (task: {task_id}, children: {children_table}, 
status: {status_to_store})" + ) + return None - except Exception as e: - logger.error(f"Failed to store album history for task {task_id}: {e}") - return None + except Exception as e: + logger.error(f"Failed to store album history for task {task_id}: {e}") + return None - def store_playlist_history( - self, callback_data: Dict, task_id: str, status: str = "completed" - ): - """ - Store playlist download history with children table for individual tracks. + def store_playlist_history( + self, callback_data: Dict, task_id: str, status: str = "completed" + ): + """ + Store playlist download history with children table for individual tracks. - Args: - callback_data: Playlist callback object from deezspot - task_id: Celery task ID - status: Download status ('completed', 'failed', 'in_progress') + Args: + callback_data: Playlist callback object from deezspot + task_id: Celery task ID + status: Download status ('completed', 'failed', 'in_progress') - Returns: - Children table name when status is 'in_progress', None otherwise - """ - try: - playlist = callback_data.get("playlist", {}) - status_info = callback_data.get("status_info", {}) + Returns: + Children table name when status is 'in_progress', None otherwise + """ + try: + playlist = callback_data.get("playlist", {}) + status_info = callback_data.get("status_info", {}) - if not playlist: - logger.warning(f"No playlist data in callback for task {task_id}") - return None + if not playlist: + logger.warning(f"No playlist data in callback for task {task_id}") + return None - if status == "in_progress": - # Phase 1: Create children table at start, don't store playlist entry yet - children_table = self.create_children_table_for_playlist( - callback_data, task_id - ) - logger.info( - f"Playlist download started for task {task_id}, children table: {children_table}" - ) - return children_table + if status == "in_progress": + # Phase 1: Create children table at start, don't store playlist entry yet + children_table = self.create_children_table_for_playlist( + callback_data, task_id + ) + logger.info( + f"Playlist download started for task {task_id}, children table: {children_table}" + ) + return children_table - # Phase 2: Store playlist entry in main table (for completed/failed status) - external_ids = self._extract_external_ids(playlist) + # Phase 2: Store playlist entry in main table (for completed/failed status) + external_ids = self._extract_external_ids(playlist) - # For completed/failed, we need to find the existing children table - # This should be stored in task info by the celery task - from routes.utils.celery_tasks import get_task_info + # For completed/failed, we need to find the existing children table + # This should be stored in task info by the celery task + from routes.utils.celery_tasks import get_task_info - task_info = get_task_info(task_id) - children_table = task_info.get("children_table") + task_info = get_task_info(task_id) + children_table = task_info.get("children_table") - if not children_table: - # Fallback: generate new children table name (shouldn't happen in normal flow) - playlist_uuid = str(uuid.uuid4()).replace("-", "")[:10] - children_table = f"playlist_{playlist_uuid}" - logger.warning( - f"No children table found for playlist task {task_id}, generating new: {children_table}" - ) + if not children_table: + # Fallback: generate new children table name (shouldn't happen in normal flow) + playlist_uuid = str(uuid.uuid4()).replace("-", "")[:10] + children_table = f"playlist_{playlist_uuid}" + logger.warning( + f"No 
children table found for playlist task {task_id}, generating new: {children_table}" + ) - # Extract summary data if available - summary = status_info.get("summary", {}) - successful_tracks = summary.get("total_successful", 0) - failed_tracks = summary.get("total_failed", 0) - skipped_tracks = summary.get("total_skipped", 0) + # Extract summary data if available + summary = status_info.get("summary", {}) + successful_tracks = summary.get("total_successful", 0) + failed_tracks = summary.get("total_failed", 0) + skipped_tracks = summary.get("total_skipped", 0) - # Improve metadata for playlist main row using summary first success/skip/failed track - try: - if not playlist.get("images"): - for arr_key in ( - "successful_tracks", - "skipped_tracks", - "failed_tracks", - ): - arr = summary.get(arr_key, []) or [] - candidate = ( - ( - arr[0].get("album") - if arr_key == "failed_tracks" - and isinstance(arr[0], dict) - else ( - arr[0].get("album") - if arr and isinstance(arr[0], dict) - else {} - ) - ) - if arr - else {} - ) - if candidate and candidate.get("images"): - playlist.setdefault("images", candidate.get("images", [])) - break - except Exception: - pass + # Improve metadata for playlist main row using summary first success/skip/failed track + try: + if not playlist.get("images"): + for arr_key in ( + "successful_tracks", + "skipped_tracks", + "failed_tracks", + ): + arr = summary.get(arr_key, []) or [] + candidate = ( + ( + arr[0].get("album") + if arr_key == "failed_tracks" + and isinstance(arr[0], dict) + else ( + arr[0].get("album") + if arr and isinstance(arr[0], dict) + else {} + ) + ) + if arr + else {} + ) + if candidate and candidate.get("images"): + playlist.setdefault("images", candidate.get("images", [])) + break + except Exception: + pass - tracks = playlist.get("tracks", []) - total_tracks = ( - summary.get("total_successful", 0) - + summary.get("total_skipped", 0) - + summary.get("total_failed", 0) - ) or len(tracks) - total_duration = self._calculate_total_duration(tracks) + tracks = playlist.get("tracks", []) + total_tracks = ( + summary.get("total_successful", 0) + + summary.get("total_skipped", 0) + + summary.get("total_failed", 0) + ) or len(tracks) + total_duration = self._calculate_total_duration(tracks) - # Derive accurate status - status_to_store = status - try: - if total_tracks: - if ( - successful_tracks >= total_tracks - and failed_tracks == 0 - and skipped_tracks == 0 - ): - status_to_store = "completed" - elif successful_tracks > 0: - status_to_store = "partial" - else: - status_to_store = "failed" - else: - if failed_tracks > 0 and successful_tracks == 0: - status_to_store = "failed" - elif failed_tracks > 0 and successful_tracks > 0: - status_to_store = "partial" - except Exception: - pass + # Derive accurate status + status_to_store = status + try: + if total_tracks: + if ( + successful_tracks >= total_tracks + and failed_tracks == 0 + and skipped_tracks == 0 + ): + status_to_store = "completed" + elif successful_tracks > 0: + status_to_store = "partial" + else: + status_to_store = "failed" + else: + if failed_tracks > 0 and successful_tracks == 0: + status_to_store = "failed" + elif failed_tracks > 0 and successful_tracks > 0: + status_to_store = "partial" + except Exception: + pass - # Extract owner information - owner = playlist.get("owner", {}) + # Extract owner information + owner = playlist.get("owner", {}) - # Prepare metadata - metadata = { - "callback_type": "playlist", - "status_info": status_info, - "description": playlist.get("description", ""), 
- "tracks": tracks, # Store track list in metadata - } + # Prepare metadata + metadata = { + "callback_type": "playlist", + "status_info": status_info, + "description": playlist.get("description", ""), + "tracks": tracks, # Store track list in metadata + } - with self._get_connection() as conn: - # Store main playlist entry - conn.execute( - """ + with self._get_connection() as conn: + # Store main playlist entry + + # Prefer service/quality/bitrate from summary when available + service_to_store = summary.get("service") or self._get_primary_service( + external_ids + ) + quality_format_to_store = summary.get("quality") or status_info.get( + "convert_to" + ) + quality_bitrate_to_store = summary.get("bitrate") or status_info.get( + "bitrate" + ) + + conn.execute( + """ INSERT OR REPLACE INTO download_history ( download_type, title, artists, timestamp, status, service, quality_format, quality_bitrate, total_tracks, successful_tracks, @@ -850,321 +880,319 @@ class HistoryManager: duration_total_ms ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, - ( - "playlist", - playlist.get("title", "Unknown"), - json.dumps( - [owner.get("name", "Unknown")] - ), # Use owner as "artist" - callback_data.get("timestamp", time.time()), - status_to_store, - self._get_primary_service(external_ids), - status_info.get("convert_to"), - status_info.get("bitrate"), - total_tracks, - successful_tracks, - failed_tracks, - skipped_tracks, - children_table, - task_id, - json.dumps(external_ids), - json.dumps(metadata), - json.dumps([]), # Playlists don't have genres typically - json.dumps(self._extract_images(playlist)), - json.dumps(owner), - total_duration, - ), - ) + ( + "playlist", + playlist.get("title", "Unknown"), + json.dumps([owner.get("name", "Unknown")]), + callback_data.get("timestamp", time.time()), + status_to_store, + service_to_store, + quality_format_to_store, + quality_bitrate_to_store, + total_tracks, + successful_tracks, + failed_tracks, + skipped_tracks, + children_table, + task_id, + json.dumps(external_ids), + json.dumps(metadata), + json.dumps([]), + json.dumps(self._extract_images(playlist)), + json.dumps(owner), + total_duration, + ), + ) - # If we have a summary (e.g., on cancellation), populate children from it including failed ones - try: - if summary: - self._populate_playlist_children_table(children_table, summary) - except Exception as e: - logger.warning( - f"Failed to populate children from summary for playlist {children_table}: {e}" - ) + # If we have a summary (e.g., on cancellation), populate children from it including failed ones + try: + if summary: + self._populate_playlist_children_table(children_table, summary) + except Exception as e: + logger.warning( + f"Failed to populate children from summary for playlist {children_table}: {e}" + ) - logger.info( - f"Stored playlist history for '{playlist.get('title')}' (task: {task_id}, children: {children_table}, status: {status_to_store})" - ) - return None + logger.info( + f"Stored playlist history for '{playlist.get('title')}' (task: {task_id}, children: {children_table}, status: {status_to_store})" + ) + return None - except Exception as e: - logger.error(f"Failed to store playlist history for task {task_id}: {e}") - return None + except Exception as e: + logger.error(f"Failed to store playlist history for task {task_id}: {e}") + return None - def _populate_album_children_table( - self, table_name: str, summary: Dict, album_title: str - ): - """Populate children table with individual track records from album 
summary.""" - try: - # Ensure table exists before population - self._create_children_table(table_name) - all_rows = [] + def _populate_album_children_table( + self, table_name: str, summary: Dict, album_title: str + ): + """Populate children table with individual track records from album summary.""" + try: + # Ensure table exists before population + self._create_children_table(table_name) + all_rows = [] - # Add successful tracks - for track in summary.get("successful_tracks", []): - track_data = self._prepare_child_track_data( - track, album_title, "completed" - ) - all_rows.append(self._map_values_to_row(track_data["values"])) + # Add successful tracks + for track in summary.get("successful_tracks", []): + track_data = self._prepare_child_track_data( + track, album_title, "completed" + ) + all_rows.append(self._map_values_to_row(track_data["values"])) - # Add failed tracks - for failed_item in summary.get("failed_tracks", []): - track = failed_item.get("track", {}) - track_data = self._prepare_child_track_data( - track, album_title, "failed" - ) - track_data["metadata"]["failure_reason"] = failed_item.get( - "reason", "Unknown error" - ) - all_rows.append(self._map_values_to_row(track_data["values"])) + # Add failed tracks + for failed_item in summary.get("failed_tracks", []): + track = failed_item.get("track", {}) + track_data = self._prepare_child_track_data( + track, album_title, "failed" + ) + track_data["metadata"]["failure_reason"] = failed_item.get( + "reason", "Unknown error" + ) + all_rows.append(self._map_values_to_row(track_data["values"])) - # Add skipped tracks - for track in summary.get("skipped_tracks", []): - track_data = self._prepare_child_track_data( - track, album_title, "skipped" - ) - all_rows.append(self._map_values_to_row(track_data["values"])) + # Add skipped tracks + for track in summary.get("skipped_tracks", []): + track_data = self._prepare_child_track_data( + track, album_title, "skipped" + ) + all_rows.append(self._map_values_to_row(track_data["values"])) - # Upsert all rows - with self._get_connection() as conn: - for row in all_rows: - self._upsert_child_row(conn, table_name, row) + # Upsert all rows + with self._get_connection() as conn: + for row in all_rows: + self._upsert_child_row(conn, table_name, row) - logger.info( - f"Populated {len(all_rows)} tracks in children table {table_name}" - ) + logger.info( + f"Populated {len(all_rows)} tracks in children table {table_name}" + ) - except Exception as e: - logger.error(f"Failed to populate album children table {table_name}: {e}") + except Exception as e: + logger.error(f"Failed to populate album children table {table_name}: {e}") - def _populate_playlist_children_table(self, table_name: str, summary: Dict): - """Populate children table with individual track records from playlist summary.""" - try: - # Ensure table exists before population - self._create_children_table(table_name) - all_rows = [] + def _populate_playlist_children_table(self, table_name: str, summary: Dict): + """Populate children table with individual track records from playlist summary.""" + try: + # Ensure table exists before population + self._create_children_table(table_name) + all_rows = [] - # Add successful tracks - for track in summary.get("successful_tracks", []): - track_data = self._prepare_child_track_data(track, "", "completed") - all_rows.append(self._map_values_to_row(track_data["values"])) + # Add successful tracks + for track in summary.get("successful_tracks", []): + track_data = self._prepare_child_track_data(track, "", 
"completed") + all_rows.append(self._map_values_to_row(track_data["values"])) - # Add failed tracks - for failed_item in summary.get("failed_tracks", []): - track = failed_item.get("track", {}) - track_data = self._prepare_child_track_data(track, "", "failed") - track_data["metadata"]["failure_reason"] = failed_item.get( - "reason", "Unknown error" - ) - all_rows.append(self._map_values_to_row(track_data["values"])) + # Add failed tracks + for failed_item in summary.get("failed_tracks", []): + track = failed_item.get("track", {}) + track_data = self._prepare_child_track_data(track, "", "failed") + track_data["metadata"]["failure_reason"] = failed_item.get( + "reason", "Unknown error" + ) + all_rows.append(self._map_values_to_row(track_data["values"])) - # Add skipped tracks - for track in summary.get("skipped_tracks", []): - track_data = self._prepare_child_track_data(track, "", "skipped") - all_rows.append(self._map_values_to_row(track_data["values"])) + # Add skipped tracks + for track in summary.get("skipped_tracks", []): + track_data = self._prepare_child_track_data(track, "", "skipped") + all_rows.append(self._map_values_to_row(track_data["values"])) - with self._get_connection() as conn: - for row in all_rows: - self._upsert_child_row(conn, table_name, row) + with self._get_connection() as conn: + for row in all_rows: + self._upsert_child_row(conn, table_name, row) - logger.info( - f"Populated {len(all_rows)} tracks in children table {table_name}" - ) + logger.info( + f"Populated {len(all_rows)} tracks in children table {table_name}" + ) - except Exception as e: - logger.error( - f"Failed to populate playlist children table {table_name}: {e}" - ) + except Exception as e: + logger.error( + f"Failed to populate playlist children table {table_name}: {e}" + ) - def _prepare_child_track_data( - self, track: Dict, default_album: str, status: str - ) -> Dict: - """Prepare track data for insertion into children table.""" - artists = self._extract_artists(track) - external_ids = self._extract_external_ids(track) + def _prepare_child_track_data( + self, track: Dict, default_album: str, status: str + ) -> Dict: + """Prepare track data for insertion into children table.""" + artists = self._extract_artists(track) + external_ids = self._extract_external_ids(track) - # Get album info - album = track.get("album", {}) - album_title = album.get("title", default_album) + # Get album info + album = track.get("album", {}) + album_title = album.get("title", default_album) - # Extract ISRC - isrc = external_ids.get("isrc", "") + # Extract ISRC + isrc = external_ids.get("isrc", "") - # Prepare metadata - metadata = {"album": album, "type": track.get("type", "")} + # Prepare metadata + metadata = {"album": album, "type": track.get("type", "")} - values = ( - track.get("title", "Unknown"), - json.dumps(artists), - album_title, - track.get("duration_ms", 0), - track.get("track_number", 0), - track.get("disc_number", 1), - track.get("explicit", False), - status, - json.dumps(external_ids), - json.dumps(track.get("genres", [])), - isrc, - time.time(), - track.get("position", 0), # For playlist tracks - json.dumps(metadata), - ) + values = ( + track.get("title", "Unknown"), + json.dumps(artists), + album_title, + track.get("duration_ms", 0), + track.get("track_number", 0), + track.get("disc_number", 1), + track.get("explicit", False), + status, + json.dumps(external_ids), + json.dumps(track.get("genres", [])), + isrc, + time.time(), + track.get("position", 0), # For playlist tracks + json.dumps(metadata), + ) - 
return {"values": values, "metadata": metadata} + return {"values": values, "metadata": metadata} - def update_download_status(self, task_id: str, status: str): - """Update download status for existing history entry.""" - try: - with self._get_connection() as conn: - conn.execute( - """ + def update_download_status(self, task_id: str, status: str): + """Update download status for existing history entry.""" + try: + with self._get_connection() as conn: + conn.execute( + """ UPDATE download_history SET status = ? WHERE task_id = ? """, - (status, task_id), - ) + (status, task_id), + ) - logger.info(f"Updated download status to '{status}' for task {task_id}") + logger.info(f"Updated download status to '{status}' for task {task_id}") - except Exception as e: - logger.error(f"Failed to update download status for task {task_id}: {e}") + except Exception as e: + logger.error(f"Failed to update download status for task {task_id}: {e}") - def get_download_history( - self, - limit: int = 100, - offset: int = 0, - download_type: Optional[str] = None, - status: Optional[str] = None, - ) -> List[Dict]: - """ - Retrieve download history with optional filtering. + def get_download_history( + self, + limit: int = 100, + offset: int = 0, + download_type: Optional[str] = None, + status: Optional[str] = None, + ) -> List[Dict]: + """ + Retrieve download history with optional filtering. - Args: - limit: Maximum number of records to return - offset: Number of records to skip - download_type: Filter by download type ('track', 'album', 'playlist') - status: Filter by status ('completed', 'failed', 'skipped', 'in_progress') + Args: + limit: Maximum number of records to return + offset: Number of records to skip + download_type: Filter by download type ('track', 'album', 'playlist') + status: Filter by status ('completed', 'failed', 'skipped', 'in_progress') - Returns: - List of download history records - """ - try: - query = "SELECT * FROM download_history" - params: List[Union[str, int]] = [] - conditions = [] + Returns: + List of download history records + """ + try: + query = "SELECT * FROM download_history" + params: List[Union[str, int]] = [] + conditions = [] - if download_type: - conditions.append("download_type = ?") - params.append(download_type) + if download_type: + conditions.append("download_type = ?") + params.append(download_type) - if status: - conditions.append("status = ?") - params.append(status) + if status: + conditions.append("status = ?") + params.append(status) - if conditions: - query += " WHERE " + " AND ".join(conditions) + if conditions: + query += " WHERE " + " AND ".join(conditions) - query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?" - params.extend([limit, offset]) + query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?" 
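+            # For example, get_download_history(limit=50, offset=50, status="failed")
+            # builds: SELECT * FROM download_history WHERE status = ?
+            #         ORDER BY timestamp DESC LIMIT ? OFFSET ?
+            # with parameters ["failed", 50, 50] bound in order below.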
+ params.extend([limit, offset]) - with self._get_connection() as conn: - cursor = conn.execute(query, params) - rows = cursor.fetchall() + with self._get_connection() as conn: + cursor = conn.execute(query, params) + rows = cursor.fetchall() - # Convert to list of dicts - result = [] - for row in rows: - record = dict(row) - # Parse JSON fields - for field in [ - "artists", - "external_ids", - "metadata", - "release_date", - "genres", - "images", - "owner", - ]: - if record.get(field): - try: - record[field] = json.loads(record[field]) - except (json.JSONDecodeError, TypeError): - pass - result.append(record) + # Convert to list of dicts + result = [] + for row in rows: + record = dict(row) + # Parse JSON fields + for field in [ + "artists", + "external_ids", + "metadata", + "release_date", + "genres", + "images", + "owner", + ]: + if record.get(field): + try: + record[field] = json.loads(record[field]) + except (json.JSONDecodeError, TypeError): + pass + result.append(record) - return result + return result - except Exception as e: - logger.error(f"Failed to retrieve download history: {e}") - return [] + except Exception as e: + logger.error(f"Failed to retrieve download history: {e}") + return [] - def get_children_history(self, children_table: str) -> List[Dict]: - """ - Retrieve track history from a children table. + def get_children_history(self, children_table: str) -> List[Dict]: + """ + Retrieve track history from a children table. - Args: - children_table: Name of the children table + Args: + children_table: Name of the children table - Returns: - List of track records - """ - try: - # Ensure table exists before reading - self._create_children_table(children_table) - with self._get_connection() as conn: - cursor = conn.execute(f""" + Returns: + List of track records + """ + try: + # Ensure table exists before reading + self._create_children_table(children_table) + with self._get_connection() as conn: + cursor = conn.execute(f""" SELECT * FROM {children_table} ORDER BY track_number, position """) - rows = cursor.fetchall() + rows = cursor.fetchall() - # Convert to list of dicts - result = [] - for row in rows: - record = dict(row) - # Parse JSON fields - for field in ["artists", "external_ids", "genres", "metadata"]: - if record.get(field): - try: - record[field] = json.loads(record[field]) - except (json.JSONDecodeError, TypeError): - pass - result.append(record) + # Convert to list of dicts + result = [] + for row in rows: + record = dict(row) + # Parse JSON fields + for field in ["artists", "external_ids", "genres", "metadata"]: + if record.get(field): + try: + record[field] = json.loads(record[field]) + except (json.JSONDecodeError, TypeError): + pass + result.append(record) - return result + return result - except Exception as e: - logger.error( - f"Failed to retrieve children history from {children_table}: {e}" - ) - return [] + except Exception as e: + logger.error( + f"Failed to retrieve children history from {children_table}: {e}" + ) + return [] - def get_download_stats(self) -> Dict: - """Get download statistics.""" - try: - with self._get_connection() as conn: - # Total downloads by type - cursor = conn.execute(""" + def get_download_stats(self) -> Dict: + """Get download statistics.""" + try: + with self._get_connection() as conn: + # Total downloads by type + cursor = conn.execute(""" SELECT download_type, status, COUNT(*) as count FROM download_history GROUP BY download_type, status """) - type_stats: Dict[str, Dict[str, int]] = {} - for row in cursor.fetchall(): - 
download_type = row["download_type"] - status = row["status"] - count = row["count"] + type_stats: Dict[str, Dict[str, int]] = {} + for row in cursor.fetchall(): + download_type = row["download_type"] + status = row["status"] + count = row["count"] - if download_type not in type_stats: - type_stats[download_type] = {} - type_stats[download_type][status] = count + if download_type not in type_stats: + type_stats[download_type] = {} + type_stats[download_type][status] = count - # Total tracks downloaded (including from albums/playlists) - cursor = conn.execute(""" + # Total tracks downloaded (including from albums/playlists) + cursor = conn.execute(""" SELECT SUM( CASE WHEN download_type = 'track' AND status = 'completed' THEN 1 @@ -1173,441 +1201,441 @@ class HistoryManager: ) as total_successful_tracks FROM download_history """) - total_tracks = cursor.fetchone()["total_successful_tracks"] or 0 + total_tracks = cursor.fetchone()["total_successful_tracks"] or 0 - # Recent downloads (last 7 days) - week_ago = time.time() - (7 * 24 * 60 * 60) - cursor = conn.execute( - """ + # Recent downloads (last 7 days) + week_ago = time.time() - (7 * 24 * 60 * 60) + cursor = conn.execute( + """ SELECT COUNT(*) as count FROM download_history WHERE timestamp > ? """, - (week_ago,), - ) - recent_downloads = cursor.fetchone()["count"] + (week_ago,), + ) + recent_downloads = cursor.fetchone()["count"] - return { - "by_type_and_status": type_stats, - "total_successful_tracks": total_tracks, - "recent_downloads_7d": recent_downloads, - } + return { + "by_type_and_status": type_stats, + "total_successful_tracks": total_tracks, + "recent_downloads_7d": recent_downloads, + } - except Exception as e: - logger.error(f"Failed to get download stats: {e}") - return {} + except Exception as e: + logger.error(f"Failed to get download stats: {e}") + return {} - def search_history(self, query: str, limit: int = 50) -> List[Dict]: - """ - Search download history by title or artist. + def search_history(self, query: str, limit: int = 50) -> List[Dict]: + """ + Search download history by title or artist. - Args: - query: Search query for title or artist - limit: Maximum number of results + Args: + query: Search query for title or artist + limit: Maximum number of results - Returns: - List of matching download records - """ - try: - search_pattern = f"%{query}%" + Returns: + List of matching download records + """ + try: + search_pattern = f"%{query}%" - with self._get_connection() as conn: - cursor = conn.execute( - """ + with self._get_connection() as conn: + cursor = conn.execute( + """ SELECT * FROM download_history WHERE title LIKE ? OR artists LIKE ? ORDER BY timestamp DESC LIMIT ? 
""", - (search_pattern, search_pattern, limit), - ) + (search_pattern, search_pattern, limit), + ) - rows = cursor.fetchall() + rows = cursor.fetchall() - # Convert to list of dicts - result = [] - for row in rows: - record = dict(row) - # Parse JSON fields - for field in [ - "artists", - "external_ids", - "metadata", - "release_date", - "genres", - "images", - "owner", - ]: - if record.get(field): - try: - record[field] = json.loads(record[field]) - except (json.JSONDecodeError, TypeError): - pass - result.append(record) + # Convert to list of dicts + result = [] + for row in rows: + record = dict(row) + # Parse JSON fields + for field in [ + "artists", + "external_ids", + "metadata", + "release_date", + "genres", + "images", + "owner", + ]: + if record.get(field): + try: + record[field] = json.loads(record[field]) + except (json.JSONDecodeError, TypeError): + pass + result.append(record) - return result + return result - except Exception as e: - logger.error(f"Failed to search download history: {e}") - return [] + except Exception as e: + logger.error(f"Failed to search download history: {e}") + return [] - def get_download_by_task_id(self, task_id: str) -> Optional[Dict]: - """ - Get download history entry by task ID. + def get_download_by_task_id(self, task_id: str) -> Optional[Dict]: + """ + Get download history entry by task ID. - Args: - task_id: Celery task ID + Args: + task_id: Celery task ID - Returns: - Download record or None if not found - """ - try: - with self._get_connection() as conn: - cursor = conn.execute( - """ + Returns: + Download record or None if not found + """ + try: + with self._get_connection() as conn: + cursor = conn.execute( + """ SELECT * FROM download_history WHERE task_id = ? LIMIT 1 """, - (task_id,), - ) + (task_id,), + ) - row = cursor.fetchone() - if not row: - return None + row = cursor.fetchone() + if not row: + return None - record = dict(row) - # Parse JSON fields - for field in [ - "artists", - "external_ids", - "metadata", - "release_date", - "genres", - "images", - "owner", - ]: - if record.get(field): - try: - record[field] = json.loads(record[field]) - except (json.JSONDecodeError, TypeError): - pass + record = dict(row) + # Parse JSON fields + for field in [ + "artists", + "external_ids", + "metadata", + "release_date", + "genres", + "images", + "owner", + ]: + if record.get(field): + try: + record[field] = json.loads(record[field]) + except (json.JSONDecodeError, TypeError): + pass - return record + return record - except Exception as e: - logger.error(f"Failed to get download by task ID {task_id}: {e}") - return None + except Exception as e: + logger.error(f"Failed to get download by task ID {task_id}: {e}") + return None - def get_recent_downloads(self, limit: int = 20) -> List[Dict]: - """Get most recent downloads.""" - return self.get_download_history(limit=limit, offset=0) + def get_recent_downloads(self, limit: int = 20) -> List[Dict]: + """Get most recent downloads.""" + return self.get_download_history(limit=limit, offset=0) - def get_failed_downloads(self, limit: int = 50) -> List[Dict]: - """Get failed downloads.""" - return self.get_download_history(limit=limit, status="failed") + def get_failed_downloads(self, limit: int = 50) -> List[Dict]: + """Get failed downloads.""" + return self.get_download_history(limit=limit, status="failed") - def clear_old_history(self, days_old: int = 30) -> int: - """ - Clear download history older than specified days. 
+ def clear_old_history(self, days_old: int = 30) -> int: + """ + Clear download history older than specified days. - Args: - days_old: Number of days old to keep (default 30) + Args: + days_old: Number of days old to keep (default 30) - Returns: - Number of records deleted - """ - try: - cutoff_time = time.time() - (days_old * 24 * 60 * 60) + Returns: + Number of records deleted + """ + try: + cutoff_time = time.time() - (days_old * 24 * 60 * 60) - with self._get_connection() as conn: - # Get list of children tables to delete - cursor = conn.execute( - """ + with self._get_connection() as conn: + # Get list of children tables to delete + cursor = conn.execute( + """ SELECT children_table FROM download_history WHERE timestamp < ? AND children_table IS NOT NULL """, - (cutoff_time,), - ) + (cutoff_time,), + ) - children_tables = [row["children_table"] for row in cursor.fetchall()] + children_tables = [row["children_table"] for row in cursor.fetchall()] - # Delete main history records - cursor = conn.execute( - """ + # Delete main history records + cursor = conn.execute( + """ DELETE FROM download_history WHERE timestamp < ? """, - (cutoff_time,), - ) + (cutoff_time,), + ) - deleted_count = cursor.rowcount + deleted_count = cursor.rowcount - # Drop children tables - for table_name in children_tables: - try: - conn.execute(f"DROP TABLE IF EXISTS {table_name}") - except Exception as e: - logger.warning( - f"Failed to drop children table {table_name}: {e}" - ) + # Drop children tables + for table_name in children_tables: + try: + conn.execute(f"DROP TABLE IF EXISTS {table_name}") + except Exception as e: + logger.warning( + f"Failed to drop children table {table_name}: {e}" + ) - logger.info( - f"Cleared {deleted_count} old history records and {len(children_tables)} children tables" - ) - return deleted_count + logger.info( + f"Cleared {deleted_count} old history records and {len(children_tables)} children tables" + ) + return deleted_count - except Exception as e: - logger.error(f"Failed to clear old history: {e}") - return 0 + except Exception as e: + logger.error(f"Failed to clear old history: {e}") + return 0 - # --- New helpers for failed children insertion and metadata enrichment --- - def _populate_failed_children_for_album( - self, table_name: str, summary: Dict, album_title: str - ) -> None: - try: - self._create_children_table(table_name) - with self._get_connection() as conn: - for failed_item in summary.get("failed_tracks", []): - track = failed_item.get("track", {}) - track_data = self._prepare_child_track_data( - track, album_title, "failed" - ) - track_data["metadata"]["failure_reason"] = failed_item.get( - "reason", "cancelled" - ) - conn.execute( - f""" + # --- New helpers for failed children insertion and metadata enrichment --- + def _populate_failed_children_for_album( + self, table_name: str, summary: Dict, album_title: str + ) -> None: + try: + self._create_children_table(table_name) + with self._get_connection() as conn: + for failed_item in summary.get("failed_tracks", []): + track = failed_item.get("track", {}) + track_data = self._prepare_child_track_data( + track, album_title, "failed" + ) + track_data["metadata"]["failure_reason"] = failed_item.get( + "reason", "cancelled" + ) + conn.execute( + f""" INSERT INTO {table_name} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, isrc, timestamp, position, metadata ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
""", - track_data["values"], - ) - except Exception as e: - logger.error( - f"Failed to insert failed children for album into {table_name}: {e}" - ) + track_data["values"], + ) + except Exception as e: + logger.error( + f"Failed to insert failed children for album into {table_name}: {e}" + ) - def _populate_failed_children_for_playlist( - self, table_name: str, summary: Dict - ) -> None: - try: - self._create_children_table(table_name) - with self._get_connection() as conn: - for failed_item in summary.get("failed_tracks", []): - track = failed_item.get("track", {}) - track_data = self._prepare_child_track_data(track, "", "failed") - track_data["metadata"]["failure_reason"] = failed_item.get( - "reason", "cancelled" - ) - conn.execute( - f""" + def _populate_failed_children_for_playlist( + self, table_name: str, summary: Dict + ) -> None: + try: + self._create_children_table(table_name) + with self._get_connection() as conn: + for failed_item in summary.get("failed_tracks", []): + track = failed_item.get("track", {}) + track_data = self._prepare_child_track_data(track, "", "failed") + track_data["metadata"]["failure_reason"] = failed_item.get( + "reason", "cancelled" + ) + conn.execute( + f""" INSERT INTO {table_name} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, isrc, timestamp, position, metadata ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, - track_data["values"], - ) - except Exception as e: - logger.error( - f"Failed to insert failed children for playlist into {table_name}: {e}" - ) + track_data["values"], + ) + except Exception as e: + logger.error( + f"Failed to insert failed children for playlist into {table_name}: {e}" + ) - def _enrich_album_metadata_from_summary(self, album: Dict, summary: Dict) -> Dict: - if album.get("images") and album.get("release_date") and album.get("genres"): - return album - # Prefer successful track album data, then skipped, then failed - src_track = None - for key in ("successful_tracks", "skipped_tracks", "failed_tracks"): - arr = summary.get(key, []) or [] - if arr: - src_track = ( - arr[0] - if key != "failed_tracks" - else (arr[0].get("track") if isinstance(arr[0], dict) else None) - ) - break - if isinstance(src_track, dict): - album_obj = src_track.get("album", {}) or {} - album.setdefault("images", album_obj.get("images", [])) - album.setdefault("release_date", album_obj.get("release_date", {})) - album.setdefault("genres", album_obj.get("genres", [])) - album.setdefault( - "album_type", album_obj.get("album_type", album.get("album_type")) - ) - return album + def _enrich_album_metadata_from_summary(self, album: Dict, summary: Dict) -> Dict: + if album.get("images") and album.get("release_date") and album.get("genres"): + return album + # Prefer successful track album data, then skipped, then failed + src_track = None + for key in ("successful_tracks", "skipped_tracks", "failed_tracks"): + arr = summary.get(key, []) or [] + if arr: + src_track = ( + arr[0] + if key != "failed_tracks" + else (arr[0].get("track") if isinstance(arr[0], dict) else None) + ) + break + if isinstance(src_track, dict): + album_obj = src_track.get("album", {}) or {} + album.setdefault("images", album_obj.get("images", [])) + album.setdefault("release_date", album_obj.get("release_date", {})) + album.setdefault("genres", album_obj.get("genres", [])) + album.setdefault( + "album_type", album_obj.get("album_type", album.get("album_type")) + ) + return album - # --- Upsert helpers to avoid duplicate 
children rows and keep most complete --- - def _map_values_to_row(self, values: tuple) -> Dict: - ( - title, - artists_json, - album_title, - duration_ms, - track_number, - disc_number, - explicit, - status, - external_ids_json, - genres_json, - isrc, - timestamp, - position, - metadata_json, - ) = values - return { - "title": title, - "artists": artists_json, - "album_title": album_title, - "duration_ms": duration_ms, - "track_number": track_number, - "disc_number": disc_number, - "explicit": explicit, - "status": status, - "external_ids": external_ids_json, - "genres": genres_json, - "isrc": isrc, - "timestamp": timestamp, - "position": position, - "metadata": metadata_json, - } + # --- Upsert helpers to avoid duplicate children rows and keep most complete --- + def _map_values_to_row(self, values: tuple) -> Dict: + ( + title, + artists_json, + album_title, + duration_ms, + track_number, + disc_number, + explicit, + status, + external_ids_json, + genres_json, + isrc, + timestamp, + position, + metadata_json, + ) = values + return { + "title": title, + "artists": artists_json, + "album_title": album_title, + "duration_ms": duration_ms, + "track_number": track_number, + "disc_number": disc_number, + "explicit": explicit, + "status": status, + "external_ids": external_ids_json, + "genres": genres_json, + "isrc": isrc, + "timestamp": timestamp, + "position": position, + "metadata": metadata_json, + } - def _status_priority(self, status: str) -> int: - order = {"completed": 3, "skipped": 2, "failed": 1} - return order.get((status or "").lower(), 0) + def _status_priority(self, status: str) -> int: + order = {"completed": 3, "skipped": 2, "failed": 1} + return order.get((status or "").lower(), 0) - def _merge_child_rows(self, existing: Dict, new: Dict) -> Dict: - merged = existing.copy() - # Prefer non-empty/non-null values; for status use priority - for key in [ - "artists", - "album_title", - "duration_ms", - "track_number", - "disc_number", - "explicit", - "external_ids", - "genres", - "isrc", - "metadata", - ]: - old_val = merged.get(key) - new_val = new.get(key) - # Consider JSON strings: prefer longer/ non-empty - if (old_val in (None, "", 0)) and new_val not in (None, ""): - merged[key] = new_val - elif ( - isinstance(new_val, str) - and isinstance(old_val, str) - and len(new_val) > len(old_val) - ): - merged[key] = new_val - # Status: keep highest priority - if self._status_priority(new.get("status")) > self._status_priority( - existing.get("status") - ): - merged["status"] = new.get("status") - # Timestamp: keep earliest for creation but allow update to latest timestamp for last update - merged["timestamp"] = max( - existing.get("timestamp") or 0, new.get("timestamp") or 0 - ) - return merged + def _merge_child_rows(self, existing: Dict, new: Dict) -> Dict: + merged = existing.copy() + # Prefer non-empty/non-null values; for status use priority + for key in [ + "artists", + "album_title", + "duration_ms", + "track_number", + "disc_number", + "explicit", + "external_ids", + "genres", + "isrc", + "metadata", + ]: + old_val = merged.get(key) + new_val = new.get(key) + # Consider JSON strings: prefer longer/ non-empty + if (old_val in (None, "", 0)) and new_val not in (None, ""): + merged[key] = new_val + elif ( + isinstance(new_val, str) + and isinstance(old_val, str) + and len(new_val) > len(old_val) + ): + merged[key] = new_val + # Status: keep highest priority + if self._status_priority(new.get("status")) > self._status_priority( + existing.get("status") + ): + merged["status"] = 
new.get("status") + # Timestamp: keep earliest for creation but allow update to latest timestamp for last update + merged["timestamp"] = max( + existing.get("timestamp") or 0, new.get("timestamp") or 0 + ) + return merged - def _find_existing_child_row( - self, conn: sqlite3.Connection, table_name: str, new_row: Dict - ) -> Optional[Dict]: - try: - cursor = conn.execute( - f"SELECT * FROM {table_name} WHERE title = ?", - (new_row.get("title", ""),), - ) - candidates = [dict(r) for r in cursor.fetchall()] - if not candidates: - return None - # Try match by ISRC - isrc = new_row.get("isrc") - if isrc: - for r in candidates: - if (r.get("isrc") or "") == isrc: - return r - # Try match by position (playlist) then track_number (album) - pos = new_row.get("position") - if pos is not None: - for r in candidates: - if r.get("position") == pos: - return r - tn = new_row.get("track_number") - if tn: - for r in candidates: - if r.get("track_number") == tn: - return r - # Fallback: first candidate with same title - return candidates[0] - except Exception: - return None + def _find_existing_child_row( + self, conn: sqlite3.Connection, table_name: str, new_row: Dict + ) -> Optional[Dict]: + try: + cursor = conn.execute( + f"SELECT * FROM {table_name} WHERE title = ?", + (new_row.get("title", ""),), + ) + candidates = [dict(r) for r in cursor.fetchall()] + if not candidates: + return None + # Try match by ISRC + isrc = new_row.get("isrc") + if isrc: + for r in candidates: + if (r.get("isrc") or "") == isrc: + return r + # Try match by position (playlist) then track_number (album) + pos = new_row.get("position") + if pos is not None: + for r in candidates: + if r.get("position") == pos: + return r + tn = new_row.get("track_number") + if tn: + for r in candidates: + if r.get("track_number") == tn: + return r + # Fallback: first candidate with same title + return candidates[0] + except Exception: + return None - def _upsert_child_row( - self, conn: sqlite3.Connection, table_name: str, row: Dict - ) -> None: - existing = self._find_existing_child_row(conn, table_name, row) - if existing: - merged = self._merge_child_rows(existing, row) - conn.execute( - f""" + def _upsert_child_row( + self, conn: sqlite3.Connection, table_name: str, row: Dict + ) -> None: + existing = self._find_existing_child_row(conn, table_name, row) + if existing: + merged = self._merge_child_rows(existing, row) + conn.execute( + f""" UPDATE {table_name} SET artists = ?, album_title = ?, duration_ms = ?, track_number = ?, disc_number = ?, explicit = ?, status = ?, external_ids = ?, genres = ?, isrc = ?, timestamp = ?, position = ?, metadata = ? WHERE id = ? 
""", - ( - merged.get("artists"), - merged.get("album_title"), - merged.get("duration_ms"), - merged.get("track_number"), - merged.get("disc_number"), - merged.get("explicit"), - merged.get("status"), - merged.get("external_ids"), - merged.get("genres"), - merged.get("isrc"), - merged.get("timestamp"), - merged.get("position"), - merged.get("metadata"), - existing.get("id"), - ), - ) - else: - conn.execute( - f""" + ( + merged.get("artists"), + merged.get("album_title"), + merged.get("duration_ms"), + merged.get("track_number"), + merged.get("disc_number"), + merged.get("explicit"), + merged.get("status"), + merged.get("external_ids"), + merged.get("genres"), + merged.get("isrc"), + merged.get("timestamp"), + merged.get("position"), + merged.get("metadata"), + existing.get("id"), + ), + ) + else: + conn.execute( + f""" INSERT INTO {table_name} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, isrc, timestamp, position, metadata ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, - ( - row.get("title"), - row.get("artists"), - row.get("album_title"), - row.get("duration_ms"), - row.get("track_number"), - row.get("disc_number"), - row.get("explicit"), - row.get("status"), - row.get("external_ids"), - row.get("genres"), - row.get("isrc"), - row.get("timestamp"), - row.get("position"), - row.get("metadata"), - ), - ) + ( + row.get("title"), + row.get("artists"), + row.get("album_title"), + row.get("duration_ms"), + row.get("track_number"), + row.get("disc_number"), + row.get("explicit"), + row.get("status"), + row.get("external_ids"), + row.get("genres"), + row.get("isrc"), + row.get("timestamp"), + row.get("position"), + row.get("metadata"), + ), + ) # Global history manager instance From bbd7d5a98513a13562dba77a4b85a2f34a5542d5 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Wed, 20 Aug 2025 09:44:49 -0500 Subject: [PATCH 11/14] feat: Added visual confirmation when saving settings and moved button to the top right --- .../src/components/config/DownloadsTab.tsx | 31 +++++--- .../src/components/config/FormattingTab.tsx | 33 ++++++--- .../src/components/config/GeneralTab.tsx | 38 +++++++--- .../src/components/config/ServerTab.tsx | 70 ++++++++++++++----- .../src/components/config/WatchTab.tsx | 31 +++++--- 5 files changed, 151 insertions(+), 52 deletions(-) diff --git a/spotizerr-ui/src/components/config/DownloadsTab.tsx b/spotizerr-ui/src/components/config/DownloadsTab.tsx index bb491b8..81eec47 100644 --- a/spotizerr-ui/src/components/config/DownloadsTab.tsx +++ b/spotizerr-ui/src/components/config/DownloadsTab.tsx @@ -72,6 +72,7 @@ const fetchCredentials = async (service: "spotify" | "deezer"): Promise(""); + const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle"); // Fetch watch config const { data: watchConfig } = useQuery({ @@ -97,10 +98,14 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) { mutationFn: saveDownloadConfig, onSuccess: () => { toast.success("Download settings saved successfully!"); + setSaveStatus("success"); + setTimeout(() => setSaveStatus("idle"), 3000); queryClient.invalidateQueries({ queryKey: ["config"] }); }, onError: (error) => { toast.error(`Failed to save settings: ${error.message}`); + setSaveStatus("error"); + setTimeout(() => setSaveStatus("idle"), 3000); }, }); @@ -173,6 +178,24 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) { return (
+
+
+ {saveStatus === "success" && ( + Saved + )} + {saveStatus === "error" && ( + Save failed + )} + +
+
+ {/* Download Settings */}

Download Behavior

@@ -360,14 +383,6 @@ export function DownloadsTab({ config, isLoading }: DownloadsTabProps) { />
- - ); } diff --git a/spotizerr-ui/src/components/config/FormattingTab.tsx b/spotizerr-ui/src/components/config/FormattingTab.tsx index fcd05ef..57ea7a7 100644 --- a/spotizerr-ui/src/components/config/FormattingTab.tsx +++ b/spotizerr-ui/src/components/config/FormattingTab.tsx @@ -1,4 +1,4 @@ -import { useRef } from "react"; +import { useRef, useState } from "react"; import { useForm, type SubmitHandler } from "react-hook-form"; import { authApiClient } from "../../lib/api-client"; import { toast } from "sonner"; @@ -79,15 +79,20 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) { const queryClient = useQueryClient(); const dirInputRef = useRef(null); const trackInputRef = useRef(null); + const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle"); const mutation = useMutation({ mutationFn: saveFormattingConfig, onSuccess: () => { toast.success("Formatting settings saved!"); + setSaveStatus("success"); + setTimeout(() => setSaveStatus("idle"), 3000); queryClient.invalidateQueries({ queryKey: ["config"] }); }, onError: (error) => { toast.error(`Failed to save settings: ${error.message}`); + setSaveStatus("error"); + setTimeout(() => setSaveStatus("idle"), 3000); }, }); @@ -120,6 +125,24 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) { return (
+
+
+ {saveStatus === "success" && ( + Saved + )} + {saveStatus === "error" && ( + Save failed + )} + +
+
+

File Naming

@@ -185,14 +208,6 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) {
- -
); } diff --git a/spotizerr-ui/src/components/config/GeneralTab.tsx b/spotizerr-ui/src/components/config/GeneralTab.tsx index a0aa968..171cd55 100644 --- a/spotizerr-ui/src/components/config/GeneralTab.tsx +++ b/spotizerr-ui/src/components/config/GeneralTab.tsx @@ -3,7 +3,7 @@ import { authApiClient } from "../../lib/api-client"; import { toast } from "sonner"; import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; import { useSettings } from "../../contexts/settings-context"; -import { useEffect } from "react"; +import { useEffect, useState } from "react"; // --- Type Definitions --- interface Credential { @@ -56,13 +56,21 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro } }, [config, reset]); + const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle"); + const mutation = useMutation({ mutationFn: saveGeneralConfig, onSuccess: () => { toast.success("General settings saved!"); + setSaveStatus("success"); + setTimeout(() => setSaveStatus("idle"), 3000); queryClient.invalidateQueries({ queryKey: ["config"] }); }, - onError: (e: Error) => toast.error(`Failed to save: ${e.message}`), + onError: (e: Error) => { + toast.error(`Failed to save: ${e.message}`); + setSaveStatus("error"); + setTimeout(() => setSaveStatus("idle"), 3000); + }, }); const onSubmit: SubmitHandler = (data) => { @@ -74,6 +82,24 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro return (
+
+
+ {saveStatus === "success" && ( + Saved + )} + {saveStatus === "error" && ( + Save failed + )} + +
+
+

Service Defaults

@@ -140,14 +166,6 @@ export function GeneralTab({ config, isLoading: isConfigLoading }: GeneralTabPro The explicit content filter is controlled by an environment variable and cannot be changed here.

- - ); } diff --git a/spotizerr-ui/src/components/config/ServerTab.tsx b/spotizerr-ui/src/components/config/ServerTab.tsx index c71c19c..e0b288c 100644 --- a/spotizerr-ui/src/components/config/ServerTab.tsx +++ b/spotizerr-ui/src/components/config/ServerTab.tsx @@ -1,4 +1,4 @@ -import { useEffect } from "react"; +import { useEffect, useState } from "react"; import { useForm, Controller } from "react-hook-form"; import { authApiClient } from "../../lib/api-client"; import { toast } from "sonner"; @@ -46,14 +46,21 @@ function SpotifyApiForm() { const queryClient = useQueryClient(); const { data, isLoading } = useQuery({ queryKey: ["spotifyApiConfig"], queryFn: fetchSpotifyApiConfig }); const { register, handleSubmit, reset } = useForm(); + const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle"); const mutation = useMutation({ mutationFn: saveSpotifyApiConfig, onSuccess: () => { toast.success("Spotify API settings saved!"); + setSaveStatus("success"); + setTimeout(() => setSaveStatus("idle"), 3000); queryClient.invalidateQueries({ queryKey: ["spotifyApiConfig"] }); }, - onError: (e) => toast.error(`Failed to save: ${e.message}`), + onError: (e) => { + toast.error(`Failed to save: ${e.message}`); + setSaveStatus("error"); + setTimeout(() => setSaveStatus("idle"), 3000); + }, }); useEffect(() => { @@ -66,6 +73,24 @@ function SpotifyApiForm() { return (
+
+
+ {saveStatus === "success" && ( + Saved + )} + {saveStatus === "error" && ( + Save failed + )} + +
+
+
-
); } @@ -102,14 +120,21 @@ function WebhookForm() { const { data, isLoading } = useQuery({ queryKey: ["webhookConfig"], queryFn: fetchWebhookConfig }); const { register, handleSubmit, control, reset, watch } = useForm(); const currentUrl = watch("url"); + const [saveStatus, setSaveStatus] = useState<"idle" | "success" | "error">("idle"); const mutation = useMutation({ mutationFn: saveWebhookConfig, onSuccess: () => { // No toast needed since the function shows one + setSaveStatus("success"); + setTimeout(() => setSaveStatus("idle"), 3000); queryClient.invalidateQueries({ queryKey: ["webhookConfig"] }); }, - onError: (e) => toast.error(`Failed to save: ${e.message}`), + onError: (e) => { + toast.error(`Failed to save: ${e.message}`); + setSaveStatus("error"); + setTimeout(() => setSaveStatus("idle"), 3000); + }, }); const testMutation = useMutation({ @@ -130,6 +155,24 @@ function WebhookForm() { return (
+
+
+ {saveStatus === "success" && ( + Saved + )} + {saveStatus === "error" && ( + Save failed + )} + +
+
+
- +
+
+

Watchlist Behavior

@@ -234,14 +257,6 @@ export function WatchTab() { ))}
- - ); } From 772c22daa88b9a26d710fe19c9fccf635e86edc5 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Wed, 20 Aug 2025 10:16:24 -0500 Subject: [PATCH 12/14] fix: History quality + bitrate rendering and migration schema --- routes/migrations/v3_1_2.py | 81 +++++++++-- routes/utils/history_manager.py | 207 ++++++++++++++++++++++++---- spotizerr-ui/src/routes/history.tsx | 16 ++- 3 files changed, 260 insertions(+), 44 deletions(-) diff --git a/routes/migrations/v3_1_2.py b/routes/migrations/v3_1_2.py index 1e70fc9..26b00d0 100644 --- a/routes/migrations/v3_1_2.py +++ b/routes/migrations/v3_1_2.py @@ -1,24 +1,85 @@ import sqlite3 +import logging + + +logger = logging.getLogger(__name__) class MigrationV3_1_2: """ - Dummy migration for version 3.1.2. - No database schema changes were made between these versions. - This class serves as a placeholder to ensure the migration runner - is aware of this version and can proceed without errors. + Migration for version 3.1.2. + Ensure history children tables (album_*/playlist_*) include service and quality columns. """ + CHILDREN_EXTRA_COLUMNS: dict[str, str] = { + "service": "TEXT", + "quality_format": "TEXT", + "quality_bitrate": "TEXT", + } + + def _table_columns(self, conn: sqlite3.Connection, table: str) -> set[str]: + try: + cur = conn.execute(f"PRAGMA table_info({table})") + return {row[1] for row in cur.fetchall()} + except sqlite3.OperationalError: + return set() + + def _list_children_tables(self, conn: sqlite3.Connection) -> list[str]: + tables: set[str] = set() + try: + cur = conn.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE 'album_%' OR name LIKE 'playlist_%') AND name != 'download_history'" + ) + for row in cur.fetchall(): + if row and row[0]: + tables.add(row[0]) + except sqlite3.Error as e: + logger.warning(f"Failed to scan sqlite_master for children tables: {e}") + + try: + cur = conn.execute( + "SELECT DISTINCT children_table FROM download_history WHERE children_table IS NOT NULL AND TRIM(children_table) != ''" + ) + for row in cur.fetchall(): + t = row[0] + if t: + tables.add(t) + except sqlite3.Error as e: + logger.warning(f"Failed to scan download_history for children tables: {e}") + + return sorted(tables) + def check_history(self, conn: sqlite3.Connection) -> bool: - # No changes, so migration is not needed. + tables = self._list_children_tables(conn) + if not tables: + # Nothing to migrate + return True + # If any table is missing any of the extra columns, migration is needed + for t in tables: + cols = self._table_columns(conn, t) + if not set(self.CHILDREN_EXTRA_COLUMNS.keys()).issubset(cols): + return False return True def update_history(self, conn: sqlite3.Connection) -> None: - # No-op - pass + tables = self._list_children_tables(conn) + for t in tables: + existing = self._table_columns(conn, t) + for col_name, col_type in self.CHILDREN_EXTRA_COLUMNS.items(): + if col_name in existing: + continue + try: + conn.execute(f"ALTER TABLE {t} ADD COLUMN {col_name} {col_type}") + logger.info( + f"Added column '{col_name} {col_type}' to history children table '{t}'." + ) + except sqlite3.OperationalError as e: + logger.warning( + f"Could not add column '{col_name}' to history children table '{t}': {e}" + ) def check_watch_artists(self, conn: sqlite3.Connection) -> bool: - # No changes, so migration is not needed. 
+ # No changes for watch artists in 3.1.2 return True def update_watch_artists(self, conn: sqlite3.Connection) -> None: @@ -26,7 +87,7 @@ class MigrationV3_1_2: pass def check_watch_playlists(self, conn: sqlite3.Connection) -> bool: - # No changes, so migration is not needed. + # No changes for watch playlists in 3.1.2 return True def update_watch_playlists(self, conn: sqlite3.Connection) -> None: @@ -34,7 +95,7 @@ class MigrationV3_1_2: pass def check_accounts(self, conn: sqlite3.Connection) -> bool: - # No changes, so migration is not needed. + # No changes for accounts in 3.1.2 return True def update_accounts(self, conn: sqlite3.Connection) -> None: diff --git a/routes/utils/history_manager.py b/routes/utils/history_manager.py index 26b9913..375fa70 100644 --- a/routes/utils/history_manager.py +++ b/routes/utils/history_manager.py @@ -265,6 +265,56 @@ class HistoryManager: f"Error ensuring schema for {table_description} table '{table_name}': {e}" ) + def _create_children_table_with_cursor( + self, cursor: sqlite3.Cursor, table_name: str + ) -> None: + """Create or upgrade a children table using an existing cursor/connection to avoid nested write locks.""" + cursor.execute(f""" + CREATE TABLE IF NOT EXISTS {table_name} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + title TEXT NOT NULL, + artists TEXT, + album_title TEXT, + duration_ms INTEGER, + track_number INTEGER, + disc_number INTEGER, + explicit BOOLEAN, + status TEXT NOT NULL, + external_ids TEXT, + genres TEXT, + isrc TEXT, + timestamp REAL NOT NULL, + position INTEGER, + metadata TEXT, + service TEXT, + quality_format TEXT, + quality_bitrate TEXT + ) + """) + expected_children_columns = { + "id": "INTEGER PRIMARY KEY AUTOINCREMENT", + "title": "TEXT NOT NULL", + "artists": "TEXT", + "album_title": "TEXT", + "duration_ms": "INTEGER", + "track_number": "INTEGER", + "disc_number": "INTEGER", + "explicit": "BOOLEAN", + "status": "TEXT NOT NULL", + "external_ids": "TEXT", + "genres": "TEXT", + "isrc": "TEXT", + "timestamp": "REAL NOT NULL", + "position": "INTEGER", + "metadata": "TEXT", + "service": "TEXT", + "quality_format": "TEXT", + "quality_bitrate": "TEXT", + } + self._ensure_table_schema( + cursor, table_name, expected_children_columns, "children history" + ) + def _create_children_table(self, table_name: str): """ Create a children table for storing individual tracks of an album/playlist. 
@@ -291,7 +341,10 @@ class HistoryManager: isrc TEXT, timestamp REAL NOT NULL, position INTEGER, - metadata TEXT + metadata TEXT, + service TEXT, + quality_format TEXT, + quality_bitrate TEXT ) """) expected_children_columns = { @@ -310,6 +363,9 @@ class HistoryManager: "timestamp": "REAL NOT NULL", "position": "INTEGER", "metadata": "TEXT", + "service": "TEXT", + "quality_format": "TEXT", + "quality_bitrate": "TEXT", } self._ensure_table_schema( cursor, table_name, expected_children_columns, "children history" @@ -327,7 +383,7 @@ class HistoryManager: cursor.execute( f"CREATE TABLE IF NOT EXISTS {t} (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT NOT NULL)" ) - self._create_children_table(t) + self._create_children_table_with_cursor(cursor, t) except Exception as e: logger.warning(f"Non-fatal: failed to migrate children table {t}: {e}") @@ -525,13 +581,19 @@ class HistoryManager: "status_info": status_info, } + # Inherit service/quality directly from parent summary when available + parent_summary = status_info.get("summary") or {} + parent_service = parent_summary.get("service") + parent_quality_format = parent_summary.get("quality") + parent_quality_bitrate = parent_summary.get("bitrate") + conn.execute( f""" INSERT INTO {table} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, - isrc, timestamp, position, metadata - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + isrc, timestamp, position, metadata, service, quality_format, quality_bitrate + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, ( track.get("title", "Unknown"), @@ -548,6 +610,9 @@ class HistoryManager: callback_data.get("timestamp", time.time()), track.get("position", 0), # For playlist tracks json.dumps(children_metadata), + parent_service, + parent_quality_format, + parent_quality_bitrate, ), ) @@ -714,7 +779,12 @@ class HistoryManager: try: if summary: self._populate_album_children_table( - children_table, summary, album.get("title", "") + children_table, + summary, + album.get("title", ""), + service_to_store, + quality_format_to_store, + quality_bitrate_to_store, ) except Exception as e: logger.warning( @@ -907,7 +977,13 @@ class HistoryManager: # If we have a summary (e.g., on cancellation), populate children from it including failed ones try: if summary: - self._populate_playlist_children_table(children_table, summary) + self._populate_playlist_children_table( + children_table, + summary, + service_to_store, + quality_format_to_store, + quality_bitrate_to_store, + ) except Exception as e: logger.warning( f"Failed to populate children from summary for playlist {children_table}: {e}" @@ -923,8 +999,14 @@ class HistoryManager: return None def _populate_album_children_table( - self, table_name: str, summary: Dict, album_title: str - ): + self, + table_name: str, + summary: Dict, + album_title: str, + service: str, + quality_format: Optional[str], + quality_bitrate: Optional[str], + ) -> None: """Populate children table with individual track records from album summary.""" try: # Ensure table exists before population @@ -934,7 +1016,12 @@ class HistoryManager: # Add successful tracks for track in summary.get("successful_tracks", []): track_data = self._prepare_child_track_data( - track, album_title, "completed" + track, + album_title, + "completed", + service, + quality_format, + quality_bitrate, ) all_rows.append(self._map_values_to_row(track_data["values"])) @@ -942,7 +1029,12 @@ class HistoryManager: for failed_item in 
summary.get("failed_tracks", []): track = failed_item.get("track", {}) track_data = self._prepare_child_track_data( - track, album_title, "failed" + track, + album_title, + "failed", + service, + quality_format, + quality_bitrate, ) track_data["metadata"]["failure_reason"] = failed_item.get( "reason", "Unknown error" @@ -952,7 +1044,12 @@ class HistoryManager: # Add skipped tracks for track in summary.get("skipped_tracks", []): track_data = self._prepare_child_track_data( - track, album_title, "skipped" + track, + album_title, + "skipped", + service, + quality_format, + quality_bitrate, ) all_rows.append(self._map_values_to_row(track_data["values"])) @@ -968,7 +1065,14 @@ class HistoryManager: except Exception as e: logger.error(f"Failed to populate album children table {table_name}: {e}") - def _populate_playlist_children_table(self, table_name: str, summary: Dict): + def _populate_playlist_children_table( + self, + table_name: str, + summary: Dict, + service: str, + quality_format: Optional[str], + quality_bitrate: Optional[str], + ): """Populate children table with individual track records from playlist summary.""" try: # Ensure table exists before population @@ -977,13 +1081,17 @@ class HistoryManager: # Add successful tracks for track in summary.get("successful_tracks", []): - track_data = self._prepare_child_track_data(track, "", "completed") + track_data = self._prepare_child_track_data( + track, "", "completed", service, quality_format, quality_bitrate + ) all_rows.append(self._map_values_to_row(track_data["values"])) # Add failed tracks for failed_item in summary.get("failed_tracks", []): track = failed_item.get("track", {}) - track_data = self._prepare_child_track_data(track, "", "failed") + track_data = self._prepare_child_track_data( + track, "", "failed", service, quality_format, quality_bitrate + ) track_data["metadata"]["failure_reason"] = failed_item.get( "reason", "Unknown error" ) @@ -991,7 +1099,9 @@ class HistoryManager: # Add skipped tracks for track in summary.get("skipped_tracks", []): - track_data = self._prepare_child_track_data(track, "", "skipped") + track_data = self._prepare_child_track_data( + track, "", "skipped", service, quality_format, quality_bitrate + ) all_rows.append(self._map_values_to_row(track_data["values"])) with self._get_connection() as conn: @@ -1008,7 +1118,13 @@ class HistoryManager: ) def _prepare_child_track_data( - self, track: Dict, default_album: str, status: str + self, + track: Dict, + default_album: str, + status: str, + service: str, + quality_format: Optional[str], + quality_bitrate: Optional[str], ) -> Dict: """Prepare track data for insertion into children table.""" artists = self._extract_artists(track) @@ -1039,6 +1155,9 @@ class HistoryManager: time.time(), track.get("position", 0), # For playlist tracks json.dumps(metadata), + service, + quality_format, + quality_bitrate, ) return {"values": values, "metadata": metadata} @@ -1391,7 +1510,13 @@ class HistoryManager: # --- New helpers for failed children insertion and metadata enrichment --- def _populate_failed_children_for_album( - self, table_name: str, summary: Dict, album_title: str + self, + table_name: str, + summary: Dict, + album_title: str, + service: str, + quality_format: Optional[str], + quality_bitrate: Optional[str], ) -> None: try: self._create_children_table(table_name) @@ -1399,7 +1524,12 @@ class HistoryManager: for failed_item in summary.get("failed_tracks", []): track = failed_item.get("track", {}) track_data = self._prepare_child_track_data( - track, 
album_title, "failed" + track, + album_title, + "failed", + service, + quality_format, + quality_bitrate, ) track_data["metadata"]["failure_reason"] = failed_item.get( "reason", "cancelled" @@ -1409,8 +1539,8 @@ class HistoryManager: INSERT INTO {table_name} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, - isrc, timestamp, position, metadata - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + isrc, timestamp, position, metadata, service, quality_format, quality_bitrate + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, track_data["values"], ) @@ -1420,14 +1550,21 @@ class HistoryManager: ) def _populate_failed_children_for_playlist( - self, table_name: str, summary: Dict + self, + table_name: str, + summary: Dict, + service: str, + quality_format: Optional[str], + quality_bitrate: Optional[str], ) -> None: try: self._create_children_table(table_name) with self._get_connection() as conn: for failed_item in summary.get("failed_tracks", []): track = failed_item.get("track", {}) - track_data = self._prepare_child_track_data(track, "", "failed") + track_data = self._prepare_child_track_data( + track, "", "failed", service, quality_format, quality_bitrate + ) track_data["metadata"]["failure_reason"] = failed_item.get( "reason", "cancelled" ) @@ -1436,8 +1573,8 @@ class HistoryManager: INSERT INTO {table_name} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, - isrc, timestamp, position, metadata - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + isrc, timestamp, position, metadata, service, quality_format, quality_bitrate + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, track_data["values"], ) @@ -1487,6 +1624,9 @@ class HistoryManager: timestamp, position, metadata_json, + service, + quality_format, + quality_bitrate, ) = values return { "title": title, @@ -1503,6 +1643,9 @@ class HistoryManager: "timestamp": timestamp, "position": position, "metadata": metadata_json, + "service": service, + "quality_format": quality_format, + "quality_bitrate": quality_bitrate, } def _status_priority(self, status: str) -> int: @@ -1523,6 +1666,9 @@ class HistoryManager: "genres", "isrc", "metadata", + "service", + "quality_format", + "quality_bitrate", ]: old_val = merged.get(key) new_val = new.get(key) @@ -1590,7 +1736,8 @@ class HistoryManager: UPDATE {table_name} SET artists = ?, album_title = ?, duration_ms = ?, track_number = ?, disc_number = ?, explicit = ?, status = ?, external_ids = ?, - genres = ?, isrc = ?, timestamp = ?, position = ?, metadata = ? + genres = ?, isrc = ?, timestamp = ?, position = ?, metadata = ?, + service = ?, quality_format = ?, quality_bitrate = ? WHERE id = ? """, ( @@ -1607,6 +1754,9 @@ class HistoryManager: merged.get("timestamp"), merged.get("position"), merged.get("metadata"), + merged.get("service"), + merged.get("quality_format"), + merged.get("quality_bitrate"), existing.get("id"), ), ) @@ -1616,8 +1766,8 @@ class HistoryManager: INSERT INTO {table_name} ( title, artists, album_title, duration_ms, track_number, disc_number, explicit, status, external_ids, genres, - isrc, timestamp, position, metadata - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + isrc, timestamp, position, metadata, service, quality_format, quality_bitrate + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
""", ( row.get("title"), @@ -1634,6 +1784,9 @@ class HistoryManager: row.get("timestamp"), row.get("position"), row.get("metadata"), + row.get("service"), + row.get("quality_format"), + row.get("quality_bitrate"), ), ) diff --git a/spotizerr-ui/src/routes/history.tsx b/spotizerr-ui/src/routes/history.tsx index 495cc15..7effd19 100644 --- a/spotizerr-ui/src/routes/history.tsx +++ b/spotizerr-ui/src/routes/history.tsx @@ -54,6 +54,9 @@ type ChildTrack = { timestamp: number; position?: number; metadata: Record; + service?: string; + quality_format?: string; + quality_bitrate?: string; }; type ChildrenResponse = { @@ -73,7 +76,9 @@ const STATUS_CLASS: Record = { skipped: "text-content-muted dark:text-content-muted-dark", }; -const formatQuality = (entry: HistoryEntry): string => { +const formatQuality = ( + entry: { quality_format?: string; quality_bitrate?: string } +): string => { const format = entry.quality_format || "Unknown"; const bitrate = entry.quality_bitrate || ""; return bitrate ? `${format} ${bitrate}` : format; @@ -195,11 +200,8 @@ export const History = () => { id: "quality", header: "Quality", cell: (info) => { - const entry = info.row.original; - if ("download_type" in entry) { - return formatQuality(entry); - } - return "N/A"; + const entry = info.row.original as HistoryEntry | ChildTrack; + return formatQuality(entry); }, }), columnHelper.accessor("status", { @@ -622,7 +624,7 @@ export const History = () => {
Quality: - {"download_type" in entry ? formatQuality(entry) : "N/A"} + {formatQuality(entry as HistoryEntry | ChildTrack)}
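The children-table migration above follows SQLite's additive-column pattern: read `PRAGMA table_info`, then `ALTER TABLE ... ADD COLUMN` for whatever is missing. A minimal self-contained sketch of the same pattern; the `album_demo` table and the in-memory database are illustrative stand-ins, not part of the codebase:

```python
import sqlite3

# Columns MigrationV3_1_2 adds to each album_*/playlist_* children table.
EXTRA_COLUMNS = {
    "service": "TEXT",
    "quality_format": "TEXT",
    "quality_bitrate": "TEXT",
}

def ensure_columns(conn: sqlite3.Connection, table: str) -> None:
    # PRAGMA table_info yields (cid, name, type, notnull, dflt_value, pk);
    # index 1 is the column name.
    existing = {row[1] for row in conn.execute(f"PRAGMA table_info({table})")}
    for name, col_type in EXTRA_COLUMNS.items():
        if name not in existing:
            # SQLite only supports additive ALTERs, so missing columns are
            # appended one at a time; existing rows get NULL for them.
            conn.execute(f"ALTER TABLE {table} ADD COLUMN {name} {col_type}")

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE album_demo (id INTEGER PRIMARY KEY, title TEXT NOT NULL)")
ensure_columns(conn, "album_demo")
print([row[1] for row in conn.execute("PRAGMA table_info(album_demo)")])
# -> ['id', 'title', 'service', 'quality_format', 'quality_bitrate']
```

Re-running `ensure_columns` is a no-op, which is why `check_history` can treat "all extra columns present" as "migration already applied".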
From ac49089e255b4cfa7d85e40d9123c56f75b214b1 Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Wed, 20 Aug 2025 10:42:21 -0500 Subject: [PATCH 13/14] feat: Added pad_number_width setting to the backend (deezspot 2.7.2 --- routes/utils/album.py | 5 +++++ routes/utils/celery_config.py | 2 ++ routes/utils/celery_tasks.py | 15 +++++++++++++++ routes/utils/playlist.py | 5 +++++ routes/utils/track.py | 5 +++++ 5 files changed, 32 insertions(+) diff --git a/routes/utils/album.py b/routes/utils/album.py index b81e0fa..be67078 100755 --- a/routes/utils/album.py +++ b/routes/utils/album.py @@ -32,6 +32,7 @@ def download_album( spotify_metadata=True, _is_celery_task_execution=False, # Added to skip duplicate check from Celery task real_time_multiplier=None, + pad_number_width=None, ): if not _is_celery_task_execution: existing_task = get_existing_task_id( @@ -117,6 +118,7 @@ def download_album( bitrate=bitrate, artist_separator=artist_separator, spotify_metadata=spotify_metadata, + pad_number_width=pad_number_width, ) print( f"DEBUG: album.py - Album download via Deezer (account: {fallback}) successful for Spotify URL." @@ -175,6 +177,7 @@ def download_album( bitrate=bitrate, artist_separator=artist_separator, real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) print( f"DEBUG: album.py - Spotify direct download (account: {main} for blob) successful." @@ -231,6 +234,7 @@ def download_album( bitrate=bitrate, artist_separator=artist_separator, real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) print( f"DEBUG: album.py - Direct Spotify download (account: {main} for blob) successful." @@ -271,6 +275,7 @@ def download_album( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + pad_number_width=pad_number_width, ) print( f"DEBUG: album.py - Direct Deezer download (account: {main}) successful." 
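Spotizerr only forwards `pad_number_width` to deezspot; it does not format track numbers itself. Assuming deezspot applies plain zero-padding to the requested width (an assumption about the library, not something this diff shows), the observable effect would look like this; `pad_track_number` is a hypothetical stand-in for the library's formatter:

```python
def pad_track_number(track_number: int, width: int | None) -> str:
    # Hypothetical stand-in: None (or 0) means "leave the number as-is";
    # otherwise zero-pad to the given width.
    if not width:
        return str(track_number)
    return str(track_number).zfill(width)

for width in (None, 2, 3):
    print(repr(pad_track_number(7, width)))
# '7', '07', '007'
```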
diff --git a/routes/utils/celery_config.py b/routes/utils/celery_config.py index f95812e..70fd3a3 100644 --- a/routes/utils/celery_config.py +++ b/routes/utils/celery_config.py @@ -50,6 +50,7 @@ DEFAULT_MAIN_CONFIG = { "separateTracksByUser": False, "watch": {}, "realTimeMultiplier": 0, + "padNumberWidth": 3, } @@ -65,6 +66,7 @@ def _migrate_legacy_keys(cfg: dict) -> tuple[dict, bool]: "recursive_quality": "recursiveQuality", "spotify_metadata": "spotifyMetadata", "real_time_multiplier": "realTimeMultiplier", + "pad_number_width": "padNumberWidth", } for legacy, camel in legacy_map.items(): if legacy in out and camel not in out: diff --git a/routes/utils/celery_tasks.py b/routes/utils/celery_tasks.py index f1a384d..52853f5 100644 --- a/routes/utils/celery_tasks.py +++ b/routes/utils/celery_tasks.py @@ -550,6 +550,9 @@ def retry_task(task_id): task_info["pad_tracks"] = task_info.get( "pad_tracks", config_params.get("tracknum_padding", True) ) + task_info["pad_number_width"] = task_info.get( + "pad_number_width", config_params.get("padNumberWidth", 3) + ) # Store the updated task info store_task_info(new_task_id, task_info) @@ -1629,6 +1632,9 @@ def download_track(self, **task_data): real_time_multiplier = task_data.get( "real_time_multiplier", config_params.get("realTimeMultiplier", 0) ) + pad_number_width = task_data.get( + "pad_number_width", config_params.get("padNumberWidth", 3) + ) # Execute the download - service is now determined from URL download_track_func( @@ -1650,6 +1656,7 @@ def download_track(self, **task_data): spotify_metadata=spotify_metadata, _is_celery_task_execution=True, # Skip duplicate check inside Celery task (consistency) real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) return {"status": "success", "message": "Track download completed"} @@ -1732,6 +1739,9 @@ def download_album(self, **task_data): real_time_multiplier = task_data.get( "real_time_multiplier", config_params.get("realTimeMultiplier", 0) ) + pad_number_width = task_data.get( + "pad_number_width", config_params.get("padNumberWidth", 3) + ) # Execute the download - service is now determined from URL download_album_func( @@ -1753,6 +1763,7 @@ def download_album(self, **task_data): spotify_metadata=spotify_metadata, _is_celery_task_execution=True, # Skip duplicate check inside Celery task real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) return {"status": "success", "message": "Album download completed"} @@ -1844,6 +1855,9 @@ def download_playlist(self, **task_data): real_time_multiplier = task_data.get( "real_time_multiplier", config_params.get("realTimeMultiplier", 0) ) + pad_number_width = task_data.get( + "pad_number_width", config_params.get("padNumberWidth", 3) + ) # Execute the download - service is now determined from URL download_playlist_func( @@ -1868,6 +1882,7 @@ def download_playlist(self, **task_data): spotify_metadata=spotify_metadata, _is_celery_task_execution=True, # Skip duplicate check inside Celery task real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) return {"status": "success", "message": "Playlist download completed"} diff --git a/routes/utils/playlist.py b/routes/utils/playlist.py index ffd47ed..efdec27 100755 --- a/routes/utils/playlist.py +++ b/routes/utils/playlist.py @@ -29,6 +29,7 @@ def download_playlist( spotify_metadata=True, _is_celery_task_execution=False, # Added to skip duplicate check from Celery task real_time_multiplier=None, + pad_number_width=None, ): if not 
_is_celery_task_execution: existing_task = get_existing_task_id( @@ -114,6 +115,7 @@ def download_playlist( bitrate=bitrate, artist_separator=artist_separator, spotify_metadata=spotify_metadata, + pad_number_width=pad_number_width, ) print( f"DEBUG: playlist.py - Playlist download via Deezer (account: {fallback}) successful for Spotify URL." @@ -177,6 +179,7 @@ def download_playlist( bitrate=bitrate, artist_separator=artist_separator, real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) print( f"DEBUG: playlist.py - Spotify direct download (account: {main} for blob) successful." @@ -239,6 +242,7 @@ def download_playlist( bitrate=bitrate, artist_separator=artist_separator, real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) print( f"DEBUG: playlist.py - Direct Spotify download (account: {main} for blob) successful." @@ -279,6 +283,7 @@ def download_playlist( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + pad_number_width=pad_number_width, ) print( f"DEBUG: playlist.py - Direct Deezer download (account: {main}) successful." diff --git a/routes/utils/track.py b/routes/utils/track.py index 6af3c7d..e1f8b4a 100755 --- a/routes/utils/track.py +++ b/routes/utils/track.py @@ -30,6 +30,7 @@ def download_track( spotify_metadata=True, _is_celery_task_execution=False, # Added for consistency, not currently used for duplicate check real_time_multiplier=None, + pad_number_width=None, ): try: # Detect URL source (Spotify or Deezer) from URL @@ -108,6 +109,7 @@ def download_track( bitrate=bitrate, artist_separator=artist_separator, spotify_metadata=spotify_metadata, + pad_number_width=pad_number_width, ) print( f"DEBUG: track.py - Track download via Deezer (account: {fallback}) successful for Spotify URL." @@ -168,6 +170,7 @@ def download_track( bitrate=bitrate, artist_separator=artist_separator, real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) print( f"DEBUG: track.py - Spotify direct download (account: {main} for blob) successful." @@ -225,6 +228,7 @@ def download_track( bitrate=bitrate, artist_separator=artist_separator, real_time_multiplier=real_time_multiplier, + pad_number_width=pad_number_width, ) print( f"DEBUG: track.py - Direct Spotify download (account: {main} for blob) successful." @@ -264,6 +268,7 @@ def download_track( convert_to=convert_to, bitrate=bitrate, artist_separator=artist_separator, + pad_number_width=pad_number_width, ) print( f"DEBUG: track.py - Direct Deezer download (account: {main}) successful." 
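Each task type resolves the new setting through the same three-step fallback: an explicit per-task value, then the stored `padNumberWidth` config key, then a hard default of 3. A condensed sketch of that chain, with plain dicts standing in for `task_data` and `config_params`:

```python
def resolve_pad_number_width(task_data: dict, config_params: dict) -> int:
    # Mirrors the lookups added to download_track/download_album/download_playlist:
    # a per-task override wins, then the global config, then 3.
    return task_data.get(
        "pad_number_width",
        config_params.get("padNumberWidth", 3),
    )

assert resolve_pad_number_width({"pad_number_width": 2}, {"padNumberWidth": 4}) == 2
assert resolve_pad_number_width({}, {"padNumberWidth": 4}) == 4
assert resolve_pad_number_width({}, {}) == 3
```

The snake_case `pad_number_width` key is still accepted at the config layer: `_migrate_legacy_keys` copies it to `padNumberWidth` when only the legacy spelling is present.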
From b564f016b929e013ee84b12fb623e154cdbdd52b Mon Sep 17 00:00:00 2001 From: Xoconoch Date: Wed, 20 Aug 2025 20:47:18 -0600 Subject: [PATCH 14/14] feat: Add track number padding width to the frontend --- requirements.txt | 2 +- .../src/components/config/FormattingTab.tsx | 27 ++++++++++++++++++- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 1d9e200..eb1861e 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ fastapi==0.116.1 uvicorn[standard]==0.35.0 celery==5.5.3 -deezspot-spotizerr==2.7.2 +deezspot-spotizerr==2.7.3 httpx==0.28.1 bcrypt==4.2.1 PyJWT==2.10.1 diff --git a/spotizerr-ui/src/components/config/FormattingTab.tsx b/spotizerr-ui/src/components/config/FormattingTab.tsx index 57ea7a7..46b365e 100644 --- a/spotizerr-ui/src/components/config/FormattingTab.tsx +++ b/spotizerr-ui/src/components/config/FormattingTab.tsx @@ -16,6 +16,7 @@ interface FormattingSettings { compilation: string; artistSeparator: string; spotifyMetadata: boolean; + padNumberWidth?: number | "auto"; } interface FormattingTabProps { @@ -97,7 +98,10 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) { }); const { register, handleSubmit, setValue } = useForm({ - values: config, + values: { + ...config, + padNumberWidth: config.padNumberWidth ?? 3, + }, }); // Correctly register the refs for react-hook-form while also holding a local ref. @@ -188,6 +192,27 @@ export function FormattingTab({ config, isLoading }: FormattingTabProps) { className="h-6 w-6 rounded" />
+
+ + { + if (typeof v !== "string") return v; + const trimmed = v.trim().toLowerCase(); + if (trimmed === "auto") return "auto" as const; + const parsed = parseInt(trimmed, 10); + return Number.isNaN(parsed) ? 3 : parsed; + }, + })} + className="block w-40 p-2 border bg-input-background dark:bg-input-background-dark border-input-border dark:border-input-border-dark rounded-md focus:outline-none focus:ring-2 focus:ring-input-focus text-sm" + /> +
+

+ "01. Track" if set to 2, "001. Track" if set to 3... +