From c19e85f0c9f6217b49d91651ea5ad212053fb754 Mon Sep 17 00:00:00 2001
From: Rasmus Widing
Date: Wed, 27 Aug 2025 11:05:33 +0300
Subject: [PATCH] fix: include_archived flag now works correctly in task listing

- Add include_archived parameter to TaskService.list_tasks()
- Service now conditionally applies archived filter based on parameter
- Add 'archived' field to task DTO for client visibility
- Update API endpoints to pass include_archived down to service
- Remove redundant client-side filtering in API layer
- Fix type hints in integration tests (dict[str, Any] | None)
- Use pytest.skip() instead of return for proper test reporting

These fixes address the functional bug identified by CodeRabbit where
archived tasks couldn't be retrieved even when explicitly requested.
---
 python/src/server/api_routes/projects_api.py        | 16 ++++------------
 .../src/server/services/projects/task_service.py    | 14 ++++++++++----
 .../tests/test_token_optimization_integration.py    |  9 ++++-----
 3 files changed, 18 insertions(+), 21 deletions(-)

diff --git a/python/src/server/api_routes/projects_api.py b/python/src/server/api_routes/projects_api.py
index 3a735a8e..669b06d7 100644
--- a/python/src/server/api_routes/projects_api.py
+++ b/python/src/server/api_routes/projects_api.py
@@ -512,8 +512,9 @@ async def list_project_tasks(project_id: str, include_archived: bool = False, ex
         task_service = TaskService()
         success, result = task_service.list_tasks(
             project_id=project_id,
-            include_closed=True,  # Get all tasks, we'll filter archived separately
+            include_closed=True,  # Get all tasks, including done
             exclude_large_fields=exclude_large_fields,
+            include_archived=include_archived,  # Pass the flag down to service
         )
 
         if not success:
@@ -521,20 +522,11 @@ async def list_project_tasks(project_id: str, include_archived: bool = False, ex
 
         tasks = result.get("tasks", [])
 
-        # Apply filters
-        filtered_tasks = []
-        for task in tasks:
-            # Skip archived tasks if not including them (handle None as False)
-            if not include_archived and task.get("archived", False):
-                continue
-
-            filtered_tasks.append(task)
-
         logfire.info(
-            f"Project tasks retrieved | project_id={project_id} | task_count={len(filtered_tasks)}"
+            f"Project tasks retrieved | project_id={project_id} | task_count={len(tasks)}"
         )
 
-        return filtered_tasks
+        return tasks
 
     except HTTPException:
         raise
diff --git a/python/src/server/services/projects/task_service.py b/python/src/server/services/projects/task_service.py
index 9d82d0f5..76471a26 100644
--- a/python/src/server/services/projects/task_service.py
+++ b/python/src/server/services/projects/task_service.py
@@ -190,7 +190,8 @@ class TaskService:
         project_id: str = None,
         status: str = None,
         include_closed: bool = False,
-        exclude_large_fields: bool = False
+        exclude_large_fields: bool = False,
+        include_archived: bool = False
     ) -> tuple[bool, dict[str, Any]]:
         """
         List tasks with various filters.
@@ -200,6 +201,7 @@ class TaskService:
             status: Filter by status
             include_closed: Include done tasks
             exclude_large_fields: If True, excludes sources and code_examples fields
+            include_archived: If True, includes archived tasks
 
         Returns:
             Tuple of (success, result_dict)
@@ -239,9 +241,12 @@ class TaskService:
                 query = query.neq("status", "done")
                 filters_applied.append("exclude done tasks")
 
-            # Filter out archived tasks using is null or is false
-            query = query.or_("archived.is.null,archived.is.false")
-            filters_applied.append("exclude archived tasks (null or false)")
+            # Filter out archived tasks only if not including them
+            if not include_archived:
+                query = query.or_("archived.is.null,archived.is.false")
+                filters_applied.append("exclude archived tasks (null or false)")
+            else:
+                filters_applied.append("include all tasks (including archived)")
 
             logger.info(f"Listing tasks with filters: {', '.join(filters_applied)}")
 
@@ -295,6 +300,7 @@ class TaskService:
                 "feature": task.get("feature"),
                 "created_at": task["created_at"],
                 "updated_at": task["updated_at"],
+                "archived": task.get("archived", False),
             }
 
             if not exclude_large_fields:
diff --git a/python/tests/test_token_optimization_integration.py b/python/tests/test_token_optimization_integration.py
index 7d82caa9..666190c0 100644
--- a/python/tests/test_token_optimization_integration.py
+++ b/python/tests/test_token_optimization_integration.py
@@ -6,10 +6,11 @@ Run with: uv run pytest tests/test_token_optimization_integration.py -v
 import httpx
 import json
 import asyncio
+import pytest
 from typing import Dict, Any, Tuple
 
 
-async def measure_response_size(url: str, params: Dict[str, Any] = None) -> Tuple[int, int]:
+async def measure_response_size(url: str, params: dict[str, Any] | None = None) -> tuple[int, float]:
     """Measure response size and estimate token count."""
     async with httpx.AsyncClient() as client:
         try:
@@ -38,8 +39,7 @@ async def test_projects_endpoint():
     if size_full > 0:
         print(f"Full content: {size_full:,} bytes | ~{tokens_full:,.0f} tokens")
     else:
-        print("⚠️ Skipping - server not available")
-        return
+        pytest.skip("Server not available on http://localhost:8181")
 
     # Test lightweight
     size_light, tokens_light = await measure_response_size(base_url, {"include_content": "false"})
@@ -75,8 +75,7 @@ async def test_tasks_endpoint():
     if size_full > 0:
         print(f"Full content: {size_full:,} bytes | ~{tokens_full:,.0f} tokens")
    else:
-        print("⚠️ Skipping - server not available")
-        return
+        pytest.skip("Server not available on http://localhost:8181")
 
     # Test lightweight
     size_light, tokens_light = await measure_response_size(base_url, {"exclude_large_fields": "true"})
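
Usage sketch (not part of the patch): how the new flag is exercised against
TaskService after this change. The import path and the project id below are
illustrative assumptions, not taken from the diff.

    # Minimal sketch, assuming the service module is importable on this path
    # and a database client is already configured for TaskService.
    from src.server.services.projects.task_service import TaskService  # path assumed

    task_service = TaskService()

    # Default behaviour: archived tasks are filtered out (archived is null or false).
    success, result = task_service.list_tasks(
        project_id="example-project-id",  # hypothetical id
        include_closed=True,
    )

    # Explicit opt-in: archived tasks are returned, each task dict now carrying "archived".
    success, result = task_service.list_tasks(
        project_id="example-project-id",  # hypothetical id
        include_closed=True,
        include_archived=True,
    )
    if success:
        archived = [t for t in result.get("tasks", []) if t.get("archived", False)]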