Mirror of https://github.com/coleam00/Archon.git (synced 2025-12-24 02:39:17 -05:00)
refactor: port allocation from dual ports to flexible port ranges
- Change from fixed backend/frontend ports to 10-port ranges per work order
- Support 20 concurrent work orders (200 ports: 9000-9199)
- Add port availability checking with flexible allocation
- Make git_worktree default sandbox type
- Standardize API routes with /api/ prefix
- Add comprehensive port allocation tests
- Update environment file generation with PORT_0-PORT_9 variables
- Maintain backward compatibility with BACKEND_PORT/FRONTEND_PORT aliases
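A work order's tooling reads its allocation from the generated .ports.env file. As an illustration of the format this commit describes (PORT_RANGE_*, PORT_0-PORT_9, and the BACKEND_PORT/FRONTEND_PORT aliases), a minimal loader might look like the sketch below; the helper name is hypothetical and not part of the change.

    # Minimal sketch: read the PORT_* variables from a worktree's .ports.env.
    # Assumes the key names introduced by this commit; load_allocated_ports is illustrative only.
    import os

    def load_allocated_ports(worktree_path: str) -> dict[str, str]:
        ports: dict[str, str] = {}
        with open(os.path.join(worktree_path, ".ports.env")) as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith("#"):
                    continue  # skip comments and blank lines
                key, _, value = line.partition("=")
                ports[key] = value
        return ports

    # Usage: ports["PORT_0"], or the backward-compatible ports["BACKEND_PORT"]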
@@ -28,7 +28,7 @@ class SandboxType(str, Enum):
     """Sandbox environment types"""

     GIT_BRANCH = "git_branch"
-    GIT_WORKTREE = "git_worktree"  # Placeholder for Phase 2+
+    GIT_WORKTREE = "git_worktree"  # Fully implemented - recommended for concurrent execution
     E2B = "e2b"  # Placeholder for Phase 2+
     DAGGER = "dagger"  # Placeholder for Phase 2+

@@ -102,7 +102,10 @@ class CreateAgentWorkOrderRequest(BaseModel):
     """

     repository_url: str = Field(..., description="Git repository URL")
-    sandbox_type: SandboxType = Field(..., description="Sandbox environment type")
+    sandbox_type: SandboxType = Field(
+        default=SandboxType.GIT_WORKTREE,
+        description="Sandbox environment type (defaults to git_worktree for efficient concurrent execution)"
+    )
     user_request: str = Field(..., description="User's description of the work to be done")
     selected_commands: list[str] = Field(
         default=["create-branch", "planning", "execute", "commit", "create-pr"],
@@ -9,7 +9,7 @@ import time

 from ..models import CommandExecutionResult, SandboxSetupError
 from ..utils.git_operations import get_current_branch
-from ..utils.port_allocation import find_next_available_ports
+from ..utils.port_allocation import find_available_port_range
 from ..utils.structured_logger import get_logger
 from ..utils.worktree_operations import (
     create_worktree,
@@ -33,8 +33,9 @@ class GitWorktreeSandbox:
         self.repository_url = repository_url
         self.sandbox_identifier = sandbox_identifier
         self.working_dir = get_worktree_path(repository_url, sandbox_identifier)
-        self.backend_port: int | None = None
-        self.frontend_port: int | None = None
+        self.port_range_start: int | None = None
+        self.port_range_end: int | None = None
+        self.available_ports: list[int] = []
         self._logger = logger.bind(
             sandbox_identifier=sandbox_identifier,
             repository_url=repository_url,
@@ -43,19 +44,21 @@ class GitWorktreeSandbox:
     async def setup(self) -> None:
        """Create worktree and set up isolated environment

-        Creates worktree from origin/main and allocates unique ports.
+        Creates worktree from origin/main and allocates a port range.
+        Each work order gets 10 ports for flexibility.
         """
         self._logger.info("worktree_sandbox_setup_started")

         try:
-            # Allocate ports deterministically
-            self.backend_port, self.frontend_port = find_next_available_ports(
+            # Allocate port range deterministically
+            self.port_range_start, self.port_range_end, self.available_ports = find_available_port_range(
                 self.sandbox_identifier
             )
             self._logger.info(
-                "ports_allocated",
-                backend_port=self.backend_port,
-                frontend_port=self.frontend_port,
+                "port_range_allocated",
+                port_range_start=self.port_range_start,
+                port_range_end=self.port_range_end,
+                available_ports_count=len(self.available_ports),
             )

             # Create worktree with temporary branch name
@@ -75,16 +78,17 @@ class GitWorktreeSandbox:
             # Set up environment with port configuration
             setup_worktree_environment(
                 worktree_path,
-                self.backend_port,
-                self.frontend_port,
+                self.port_range_start,
+                self.port_range_end,
+                self.available_ports,
                 self._logger
             )

             self._logger.info(
                 "worktree_sandbox_setup_completed",
                 working_dir=self.working_dir,
-                backend_port=self.backend_port,
-                frontend_port=self.frontend_port,
+                port_range=f"{self.port_range_start}-{self.port_range_end}",
+                available_ports_count=len(self.available_ports),
             )

         except Exception as e:
@@ -1,36 +1,54 @@
 """Port allocation utilities for isolated agent work order execution.

-Provides deterministic port allocation (backend: 9100-9114, frontend: 9200-9214)
+Provides deterministic port range allocation (10 ports per work order)
 based on work order ID to enable parallel execution without port conflicts.
+
+Architecture:
+- Each work order gets a range of 10 consecutive ports
+- Base port: 9000
+- Total range: 9000-9199 (200 ports)
+- Supports: 20 concurrent work orders
+- Ports can be used flexibly (CLI tools use 0, microservices use multiple)
 """

 import os
 import socket

+# Port allocation configuration
+PORT_RANGE_SIZE = 10  # Each work order gets 10 ports
+PORT_BASE = 9000  # Starting port
+MAX_CONCURRENT_WORK_ORDERS = 20  # 200 ports / 10 = 20 concurrent

-def get_ports_for_work_order(work_order_id: str) -> tuple[int, int]:
-    """Deterministically assign ports based on work order ID.
+
+def get_port_range_for_work_order(work_order_id: str) -> tuple[int, int]:
+    """Get port range for work order.
+
+    Deterministically assigns a 10-port range based on work order ID.

     Args:
         work_order_id: The work order identifier

     Returns:
-        Tuple of (backend_port, frontend_port)
+        Tuple of (start_port, end_port)
+
+    Example:
+        wo-abc123 -> (9000, 9009)  # 10 ports
+        wo-def456 -> (9010, 9019)  # 10 ports
+        wo-xyz789 -> (9020, 9029)  # 10 ports
     """
-    # Convert first 8 chars of work order ID to index (0-14)
-    # Using base 36 conversion and modulo for consistent mapping
+    # Convert work order ID to slot (0-19)
     try:
         # Take first 8 alphanumeric chars and convert from base 36
         id_chars = ''.join(c for c in work_order_id[:8] if c.isalnum())
-        index = int(id_chars, 36) % 15
+        slot = int(id_chars, 36) % MAX_CONCURRENT_WORK_ORDERS
     except ValueError:
         # Fallback to simple hash if conversion fails
-        index = hash(work_order_id) % 15
+        slot = hash(work_order_id) % MAX_CONCURRENT_WORK_ORDERS

-    backend_port = 9100 + index
-    frontend_port = 9200 + index
+    start_port = PORT_BASE + (slot * PORT_RANGE_SIZE)
+    end_port = start_port + PORT_RANGE_SIZE - 1

-    return backend_port, frontend_port
+    return start_port, end_port


 def is_port_available(port: int) -> bool:
@@ -51,12 +69,138 @@ def is_port_available(port: int) -> bool:
         return False


-def find_next_available_ports(work_order_id: str, max_attempts: int = 15) -> tuple[int, int]:
-    """Find available ports starting from deterministic assignment.
+def find_available_port_range(
+    work_order_id: str, max_attempts: int = MAX_CONCURRENT_WORK_ORDERS
+) -> tuple[int, int, list[int]]:
+    """Find available port range and check which ports are actually free.

     Args:
         work_order_id: The work order ID
-        max_attempts: Maximum number of attempts (default 15)
+        max_attempts: Maximum number of slot attempts (default 20)
+
+    Returns:
+        Tuple of (start_port, end_port, available_ports)
+        available_ports is a list of ports in the range that are actually free
+
+    Raises:
+        RuntimeError: If no suitable port range found after max_attempts
+
+    Example:
+        >>> find_available_port_range("wo-abc123")
+        (9000, 9009, [9000, 9001, 9002, 9003, 9004, 9005, 9006, 9007, 9008, 9009])
+    """
+    start_port, end_port = get_port_range_for_work_order(work_order_id)
+    base_slot = (start_port - PORT_BASE) // PORT_RANGE_SIZE
+
+    # Try multiple slots if first one has conflicts
+    for offset in range(max_attempts):
+        slot = (base_slot + offset) % MAX_CONCURRENT_WORK_ORDERS
+        current_start = PORT_BASE + (slot * PORT_RANGE_SIZE)
+        current_end = current_start + PORT_RANGE_SIZE - 1
+
+        # Check which ports in this range are available
+        available = []
+        for port in range(current_start, current_end + 1):
+            if is_port_available(port):
+                available.append(port)
+
+        # If we have at least half the ports available, use this range
+        # (allows for some port conflicts while still being usable)
+        if len(available) >= PORT_RANGE_SIZE // 2:
+            return current_start, current_end, available
+
+    raise RuntimeError(
+        f"No suitable port range found after {max_attempts} attempts. "
+        f"Try stopping other services or wait for work orders to complete."
+    )
+
+
+def create_ports_env_file(
+    worktree_path: str,
+    start_port: int,
+    end_port: int,
+    available_ports: list[int]
+) -> None:
+    """Create .ports.env file in worktree with port range configuration.
+
+    Args:
+        worktree_path: Path to the worktree
+        start_port: Start of port range
+        end_port: End of port range
+        available_ports: List of actually available ports in range
+
+    Generated file format:
+        # Port range information
+        PORT_RANGE_START=9000
+        PORT_RANGE_END=9009
+        PORT_RANGE_SIZE=10
+
+        # Individual ports (PORT_0, PORT_1, ...)
+        PORT_0=9000
+        PORT_1=9001
+        ...
+        PORT_9=9009
+
+        # Convenience aliases (backward compatible)
+        BACKEND_PORT=9000
+        FRONTEND_PORT=9001
+        VITE_BACKEND_URL=http://localhost:9000
+    """
+    ports_env_path = os.path.join(worktree_path, ".ports.env")
+
+    with open(ports_env_path, "w") as f:
+        # Header
+        f.write("# Port range allocated to this work order\n")
+        f.write("# Each work order gets 10 consecutive ports for flexibility\n")
+        f.write("# CLI tools can ignore ports, microservices can use multiple\n\n")
+
+        # Range information
+        f.write(f"PORT_RANGE_START={start_port}\n")
+        f.write(f"PORT_RANGE_END={end_port}\n")
+        f.write(f"PORT_RANGE_SIZE={end_port - start_port + 1}\n\n")
+
+        # Individual numbered ports for easy access
+        f.write("# Individual ports (use PORT_0, PORT_1, etc.)\n")
+        for i, port in enumerate(available_ports):
+            f.write(f"PORT_{i}={port}\n")
+
+        # Backward compatible aliases
+        f.write("\n# Convenience aliases (backward compatible with old format)\n")
+        if len(available_ports) >= 1:
+            f.write(f"BACKEND_PORT={available_ports[0]}\n")
+        if len(available_ports) >= 2:
+            f.write(f"FRONTEND_PORT={available_ports[1]}\n")
+        f.write(f"VITE_BACKEND_URL=http://localhost:{available_ports[0]}\n")
+
+
+# Backward compatibility function (deprecated, but kept for migration)
+def get_ports_for_work_order(work_order_id: str) -> tuple[int, int]:
+    """DEPRECATED: Get backend and frontend ports.
+
+    This function is kept for backward compatibility during migration.
+    Use get_port_range_for_work_order() and find_available_port_range() instead.
+
+    Args:
+        work_order_id: The work order identifier
+
+    Returns:
+        Tuple of (backend_port, frontend_port)
+    """
+    start_port, end_port = get_port_range_for_work_order(work_order_id)
+    # Return first two ports in range as backend/frontend
+    return start_port, start_port + 1
+
+
+# Backward compatibility function (deprecated, but kept for migration)
+def find_next_available_ports(work_order_id: str, max_attempts: int = 20) -> tuple[int, int]:
+    """DEPRECATED: Find available backend and frontend ports.
+
+    This function is kept for backward compatibility during migration.
+    Use find_available_port_range() instead.
+
+    Args:
+        work_order_id: The work order ID
+        max_attempts: Maximum number of attempts (default 20)

     Returns:
         Tuple of (backend_port, frontend_port)
@@ -64,31 +208,13 @@ def find_next_available_ports(work_order_id: str, max_attempts: int = 15) -> tup
     Raises:
         RuntimeError: If no available ports found
     """
-    base_backend, base_frontend = get_ports_for_work_order(work_order_id)
-    base_index = base_backend - 9100
+    start_port, end_port, available_ports = find_available_port_range(
+        work_order_id, max_attempts
+    )

-    for offset in range(max_attempts):
-        index = (base_index + offset) % 15
-        backend_port = 9100 + index
-        frontend_port = 9200 + index
-
-        if is_port_available(backend_port) and is_port_available(frontend_port):
-            return backend_port, frontend_port
-
-    raise RuntimeError("No available ports in the allocated range")
-
-
-def create_ports_env_file(worktree_path: str, backend_port: int, frontend_port: int) -> None:
-    """Create .ports.env file in worktree with port configuration.
-
-    Args:
-        worktree_path: Path to the worktree
-        backend_port: Backend port number
-        frontend_port: Frontend port number
-    """
-    ports_env_path = os.path.join(worktree_path, ".ports.env")
-
-    with open(ports_env_path, "w") as f:
-        f.write(f"BACKEND_PORT={backend_port}\n")
-        f.write(f"FRONTEND_PORT={frontend_port}\n")
-        f.write(f"VITE_BACKEND_URL=http://localhost:{backend_port}\n")
+    if len(available_ports) < 2:
+        raise RuntimeError(
+            f"Need at least 2 ports, only {len(available_ports)} available in range"
+        )
+
+    return available_ports[0], available_ports[1]
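For context, a caller would drive the new allocator roughly as follows. This is a sketch based on the signatures above, not code from the commit; the work order ID is made up.

    # Illustrative caller for find_available_port_range (hypothetical work order ID).
    from src.agent_work_orders.utils.port_allocation import find_available_port_range

    try:
        start, end, free_ports = find_available_port_range("wo-abc123")
        # free_ports contains only the ports in start..end that could actually be bound;
        # a slot is accepted once at least half of its 10 ports are free, otherwise the
        # next slot is tried until RuntimeError is raised.
        backend_port = free_ports[0]
    except RuntimeError:
        # All slots were too congested; surface this as a sandbox setup failure.
        raise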
@@ -266,8 +266,9 @@ def remove_worktree(

 def setup_worktree_environment(
     worktree_path: str,
-    backend_port: int,
-    frontend_port: int,
+    start_port: int,
+    end_port: int,
+    available_ports: list[int],
     logger: "structlog.stdlib.BoundLogger"
 ) -> None:
     """Set up worktree environment by creating .ports.env file.
@@ -277,9 +278,13 @@ def setup_worktree_environment(

     Args:
         worktree_path: Path to the worktree
-        backend_port: Backend port number
-        frontend_port: Frontend port number
+        start_port: Start of port range
+        end_port: End of port range
+        available_ports: List of available ports in range
         logger: Logger instance
     """
-    create_ports_env_file(worktree_path, backend_port, frontend_port)
-    logger.info(f"Created .ports.env with Backend: {backend_port}, Frontend: {frontend_port}")
+    create_ports_env_file(worktree_path, start_port, end_port, available_ports)
+    logger.info(
+        f"Created .ports.env with port range {start_port}-{end_port} "
+        f"({len(available_ports)} available ports)"
+    )
@@ -164,7 +164,7 @@ class WorkflowOrchestrator:
         branch_name = context.get("create-branch")
         git_stats = await self._calculate_git_stats(
             branch_name,
-            sandbox.get_working_directory()
+            sandbox.working_dir
         )

         await self.state_repository.update_status(
@@ -188,7 +188,7 @@ class WorkflowOrchestrator:
         branch_name = context.get("create-branch")
         if branch_name:
             git_stats = await self._calculate_git_stats(
-                branch_name, sandbox.get_working_directory()
+                branch_name, sandbox.working_dir
             )
             await self.state_repository.update_status(
                 agent_work_order_id,
|
|||||||
@@ -5,7 +5,7 @@ from datetime import datetime
|
|||||||
from fastapi.testclient import TestClient
|
from fastapi.testclient import TestClient
|
||||||
from unittest.mock import AsyncMock, MagicMock, patch
|
from unittest.mock import AsyncMock, MagicMock, patch
|
||||||
|
|
||||||
from src.agent_work_orders.main import app
|
from src.agent_work_orders.server import app
|
||||||
from src.agent_work_orders.models import (
|
from src.agent_work_orders.models import (
|
||||||
AgentWorkOrderStatus,
|
AgentWorkOrderStatus,
|
||||||
AgentWorkflowType,
|
AgentWorkflowType,
|
||||||
@@ -38,7 +38,7 @@ def test_create_agent_work_order():
         "github_issue_number": "42",
     }

-    response = client.post("/agent-work-orders", json=request_data)
+    response = client.post("/api/agent-work-orders/", json=request_data)

     assert response.status_code == 201
     data = response.json()
@@ -59,7 +59,7 @@ def test_create_agent_work_order_without_issue():
         "user_request": "Fix the login bug where users can't sign in",
     }

-    response = client.post("/agent-work-orders", json=request_data)
+    response = client.post("/api/agent-work-orders/", json=request_data)

     assert response.status_code == 201
     data = response.json()
@@ -73,7 +73,7 @@ def test_create_agent_work_order_invalid_data():
         # Missing required fields
     }

-    response = client.post("/agent-work-orders", json=request_data)
+    response = client.post("/api/agent-work-orders/", json=request_data)

     assert response.status_code == 422  # Validation error

@@ -84,7 +84,7 @@ def test_list_agent_work_orders_empty():
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
         mock_repo.list = AsyncMock(return_value=[])

-        response = client.get("/agent-work-orders")
+        response = client.get("/api/agent-work-orders/")

         assert response.status_code == 200
         data = response.json()
@@ -117,7 +117,7 @@ def test_list_agent_work_orders_with_data():
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
         mock_repo.list = AsyncMock(return_value=[(state, metadata)])

-        response = client.get("/agent-work-orders")
+        response = client.get("/api/agent-work-orders/")

         assert response.status_code == 200
         data = response.json()
@@ -131,7 +131,7 @@ def test_list_agent_work_orders_with_status_filter():
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
         mock_repo.list = AsyncMock(return_value=[])

-        response = client.get("/agent-work-orders?status=running")
+        response = client.get("/api/agent-work-orders/?status=running")

         assert response.status_code == 200
         mock_repo.list.assert_called_once()
@@ -166,7 +166,7 @@ def test_get_agent_work_order():
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
         mock_repo.get = AsyncMock(return_value=(state, metadata))

-        response = client.get("/agent-work-orders/wo-test123")
+        response = client.get("/api/agent-work-orders/wo-test123")

         assert response.status_code == 200
         data = response.json()
@@ -181,7 +181,7 @@ def test_get_agent_work_order_not_found():
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
         mock_repo.get = AsyncMock(return_value=None)

-        response = client.get("/agent-work-orders/wo-nonexistent")
+        response = client.get("/api/agent-work-orders/wo-nonexistent")

         assert response.status_code == 404

@@ -212,7 +212,7 @@ def test_get_git_progress():
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
         mock_repo.get = AsyncMock(return_value=(state, metadata))

-        response = client.get("/agent-work-orders/wo-test123/git-progress")
+        response = client.get("/api/agent-work-orders/wo-test123/git-progress")

         assert response.status_code == 200
         data = response.json()
@@ -227,7 +227,7 @@ def test_get_git_progress_not_found():
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
         mock_repo.get = AsyncMock(return_value=None)

-        response = client.get("/agent-work-orders/wo-nonexistent/git-progress")
+        response = client.get("/api/agent-work-orders/wo-nonexistent/git-progress")

         assert response.status_code == 404

@@ -239,7 +239,7 @@ def test_send_prompt_to_agent():
         "prompt_text": "Continue with the next step",
     }

-    response = client.post("/agent-work-orders/wo-test123/prompt", json=request_data)
+    response = client.post("/api/agent-work-orders/wo-test123/prompt", json=request_data)

     # Currently returns success but doesn't actually send (Phase 2+)
     assert response.status_code == 200
@@ -249,7 +249,7 @@ def test_send_prompt_to_agent():

 def test_get_logs():
     """Test getting logs (placeholder)"""
-    response = client.get("/agent-work-orders/wo-test123/logs")
+    response = client.get("/api/agent-work-orders/wo-test123/logs")

     # Currently returns empty logs (Phase 2+)
     assert response.status_code == 200
@@ -275,7 +275,7 @@ def test_verify_repository_success():

     request_data = {"repository_url": "https://github.com/owner/repo"}

-    response = client.post("/github/verify-repository", json=request_data)
+    response = client.post("/api/agent-work-orders/github/verify-repository", json=request_data)

     assert response.status_code == 200
     data = response.json()
@@ -292,7 +292,7 @@ def test_verify_repository_failure():

     request_data = {"repository_url": "https://github.com/owner/nonexistent"}

-    response = client.post("/github/verify-repository", json=request_data)
+    response = client.post("/api/agent-work-orders/github/verify-repository", json=request_data)

     assert response.status_code == 200
     data = response.json()
@@ -302,7 +302,7 @@ def test_verify_repository_failure():

 def test_get_agent_work_order_steps():
     """Test getting step history for a work order"""
-    from src.agent_work_orders.models import StepExecutionResult, StepHistory, WorkflowStep
+    from src.agent_work_orders.models import AgentWorkOrderState, StepExecutionResult, StepHistory, WorkflowStep

     # Create step history
     step_history = StepHistory(
@@ -325,10 +325,28 @@ def test_get_agent_work_order_steps():
         ],
     )

+    # Mock state for get() call
+    state = AgentWorkOrderState(
+        agent_work_order_id="wo-test123",
+        repository_url="https://github.com/owner/repo",
+        sandbox_identifier="sandbox-wo-test123",
+        git_branch_name="feat-wo-test123",
+        agent_session_id="session-123",
+    )
+    metadata = {
+        "sandbox_type": SandboxType.GIT_BRANCH,
+        "github_issue_number": None,
+        "status": AgentWorkOrderStatus.RUNNING,
+        "current_phase": None,
+        "created_at": datetime.now(),
+        "updated_at": datetime.now(),
+    }
+
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
+        mock_repo.get = AsyncMock(return_value=(state, metadata))
         mock_repo.get_step_history = AsyncMock(return_value=step_history)

-        response = client.get("/agent-work-orders/wo-test123/steps")
+        response = client.get("/api/agent-work-orders/wo-test123/steps")

         assert response.status_code == 200
         data = response.json()
@@ -344,9 +362,10 @@ def test_get_agent_work_order_steps():
 def test_get_agent_work_order_steps_not_found():
     """Test getting step history for non-existent work order"""
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
+        mock_repo.get = AsyncMock(return_value=None)
         mock_repo.get_step_history = AsyncMock(return_value=None)

-        response = client.get("/agent-work-orders/wo-nonexistent/steps")
+        response = client.get("/api/agent-work-orders/wo-nonexistent/steps")

         assert response.status_code == 404
         data = response.json()
@@ -355,14 +374,32 @@ def test_get_agent_work_order_steps_not_found():

 def test_get_agent_work_order_steps_empty():
     """Test getting empty step history"""
-    from src.agent_work_orders.models import StepHistory
+    from src.agent_work_orders.models import AgentWorkOrderState, StepHistory

     step_history = StepHistory(agent_work_order_id="wo-test123", steps=[])

+    # Mock state for get() call
+    state = AgentWorkOrderState(
+        agent_work_order_id="wo-test123",
+        repository_url="https://github.com/owner/repo",
+        sandbox_identifier="sandbox-wo-test123",
+        git_branch_name=None,
+        agent_session_id=None,
+    )
+    metadata = {
+        "sandbox_type": SandboxType.GIT_BRANCH,
+        "github_issue_number": None,
+        "status": AgentWorkOrderStatus.PENDING,
+        "current_phase": None,
+        "created_at": datetime.now(),
+        "updated_at": datetime.now(),
+    }
+
     with patch("src.agent_work_orders.api.routes.state_repository") as mock_repo:
+        mock_repo.get = AsyncMock(return_value=(state, metadata))
         mock_repo.get_step_history = AsyncMock(return_value=step_history)

-        response = client.get("/agent-work-orders/wo-test123/steps")
+        response = client.get("/api/agent-work-orders/wo-test123/steps")

         assert response.status_code == 200
         data = response.json()
@@ -3,6 +3,7 @@
 Tests configuration loading, service discovery, and URL construction.
 """

+import importlib
 import pytest
 from unittest.mock import patch

@@ -38,6 +39,8 @@ def test_config_local_service_discovery():
 @patch.dict("os.environ", {"SERVICE_DISCOVERY_MODE": "docker_compose"})
 def test_config_docker_service_discovery():
     """Test docker_compose service discovery mode"""
+    import src.agent_work_orders.config as config_module
+    importlib.reload(config_module)
     from src.agent_work_orders.config import AgentWorkOrdersConfig

     config = AgentWorkOrdersConfig()
@@ -73,6 +76,8 @@ def test_config_explicit_mcp_url_override():
 @patch.dict("os.environ", {"CLAUDE_CLI_PATH": "/custom/path/to/claude"})
 def test_config_claude_cli_path_override():
     """Test CLAUDE_CLI_PATH can be overridden"""
+    import src.agent_work_orders.config as config_module
+    importlib.reload(config_module)
     from src.agent_work_orders.config import AgentWorkOrdersConfig

     config = AgentWorkOrdersConfig()
@@ -84,6 +89,8 @@ def test_config_claude_cli_path_override():
 @patch.dict("os.environ", {"LOG_LEVEL": "DEBUG"})
 def test_config_log_level_override():
     """Test LOG_LEVEL can be overridden"""
+    import src.agent_work_orders.config as config_module
+    importlib.reload(config_module)
     from src.agent_work_orders.config import AgentWorkOrdersConfig

     config = AgentWorkOrdersConfig()
@@ -95,6 +102,8 @@ def test_config_log_level_override():
 @patch.dict("os.environ", {"CORS_ORIGINS": "http://example.com,http://test.com"})
 def test_config_cors_origins_override():
     """Test CORS_ORIGINS can be overridden"""
+    import src.agent_work_orders.config as config_module
+    importlib.reload(config_module)
     from src.agent_work_orders.config import AgentWorkOrdersConfig

     config = AgentWorkOrdersConfig()
@@ -105,13 +114,16 @@ def test_config_cors_origins_override():
 @pytest.mark.unit
 def test_config_ensure_temp_dir(tmp_path):
     """Test ensure_temp_dir creates directory"""
-    from src.agent_work_orders.config import AgentWorkOrdersConfig
     import os
+    import src.agent_work_orders.config as config_module

     # Use tmp_path for testing
     test_temp_dir = str(tmp_path / "test-agent-work-orders")

     with patch.dict("os.environ", {"AGENT_WORK_ORDER_TEMP_DIR": test_temp_dir}):
+        importlib.reload(config_module)
+        from src.agent_work_orders.config import AgentWorkOrdersConfig
+
         config = AgentWorkOrdersConfig()
         temp_dir = config.ensure_temp_dir()

@@ -130,6 +142,8 @@ def test_config_ensure_temp_dir(tmp_path):
 )
 def test_config_explicit_url_overrides_discovery_mode():
     """Test explicit URL takes precedence over service discovery mode"""
+    import src.agent_work_orders.config as config_module
+    importlib.reload(config_module)
     from src.agent_work_orders.config import AgentWorkOrdersConfig

     config = AgentWorkOrdersConfig()
@@ -154,6 +168,8 @@ def test_config_state_storage_type():
 @patch.dict("os.environ", {"FILE_STATE_DIRECTORY": "/custom/state/dir"})
 def test_config_file_state_directory():
     """Test FILE_STATE_DIRECTORY configuration"""
+    import src.agent_work_orders.config as config_module
+    importlib.reload(config_module)
     from src.agent_work_orders.config import AgentWorkOrdersConfig

     config = AgentWorkOrdersConfig()
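The importlib.reload calls added to these config tests suggest the module evaluates environment variables at import time, so a patched os.environ only becomes visible after the module is reloaded. A condensed sketch of that pattern, assuming the module path shown in the diff (the real tests re-import AgentWorkOrdersConfig directly rather than going through the module object):

    # Sketch of the reload-under-patched-env pattern used by the config tests.
    import importlib
    from unittest.mock import patch

    with patch.dict("os.environ", {"LOG_LEVEL": "DEBUG"}):
        import src.agent_work_orders.config as config_module
        importlib.reload(config_module)  # re-run module-level env lookups
        config = config_module.AgentWorkOrdersConfig()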
python/tests/agent_work_orders/test_port_allocation.py (new file, 294 lines)
@@ -0,0 +1,294 @@
+"""Tests for Port Allocation with 10-Port Ranges"""
+
+import pytest
+from unittest.mock import patch
+
+from src.agent_work_orders.utils.port_allocation import (
+    get_port_range_for_work_order,
+    is_port_available,
+    find_available_port_range,
+    create_ports_env_file,
+    PORT_RANGE_SIZE,
+    PORT_BASE,
+    MAX_CONCURRENT_WORK_ORDERS,
+)
+
+
+@pytest.mark.unit
+def test_get_port_range_for_work_order_deterministic():
+    """Test that same work order ID always gets same port range"""
+    work_order_id = "wo-abc123"
+
+    start1, end1 = get_port_range_for_work_order(work_order_id)
+    start2, end2 = get_port_range_for_work_order(work_order_id)
+
+    assert start1 == start2
+    assert end1 == end2
+    assert end1 - start1 + 1 == PORT_RANGE_SIZE  # 10 ports
+    assert PORT_BASE <= start1 < PORT_BASE + (MAX_CONCURRENT_WORK_ORDERS * PORT_RANGE_SIZE)
+
+
+@pytest.mark.unit
+def test_get_port_range_for_work_order_size():
+    """Test that port range is exactly 10 ports"""
+    work_order_id = "wo-test123"
+
+    start, end = get_port_range_for_work_order(work_order_id)
+
+    assert end - start + 1 == 10
+
+
+@pytest.mark.unit
+def test_get_port_range_for_work_order_uses_different_slots():
+    """Test that the hash function can produce different slot assignments"""
+    # Create very different IDs that should hash to different values
+    ids = ["wo-aaaaaaaa", "wo-zzzzz999", "wo-12345678", "wo-abcdefgh", "wo-99999999"]
+    ranges = [get_port_range_for_work_order(wid) for wid in ids]
+
+    # Check all ranges are valid
+    for start, end in ranges:
+        assert end - start + 1 == 10
+        assert PORT_BASE <= start < PORT_BASE + (MAX_CONCURRENT_WORK_ORDERS * PORT_RANGE_SIZE)
+
+    # It's theoretically possible all hash to same slot, but unlikely with very different IDs
+    # The important thing is the function works, not that it always distributes perfectly
+    assert len(ranges) == 5  # We got 5 results
+
+
+@pytest.mark.unit
+def test_get_port_range_for_work_order_fallback_hash():
+    """Test fallback to hash when base36 conversion fails"""
+    # Non-alphanumeric work order ID
+    work_order_id = "--------"
+
+    start, end = get_port_range_for_work_order(work_order_id)
+
+    # Should still work via hash fallback
+    assert end - start + 1 == 10
+    assert PORT_BASE <= start < PORT_BASE + (MAX_CONCURRENT_WORK_ORDERS * PORT_RANGE_SIZE)
+
+
+@pytest.mark.unit
+def test_is_port_available_mock_available():
+    """Test port availability check when port is available"""
+    with patch("socket.socket") as mock_socket:
+        mock_socket_instance = mock_socket.return_value.__enter__.return_value
+        mock_socket_instance.bind.return_value = None  # Successful bind
+
+        result = is_port_available(9000)
+
+        assert result is True
+        mock_socket_instance.bind.assert_called_once_with(('localhost', 9000))
+
+
+@pytest.mark.unit
+def test_is_port_available_mock_unavailable():
+    """Test port availability check when port is unavailable"""
+    with patch("socket.socket") as mock_socket:
+        mock_socket_instance = mock_socket.return_value.__enter__.return_value
+        mock_socket_instance.bind.side_effect = OSError("Port in use")
+
+        result = is_port_available(9000)
+
+        assert result is False
+
+
+@pytest.mark.unit
+def test_find_available_port_range_all_available():
+    """Test finding port range when all ports are available"""
+    work_order_id = "wo-test123"
+
+    # Mock all ports as available
+    with patch(
+        "src.agent_work_orders.utils.port_allocation.is_port_available",
+        return_value=True,
+    ):
+        start, end, available = find_available_port_range(work_order_id)
+
+        # Should get the deterministic range
+        expected_start, expected_end = get_port_range_for_work_order(work_order_id)
+        assert start == expected_start
+        assert end == expected_end
+        assert len(available) == 10  # All 10 ports available
+
+
+@pytest.mark.unit
+def test_find_available_port_range_some_unavailable():
+    """Test finding port range when some ports are unavailable"""
+    work_order_id = "wo-test123"
+    expected_start, expected_end = get_port_range_for_work_order(work_order_id)
+
+    # Mock: first, third, and fifth ports unavailable, rest available
+    def mock_availability(port):
+        offset = port - expected_start
+        return offset not in [0, 2, 4]  # 7 out of 10 available
+
+    with patch(
+        "src.agent_work_orders.utils.port_allocation.is_port_available",
+        side_effect=mock_availability,
+    ):
+        start, end, available = find_available_port_range(work_order_id)
+
+        # Should still use this range (>= 5 ports available)
+        assert start == expected_start
+        assert end == expected_end
+        assert len(available) == 7  # 7 ports available
+
+
+@pytest.mark.unit
+def test_find_available_port_range_fallback_to_next_slot():
+    """Test fallback to next slot when first slot has too few ports"""
+    work_order_id = "wo-test123"
+    expected_start, expected_end = get_port_range_for_work_order(work_order_id)
+
+    # Mock: First slot has only 3 available (< 5 needed), second slot has all
+    def mock_availability(port):
+        if expected_start <= port <= expected_end:
+            # First slot: only 3 available
+            offset = port - expected_start
+            return offset < 3
+        else:
+            # Other slots: all available
+            return True
+
+    with patch(
+        "src.agent_work_orders.utils.port_allocation.is_port_available",
+        side_effect=mock_availability,
+    ):
+        start, end, available = find_available_port_range(work_order_id)
+
+        # Should use a different slot
+        assert (start, end) != (expected_start, expected_end)
+        assert len(available) >= 5  # At least half available
+
+
+@pytest.mark.unit
+def test_find_available_port_range_exhausted():
+    """Test that RuntimeError is raised when all port ranges are exhausted"""
+    work_order_id = "wo-test123"
+
+    # Mock all ports as unavailable
+    with patch(
+        "src.agent_work_orders.utils.port_allocation.is_port_available",
+        return_value=False,
+    ):
+        with pytest.raises(RuntimeError) as exc_info:
+            find_available_port_range(work_order_id)
+
+        assert "No suitable port range found" in str(exc_info.value)
+
+
+@pytest.mark.unit
+def test_create_ports_env_file(tmp_path):
+    """Test creating .ports.env file with port range"""
+    worktree_path = str(tmp_path)
+    start_port = 9000
+    end_port = 9009
+    available_ports = list(range(9000, 9010))  # All 10 ports
+
+    create_ports_env_file(worktree_path, start_port, end_port, available_ports)
+
+    ports_env_path = tmp_path / ".ports.env"
+    assert ports_env_path.exists()
+
+    content = ports_env_path.read_text()
+
+    # Check range information
+    assert "PORT_RANGE_START=9000" in content
+    assert "PORT_RANGE_END=9009" in content
+    assert "PORT_RANGE_SIZE=10" in content
+
+    # Check individual ports
+    assert "PORT_0=9000" in content
+    assert "PORT_1=9001" in content
+    assert "PORT_9=9009" in content
+
+    # Check backward compatible aliases
+    assert "BACKEND_PORT=9000" in content
+    assert "FRONTEND_PORT=9001" in content
+    assert "VITE_BACKEND_URL=http://localhost:9000" in content
+
+
+@pytest.mark.unit
+def test_create_ports_env_file_partial_availability(tmp_path):
+    """Test creating .ports.env with some ports unavailable"""
+    worktree_path = str(tmp_path)
+    start_port = 9000
+    end_port = 9009
+    # Only some ports available
+    available_ports = [9000, 9001, 9003, 9004, 9006, 9008, 9009]  # 7 ports
+
+    create_ports_env_file(worktree_path, start_port, end_port, available_ports)
+
+    ports_env_path = tmp_path / ".ports.env"
+    content = ports_env_path.read_text()
+
+    # Range should still show full range
+    assert "PORT_RANGE_START=9000" in content
+    assert "PORT_RANGE_END=9009" in content
+
+    # But only available ports should be numbered
+    assert "PORT_0=9000" in content
+    assert "PORT_1=9001" in content
+    assert "PORT_2=9003" in content  # Third available port is 9003
+    assert "PORT_6=9009" in content  # Seventh available port is 9009
+
+    # Backward compatible aliases should use first two available
+    assert "BACKEND_PORT=9000" in content
+    assert "FRONTEND_PORT=9001" in content
+
+
+@pytest.mark.unit
+def test_create_ports_env_file_overwrites(tmp_path):
+    """Test that creating .ports.env file overwrites existing file"""
+    worktree_path = str(tmp_path)
+    ports_env_path = tmp_path / ".ports.env"
+
+    # Create existing file with old content
+    ports_env_path.write_text("OLD_CONTENT=true\n")
+
+    # Create new file
+    create_ports_env_file(
+        worktree_path, 9000, 9009, list(range(9000, 9010))
+    )
+
+    content = ports_env_path.read_text()
+    assert "OLD_CONTENT" not in content
+    assert "PORT_RANGE_START=9000" in content
+
+
+@pytest.mark.unit
+def test_port_ranges_do_not_overlap():
+    """Test that consecutive work order slots have non-overlapping port ranges"""
+    # Create work order IDs that will map to different slots
+    ids = [f"wo-{i:08x}" for i in range(5)]  # Create 5 different IDs
+
+    ranges = [get_port_range_for_work_order(wid) for wid in ids]
+
+    # Check that ranges don't overlap
+    for i, (start1, end1) in enumerate(ranges):
+        for j, (start2, end2) in enumerate(ranges):
+            if i != j:
+                # Ranges should not overlap
+                overlaps = not (end1 < start2 or end2 < start1)
+                # If they overlap, they must be the same range (hash collision)
+                if overlaps:
+                    assert start1 == start2 and end1 == end2
+
+
+@pytest.mark.unit
+def test_max_concurrent_work_orders():
+    """Test that we support MAX_CONCURRENT_WORK_ORDERS distinct ranges"""
+    # Generate MAX_CONCURRENT_WORK_ORDERS + 1 IDs
+    ids = [f"wo-{i:08x}" for i in range(MAX_CONCURRENT_WORK_ORDERS + 1)]
+
+    ranges = [get_port_range_for_work_order(wid) for wid in ids]
+    unique_ranges = set(ranges)
+
+    # Should have at most MAX_CONCURRENT_WORK_ORDERS unique ranges
+    assert len(unique_ranges) <= MAX_CONCURRENT_WORK_ORDERS
+
+    # And they should all fit within the allocated port space
+    for start, end in unique_ranges:
+        assert PORT_BASE <= start < PORT_BASE + (MAX_CONCURRENT_WORK_ORDERS * PORT_RANGE_SIZE)
+        assert PORT_BASE < end <= PORT_BASE + (MAX_CONCURRENT_WORK_ORDERS * PORT_RANGE_SIZE)
@@ -190,6 +190,9 @@ def test_startup_logs_local_mode(caplog):
 @patch.dict("os.environ", {"SERVICE_DISCOVERY_MODE": "docker_compose"})
 def test_startup_logs_docker_mode(caplog):
     """Test startup logs docker_compose mode"""
+    import importlib
+    import src.agent_work_orders.config as config_module
+    importlib.reload(config_module)
     from src.agent_work_orders.config import AgentWorkOrdersConfig

     # Create fresh config instance with env var