feat: add agent work orders microservice with hybrid deployment

Rasmus Widing
2025-10-23 12:46:57 +03:00
parent 8f3e8bc220
commit f07cefd1a1
23 changed files with 1741 additions and 93 deletions

View File

@@ -27,15 +27,40 @@ SUPABASE_SERVICE_KEY=
LOGFIRE_TOKEN=
LOG_LEVEL=INFO
# Claude API Key (Required for Agent Work Orders)
# Get your API key from: https://console.anthropic.com/
# Required for the agent work orders service to execute Claude CLI commands
ANTHROPIC_API_KEY=
# Service Ports Configuration
# These ports are used for external access to the services
HOST=localhost
ARCHON_SERVER_PORT=8181
ARCHON_MCP_PORT=8051
ARCHON_AGENTS_PORT=8052
AGENT_WORK_ORDERS_PORT=8053
ARCHON_UI_PORT=3737
ARCHON_DOCS_PORT=3838
# Agent Work Orders Service Configuration (Optional)
# Set these if running agent work orders service independently
# SERVICE_DISCOVERY_MODE: Controls how services find each other
# - "local": Services run on localhost with different ports
# - "docker_compose": Services use Docker container names
SERVICE_DISCOVERY_MODE=local
# Service URLs (for agent work orders service to call other services)
# These are automatically configured based on SERVICE_DISCOVERY_MODE
# Only override if you need custom service URLs
# ARCHON_SERVER_URL=http://localhost:8181
# ARCHON_MCP_URL=http://localhost:8051
# Agent Work Orders Persistence
# STATE_STORAGE_TYPE: "memory" (default, ephemeral) or "file" (persistent)
# FILE_STATE_DIRECTORY: Directory for file-based state storage
STATE_STORAGE_TYPE=file
FILE_STATE_DIRECTORY=agent-work-orders-state
# Frontend Configuration
# VITE_ALLOWED_HOSTS: Comma-separated list of additional hosts allowed for Vite dev server
# Example: VITE_ALLOWED_HOSTS=192.168.1.100,myhost.local,example.com

View File

@@ -104,12 +104,19 @@ uv run ruff check # Run linter
uv run ruff check --fix # Auto-fix linting issues
uv run mypy src/ # Type check
# Agent Work Orders Service (independent microservice)
make agent-work-orders # Run agent work orders service locally on 8053
# Or manually:
uv run python -m uvicorn src.agent_work_orders.server:app --port 8053 --reload
# Docker operations
docker compose up --build -d # Start all services
docker compose --profile backend up -d # Backend only (for hybrid dev)
docker compose --profile work-orders up -d # Include agent work orders service
docker compose logs -f archon-server # View server logs
docker compose logs -f archon-mcp # View MCP server logs
docker compose logs -f archon-agent-work-orders # View agent work orders service logs
docker compose restart archon-server # Restart after code changes
docker compose down # Stop all services
docker compose down -v # Stop and remove volumes
```
@@ -120,8 +127,19 @@ docker compose down -v # Stop and remove volumes
# Hybrid development (recommended) - backend in Docker, frontend local
make dev # Or manually: docker compose --profile backend up -d && cd archon-ui-main && npm run dev
# Hybrid with Agent Work Orders Service - backend in Docker, agent work orders local
make dev-work-orders # Starts backend in Docker, prompts to run agent service in separate terminal
# Then in separate terminal:
make agent-work-orders # Start agent work orders service locally
# Full Docker mode
make dev-docker # Or: docker compose up --build -d
docker compose --profile work-orders up -d # Include agent work orders service
# All Local (3 terminals) - for agent work orders service development
# Terminal 1: uv run python -m uvicorn src.server.main:app --port 8181 --reload
# Terminal 2: make agent-work-orders
# Terminal 3: cd archon-ui-main && npm run dev
# Run linters before committing
make lint # Runs both frontend and backend linters

View File

@@ -5,23 +5,27 @@ SHELL := /bin/bash
# Docker compose command - prefer newer 'docker compose' plugin over standalone 'docker-compose'
COMPOSE ?= $(shell docker compose version >/dev/null 2>&1 && echo "docker compose" || echo "docker-compose")
.PHONY: help dev dev-docker dev-docker-full dev-work-orders dev-hybrid-work-orders stop test test-fe test-be lint lint-fe lint-be clean install check agent-work-orders
help:
@echo "Archon Development Commands"
@echo "==========================="
@echo " make dev - Backend in Docker, frontend local (recommended)"
@echo " make dev-docker - Backend + frontend in Docker"
@echo " make dev-docker-full - Everything in Docker (server + mcp + ui + work orders)"
@echo " make dev-hybrid-work-orders - Server + MCP in Docker, UI + work orders local (2 terminals)"
@echo " make dev-work-orders - Backend in Docker, agent work orders local, frontend local"
@echo " make agent-work-orders - Run agent work orders service locally"
@echo " make stop - Stop all services"
@echo " make test - Run all tests"
@echo " make test-fe - Run frontend tests only"
@echo " make test-be - Run backend tests only"
@echo " make lint - Run all linters"
@echo " make lint-fe - Run frontend linter only"
@echo " make lint-be - Run backend linter only"
@echo " make clean - Remove containers and volumes"
@echo " make install - Install dependencies"
@echo " make check - Check environment setup"
# Install dependencies
install:
@@ -54,18 +58,73 @@ dev: check
VITE_ARCHON_SERVER_HOST=$${HOST:-} \
npm run dev
# Full Docker development (backend + frontend, no work orders)
dev-docker: check
@echo "Starting Docker environment (backend + frontend)..."
@$(COMPOSE) --profile full up -d --build
@echo "✓ Services running"
@echo "Frontend: http://localhost:3737"
@echo "API: http://localhost:8181"
# Full Docker with all services (server + mcp + ui + agent work orders)
dev-docker-full: check
@echo "Starting full Docker environment with agent work orders..."
@$(COMPOSE) up archon-server archon-mcp archon-frontend archon-agent-work-orders -d --build
@set -a; [ -f .env ] && . ./.env; set +a; \
echo "✓ All services running"; \
echo "Frontend: http://localhost:3737"; \
echo "API: http://$${HOST:-localhost}:$${ARCHON_SERVER_PORT:-8181}"; \
echo "MCP: http://$${HOST:-localhost}:$${ARCHON_MCP_PORT:-8051}"; \
echo "Agent Work Orders: http://$${HOST:-localhost}:$${AGENT_WORK_ORDERS_PORT:-8053}"
# Agent work orders service locally (standalone)
agent-work-orders:
@echo "Starting Agent Work Orders service locally..."
@set -a; [ -f .env ] && . ./.env; set +a; \
export SERVICE_DISCOVERY_MODE=local; \
export ARCHON_SERVER_URL=http://localhost:$${ARCHON_SERVER_PORT:-8181}; \
export ARCHON_MCP_URL=http://localhost:$${ARCHON_MCP_PORT:-8051}; \
export AGENT_WORK_ORDERS_PORT=$${AGENT_WORK_ORDERS_PORT:-8053}; \
cd python && uv run python -m uvicorn src.agent_work_orders.server:app --host 0.0.0.0 --port $${AGENT_WORK_ORDERS_PORT:-8053} --reload
# Hybrid development with agent work orders (backend in Docker, agent work orders local, frontend local)
dev-work-orders: check
@echo "Starting hybrid development with agent work orders..."
@echo "Backend: Docker | Agent Work Orders: Local | Frontend: Local"
@$(COMPOSE) up archon-server archon-mcp -d --build
@set -a; [ -f .env ] && . ./.env; set +a; \
echo "Backend running at http://$${HOST:-localhost}:$${ARCHON_SERVER_PORT:-8181}"; \
echo "Starting agent work orders service..."; \
echo "Run in separate terminal: make agent-work-orders"; \
echo "Starting frontend..."; \
cd archon-ui-main && \
VITE_ARCHON_SERVER_PORT=$${ARCHON_SERVER_PORT:-8181} \
VITE_ARCHON_SERVER_HOST=$${HOST:-} \
npm run dev
# Hybrid development: Server + MCP in Docker, UI + Work Orders local (requires 2 terminals)
dev-hybrid-work-orders: check
@echo "Starting hybrid development: Server + MCP in Docker, UI + Work Orders local"
@echo "================================================================"
@$(COMPOSE) up archon-server archon-mcp -d --build
@set -a; [ -f .env ] && . ./.env; set +a; \
echo ""; \
echo "✓ Server + MCP running in Docker"; \
echo " Server: http://$${HOST:-localhost}:$${ARCHON_SERVER_PORT:-8181}"; \
echo " MCP: http://$${HOST:-localhost}:$${ARCHON_MCP_PORT:-8051}"; \
echo ""; \
echo "Next steps:"; \
echo " 1. Terminal 1 (this one): Press Ctrl+C when done"; \
echo " 2. Terminal 2: make agent-work-orders"; \
echo " 3. Terminal 3: cd archon-ui-main && npm run dev"; \
echo ""; \
echo "Or use 'make dev-docker-full' to run everything in Docker."; \
read -p "Press Enter to continue or Ctrl+C to stop..." _
# Stop all services
stop:
@echo "Stopping all services..."
@$(COMPOSE) --profile backend --profile frontend --profile full --profile work-orders down
@echo "✓ Services stopped"
# Run all tests

View File

@@ -0,0 +1,13 @@
# Frontend Environment Configuration
# Agent Work Orders Service (Optional)
# Only set if agent work orders service runs on different host/port than main server
# Default: Uses proxy through main server at /api/agent-work-orders
# Set to the base URL (without /api/agent-work-orders path)
# VITE_AGENT_WORK_ORDERS_URL=http://localhost:8053
# Development Tools
# Show TanStack Query DevTools (for developers only)
# Set to "true" to enable the DevTools panel in bottom right corner
# Defaults to "false" for end users
VITE_SHOW_DEVTOOLS=false

View File

@@ -25,16 +25,29 @@ export const agentWorkOrderKeys = {
};
/**
* Hook to fetch list of agent work orders with smart polling
* Automatically polls when any work order is pending or running
*
* @param statusFilter - Optional status to filter work orders
* @returns Query result with work orders array
*/
export function useWorkOrders(statusFilter?: AgentWorkOrderStatus): UseQueryResult<AgentWorkOrder[], Error> {
const refetchInterval = useSmartPolling({
baseInterval: 3000,
enabled: true,
});
return useQuery({
queryKey: agentWorkOrderKeys.list(statusFilter),
queryFn: () => agentWorkOrdersService.listWorkOrders(statusFilter),
staleTime: STALE_TIMES.instant,
refetchInterval: (query) => {
const data = query.state.data as AgentWorkOrder[] | undefined;
const hasActiveWorkOrders = data?.some(
(wo) => wo.status === "running" || wo.status === "pending"
);
return hasActiveWorkOrders ? refetchInterval : false;
},
});
}

View File

@@ -8,6 +8,21 @@
import { callAPIWithETag } from "@/features/shared/api/apiClient";
import type { AgentWorkOrder, AgentWorkOrderStatus, CreateAgentWorkOrderRequest, StepHistory } from "../types";
/**
* Get the base URL for agent work orders API
* Defaults to /api/agent-work-orders (proxy through main server)
* Can be overridden with VITE_AGENT_WORK_ORDERS_URL for direct connection
*/
const getBaseUrl = (): string => {
const directUrl = import.meta.env.VITE_AGENT_WORK_ORDERS_URL;
if (directUrl) {
// Direct URL should include the full path
return `${directUrl}/api/agent-work-orders`;
}
// Default: proxy through main server
return "/api/agent-work-orders";
};
export const agentWorkOrdersService = {
/**
* Create a new agent work order
@@ -17,7 +32,8 @@ export const agentWorkOrdersService = {
* @throws Error if creation fails
*/
async createWorkOrder(request: CreateAgentWorkOrderRequest): Promise<AgentWorkOrder> {
const baseUrl = getBaseUrl();
return await callAPIWithETag<AgentWorkOrder>(`${baseUrl}/`, {
method: "POST",
body: JSON.stringify(request),
});
@@ -31,8 +47,9 @@ export const agentWorkOrdersService = {
* @throws Error if request fails
*/
async listWorkOrders(statusFilter?: AgentWorkOrderStatus): Promise<AgentWorkOrder[]> {
const baseUrl = getBaseUrl();
const params = statusFilter ? `?status=${statusFilter}` : "";
return await callAPIWithETag<AgentWorkOrder[]>(`${baseUrl}/${params}`);
},
/**
@@ -43,7 +60,8 @@ export const agentWorkOrdersService = {
* @throws Error if work order not found or request fails
*/
async getWorkOrder(id: string): Promise<AgentWorkOrder> {
const baseUrl = getBaseUrl();
return await callAPIWithETag<AgentWorkOrder>(`${baseUrl}/${id}`);
},
/**
@@ -54,6 +72,7 @@ export const agentWorkOrdersService = {
* @throws Error if work order not found or request fails
*/
async getStepHistory(id: string): Promise<StepHistory> {
const baseUrl = getBaseUrl();
return await callAPIWithETag<StepHistory>(`${baseUrl}/${id}/steps`);
},
};

View File

@@ -48,9 +48,12 @@ export function WorkOrderDetailView() {
? workOrder.repository_url.split("/").slice(-2).join("/")
: "Unknown Repository";
// Safely handle potentially invalid dates
const timeAgo = workOrder.created_at
? formatDistanceToNow(new Date(workOrder.created_at), {
addSuffix: true,
})
: "Unknown";
return (
<div className="container mx-auto px-4 py-8">

View File

@@ -42,11 +42,18 @@ function buildFullUrl(cleanEndpoint: string): string {
*/
export async function callAPIWithETag<T = unknown>(endpoint: string, options: RequestInit = {}): Promise<T> {
try {
// Handle absolute URLs (direct service connections)
const isAbsoluteUrl = endpoint.startsWith("http://") || endpoint.startsWith("https://");
let fullUrl: string;
if (isAbsoluteUrl) {
// Use absolute URL as-is (for direct service connections)
fullUrl = endpoint;
} else {
// Clean endpoint and build relative URL
const cleanEndpoint = endpoint.startsWith("/api") ? endpoint.substring(4) : endpoint;
fullUrl = buildFullUrl(cleanEndpoint);
}
// Build headers - only set Content-Type for requests with a body
// NOTE: We do NOT add If-None-Match headers; the browser handles ETag revalidation automatically

View File

@@ -27,6 +27,7 @@ services:
- ARCHON_SERVER_PORT=${ARCHON_SERVER_PORT:-8181}
- ARCHON_MCP_PORT=${ARCHON_MCP_PORT:-8051}
- ARCHON_AGENTS_PORT=${ARCHON_AGENTS_PORT:-8052}
- AGENT_WORK_ORDERS_PORT=${AGENT_WORK_ORDERS_PORT:-8053}
- AGENTS_ENABLED=${AGENTS_ENABLED:-false}
- ARCHON_HOST=${HOST:-localhost}
networks:
@@ -146,6 +147,54 @@ services:
retries: 3
start_period: 40s
# Agent Work Orders Service (Independent microservice for workflow execution)
archon-agent-work-orders:
profiles:
- work-orders # Only starts when explicitly using --profile work-orders
build:
context: ./python
dockerfile: Dockerfile.agent-work-orders
args:
BUILDKIT_INLINE_CACHE: 1
AGENT_WORK_ORDERS_PORT: ${AGENT_WORK_ORDERS_PORT:-8053}
container_name: archon-agent-work-orders
depends_on:
- archon-server
ports:
- "${AGENT_WORK_ORDERS_PORT:-8053}:${AGENT_WORK_ORDERS_PORT:-8053}"
environment:
- SERVICE_DISCOVERY_MODE=docker_compose
- ARCHON_SERVER_URL=http://archon-server:${ARCHON_SERVER_PORT:-8181}
- ARCHON_MCP_URL=http://archon-mcp:${ARCHON_MCP_PORT:-8051}
- SUPABASE_URL=${SUPABASE_URL}
- SUPABASE_SERVICE_KEY=${SUPABASE_SERVICE_KEY}
- OPENAI_API_KEY=${OPENAI_API_KEY:-}
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
- LOGFIRE_TOKEN=${LOGFIRE_TOKEN:-}
- LOG_LEVEL=${LOG_LEVEL:-INFO}
- AGENT_WORK_ORDERS_PORT=${AGENT_WORK_ORDERS_PORT:-8053}
- CLAUDE_CLI_PATH=${CLAUDE_CLI_PATH:-claude}
- GH_CLI_PATH=${GH_CLI_PATH:-gh}
networks:
- app-network
volumes:
- ./python/src/agent_work_orders:/app/src/agent_work_orders # Hot reload for agent work orders
- /tmp/agent-work-orders:/tmp/agent-work-orders # Temp files
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test:
[
"CMD",
"python",
"-c",
'import urllib.request; urllib.request.urlopen("http://localhost:${AGENT_WORK_ORDERS_PORT:-8053}/health")',
]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
# Frontend
archon-frontend:
build: ./archon-ui-main

View File

@@ -0,0 +1,77 @@
# Agent Work Orders Service - Independent microservice for agent execution
FROM python:3.12 AS builder
WORKDIR /build
# Install build dependencies and uv
RUN apt-get update && apt-get install -y \
build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& pip install --no-cache-dir uv
# Copy pyproject.toml for dependency installation
COPY pyproject.toml .
# Install agent work orders dependencies to a virtual environment using uv
RUN uv venv /venv && \
. /venv/bin/activate && \
uv pip install . --group agent-work-orders
# Runtime stage
FROM python:3.12-slim
WORKDIR /app
# Install runtime dependencies: git, gh CLI, curl
RUN apt-get update && apt-get install -y \
git \
curl \
ca-certificates \
wget \
gnupg \
&& curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | gpg --dearmor -o /usr/share/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& apt-get update \
&& apt-get install -y gh \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Copy the virtual environment from builder
COPY --from=builder /venv /venv
# Copy agent work orders source code only (not entire server)
COPY src/agent_work_orders/ src/agent_work_orders/
COPY src/__init__.py src/
# Copy Claude command files for agent work orders
COPY .claude/ .claude/
# Create non-root user for security (Claude CLI blocks --dangerously-skip-permissions with root)
RUN useradd -m -u 1000 -s /bin/bash agentuser && \
chown -R agentuser:agentuser /app /venv
# Create volume mount points for git operations and temp files
RUN mkdir -p /repos /tmp/agent-work-orders && \
chown -R agentuser:agentuser /repos /tmp/agent-work-orders && \
chmod -R 755 /repos /tmp/agent-work-orders
# Install Claude CLI for non-root user
USER agentuser
RUN curl -fsSL https://claude.ai/install.sh | bash
# Set environment variables
ENV PYTHONPATH="/app:$PYTHONPATH"
ENV PYTHONUNBUFFERED=1
ENV PATH="/venv/bin:/home/agentuser/.local/bin:$PATH"
# Expose agent work orders service port
ARG AGENT_WORK_ORDERS_PORT=8053
ENV AGENT_WORK_ORDERS_PORT=${AGENT_WORK_ORDERS_PORT}
EXPOSE ${AGENT_WORK_ORDERS_PORT}
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:${AGENT_WORK_ORDERS_PORT}/health')"
# Run the Agent Work Orders service
CMD python -m uvicorn src.agent_work_orders.server:app --host 0.0.0.0 --port ${AGENT_WORK_ORDERS_PORT}

View File

@@ -87,7 +87,7 @@ mcp = [
"fastapi>=0.104.0", "fastapi>=0.104.0",
] ]
# Agents container dependencies # Agents container dependencies (ML/reranking service)
agents = [ agents = [
"pydantic-ai>=0.0.13", "pydantic-ai>=0.0.13",
"pydantic>=2.0.0", "pydantic>=2.0.0",
@@ -98,6 +98,16 @@ agents = [
"structlog>=23.1.0", "structlog>=23.1.0",
] ]
# Agent Work Orders container dependencies (workflow orchestration service)
agent-work-orders = [
"fastapi>=0.119.1",
"uvicorn>=0.38.0",
"pydantic>=2.12.3",
"httpx>=0.28.1",
"python-dotenv>=1.1.1",
"structlog>=25.4.0",
]
# All dependencies for running unit tests locally
# This combines all container dependencies plus test-specific ones
all = [

View File

@@ -0,0 +1,168 @@
# AI Agent Development Instructions
## Project Overview
`agent_work_orders`: Claude Code CLI automation that stitches modular workflows together.
## Core Principles
1. **TYPE SAFETY IS NON-NEGOTIABLE**
- All functions, methods, and variables MUST have type annotations
- Strict mypy configuration is enforced
- No `Any` types without explicit justification
2. **KISS** (Keep It Simple, Stupid)
- Prefer simple, readable solutions over clever abstractions
3. **YAGNI** (You Aren't Gonna Need It)
- Don't build features until they're actually needed
**Architecture:**
```
src/agent_work_orders
```
Each tool is a vertical slice containing tool.py, schemas.py, service.py.
---
## Documentation Style
**Use Google-style docstrings** for all functions, classes, and modules:
```python
def process_request(user_id: str, query: str) -> dict[str, Any]:
"""Process a user request and return results.
Args:
user_id: Unique identifier for the user.
query: The search query string.
Returns:
Dictionary containing results and metadata.
Raises:
ValueError: If query is empty or invalid.
ProcessingError: If processing fails after retries.
"""
```
---
## Logging Rules
**Philosophy:** Logs are optimized for AI agent consumption. Include enough context for an LLM to understand and fix issues without human intervention.
### Required (MUST)
1. **Import the shared logger** from `python/src/agent_work_orders/utils/structured_logger.py` (e.g. `from src.agent_work_orders.utils.structured_logger import get_logger`)
2. **Use appropriate levels:** `debug` (diagnostics), `info` (operations), `warning` (recoverable), `error` (non-fatal), `exception` (in except blocks with stack traces)
3. **Use structured logging:** Always use keyword arguments, never string formatting
```python
logger.info("user_created", user_id="123", role="admin") # ✅
logger.info(f"User {user_id} created") # ❌ NO
```
4. **Descriptive event names:** Use `snake_case` that answers "what happened?"
- Good: `database_connection_established`, `tool_execution_started`, `api_request_completed`
- Bad: `connected`, `done`, `success`
5. **Use logger.exception() in except blocks:** Captures full stack trace automatically
```python
try:
result = await operation()
except ValueError:
logger.exception("operation_failed", expected="int", received=type(value).__name__)
raise
```
6. **Include debugging context:** IDs (user_id, request_id, session_id), input values, expected vs actual, external responses, performance metrics (duration_ms)
### Recommended (SHOULD)
- Log entry/exit for complex operations with relevant metadata
- Log performance metrics for bottlenecks (timing, counts)
- Log state transitions (old_state, new_state)
- Log external system interactions (API calls, database queries, tool executions)
### DO NOT
- **DO NOT log sensitive data:** No passwords, API keys, tokens (mask: `api_key[:8] + "..."`)
- **DO NOT use string formatting:** Always use structured kwargs
- **DO NOT spam logs in loops:** Log batch summaries instead
- **DO NOT silently catch exceptions:** Always log with `logger.exception()` or re-raise
- **DO NOT use vague event names:** Be specific about what happened
### Common Patterns
**Tool execution:**
```python
logger.info("tool_execution_started", tool=name, params=params)
try:
result = await tool.execute(params)
logger.info("tool_execution_completed", tool=name, duration_ms=duration)
except ToolError:
logger.exception("tool_execution_failed", tool=name, retry_count=count)
raise
```
**External API calls:**
```python
logger.info("api_call", provider="openai", endpoint="/v1/chat", status=200,
duration_ms=1245.5, tokens={"prompt": 245, "completion": 128})
```
### Debugging
Logs include: `correlation_id` (links request logs), `source` (file:function:line), `duration_ms` (performance), `exc_type/exc_message` (errors). Use `grep "correlation_id=abc-123"` to trace requests.
---
## Development Workflow
**Run server:** `uv run python -m uvicorn src.agent_work_orders.server:app --port 8053 --reload`
**Lint/check (must pass):** `uv run ruff check src/ && uv run mypy src/`
**Auto-fix:** `uv run ruff check --fix src/`
**Run tests:** `uv run pytest tests/ -v`
---
## Testing
**Tests mirror the source directory structure.** Every file in `src/agent_work_orders` MUST have a corresponding test file.
**Structure:**
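For illustration only, a mirrored layout might look like this (the file names below are hypothetical examples, not an exhaustive listing):
```
src/agent_work_orders/config.py      -> tests/agent_work_orders/test_config.py
src/agent_work_orders/server.py      -> tests/agent_work_orders/test_server.py
src/agent_work_orders/api/routes.py  -> tests/agent_work_orders/api/test_routes.py
```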
**Requirements:**
- **Unit tests:** Test individual components in isolation. Mark with `@pytest.mark.unit`
- **Integration tests:** Test multiple components together. Mark with `@pytest.mark.integration`
- Place integration tests in `tests/integration/` when testing full application stack
**Run tests:** `uv run pytest tests/ -v`
**Run specific types:** `uv run pytest tests/ -m unit` or `uv run pytest tests/ -m integration`
---
## AI Agent Notes
When debugging:
- Check `source` field for file/function location
- Use `correlation_id` to trace full request flow
- Look for `duration_ms` to identify bottlenecks
- Exception logs include full stack traces with local variables (dev mode)
- All context is in structured log fields—use them to understand and fix issues

View File

@@ -0,0 +1,316 @@
# Agent Work Orders Service
Independent microservice for executing agent-based workflows using Claude Code CLI.
## Purpose
The Agent Work Orders service is a standalone FastAPI application that:
- Executes Claude Code CLI commands for automated development workflows
- Manages git worktrees for isolated execution environments
- Integrates with GitHub for PR creation and management
- Provides a complete workflow orchestration system with 6 compositional commands
## Architecture
This service runs independently from the main Archon server and can be deployed:
- **Locally**: For development using `uv run`
- **Docker**: As a standalone container
- **Hybrid**: Mix of local and Docker services
### Service Communication
The agent service communicates with:
- **Archon Server** (`http://archon-server:8181` or `http://localhost:8181`)
- **Archon MCP** (`http://archon-mcp:8051` or `http://localhost:8051`)
Service discovery is automatic based on `SERVICE_DISCOVERY_MODE` (see the sketch after this list):
- `local`: Uses localhost URLs
- `docker_compose`: Uses Docker container names
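The resolution order is: an explicit `ARCHON_SERVER_URL`/`ARCHON_MCP_URL` override wins, otherwise the URL is derived from `SERVICE_DISCOVERY_MODE`. A minimal standalone sketch of that precedence, mirroring `get_archon_server_url()` in this service's `config.py` (shown here outside the config class purely for illustration):
```python
import os


def resolve_archon_server_url() -> str:
    """Resolve the Archon server URL: explicit override first, then discovery mode."""
    explicit_url = os.getenv("ARCHON_SERVER_URL")
    if explicit_url:
        return explicit_url
    if os.getenv("SERVICE_DISCOVERY_MODE", "local") == "docker_compose":
        return "http://archon-server:8181"  # Docker container name
    return "http://localhost:8181"  # local mode default
```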
## Running Locally
### Prerequisites
- Python 3.12+
- Claude Code CLI installed (`curl -fsSL https://claude.ai/install.sh | bash`)
- Git and GitHub CLI (`gh`)
- uv package manager
### Quick Start
```bash
# Using make (recommended)
make agent-work-orders
# Or using the provided script
cd python
./scripts/start-agent-service.sh
# Or manually
export SERVICE_DISCOVERY_MODE=local
export ARCHON_SERVER_URL=http://localhost:8181
export ARCHON_MCP_URL=http://localhost:8051
uv run python -m uvicorn src.agent_work_orders.server:app --port 8053 --reload
```
## Running with Docker
### Build and Run
```bash
# Build the Docker image
cd python
docker build -f Dockerfile.agent-work-orders -t archon-agent-work-orders .
# Run the container
docker run -p 8053:8053 \
-e SERVICE_DISCOVERY_MODE=local \
-e ARCHON_SERVER_URL=http://localhost:8181 \
archon-agent-work-orders
```
### Docker Compose
```bash
# Start with agent work orders service profile
docker compose --profile work-orders up -d
# Or include in default services (edit docker-compose.yml to remove profile)
docker compose up -d
```
## Configuration
### Environment Variables
| Variable | Default | Description |
|----------|---------|-------------|
| `AGENT_WORK_ORDERS_PORT` | `8053` | Port for agent work orders service |
| `SERVICE_DISCOVERY_MODE` | `local` | Service discovery mode (`local` or `docker_compose`) |
| `ARCHON_SERVER_URL` | Auto | Main server URL (auto-configured by discovery mode) |
| `ARCHON_MCP_URL` | Auto | MCP server URL (auto-configured by discovery mode) |
| `CLAUDE_CLI_PATH` | `claude` | Path to Claude CLI executable |
| `GH_CLI_PATH` | `gh` | Path to GitHub CLI executable |
| `LOG_LEVEL` | `INFO` | Logging level |
| `STATE_STORAGE_TYPE` | `memory` | State storage (`memory` or `file`) - Use `file` for persistence |
| `FILE_STATE_DIRECTORY` | `agent-work-orders-state` | Directory for file-based state (when `STATE_STORAGE_TYPE=file`) |
### Service Discovery Modes
**Local Mode** (`SERVICE_DISCOVERY_MODE=local`):
- Default for development
- Services on `localhost` with different ports
- Ideal for mixed local/Docker setup
**Docker Compose Mode** (`SERVICE_DISCOVERY_MODE=docker_compose`):
- Automatic in Docker deployments
- Uses container names for service discovery
- All services in same Docker network
## API Endpoints
### Core Endpoints
- `GET /health` - Health check with dependency validation
- `GET /` - Service information
- `GET /docs` - OpenAPI documentation
### Work Order Endpoints
All endpoints under `/api/agent-work-orders` (a usage sketch follows the list):
- `POST /` - Create new work order
- `GET /` - List all work orders (optional status filter)
- `GET /{id}` - Get specific work order
- `GET /{id}/steps` - Get step execution history
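A minimal Python sketch of calling these endpoints with `httpx` (already a dependency of this service). It assumes the service is reachable on the default local port 8053 and reuses the request fields from the Manual Testing section below:
```python
import httpx

# Assumes the default local port; adjust if AGENT_WORK_ORDERS_PORT is changed
BASE_URL = "http://localhost:8053/api/agent-work-orders"

with httpx.Client(timeout=30.0) as client:
    # POST / - create a work order (fields taken from the Manual Testing example below)
    created = client.post(
        f"{BASE_URL}/",
        json={
            "repository_url": "https://github.com/test/repo",
            "sandbox_type": "worktree",
            "user_request": "Fix authentication bug",
            "selected_commands": ["create-branch", "planning"],
        },
    )
    created.raise_for_status()
    print(created.json())

    # GET / - list work orders, optionally filtered by status
    pending = client.get(f"{BASE_URL}/", params={"status": "pending"})
    print(pending.json())
```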
## Development Workflows
### Hybrid (Recommended - Backend in Docker, Agent Work Orders Local)
```bash
# Terminal 1: Start backend in Docker and frontend
make dev-work-orders
# Terminal 2: Start agent work orders service
make agent-work-orders
```
### All Local (3 terminals)
```bash
# Terminal 1: Backend
cd python
uv run python -m uvicorn src.server.main:app --port 8181 --reload
# Terminal 2: Agent Work Orders Service
make agent-work-orders
# Terminal 3: Frontend
cd archon-ui-main
npm run dev
```
### Full Docker
```bash
# All services in Docker
docker compose --profile work-orders up -d
# View agent work orders service logs
docker compose logs -f archon-agent-work-orders
```
## Troubleshooting
### Claude CLI Not Found
```bash
# Install Claude Code CLI
curl -fsSL https://claude.ai/install.sh | bash
# Verify installation
claude --version
```
### Service Connection Errors
Check health endpoint to see dependency status:
```bash
curl http://localhost:8053/health
```
This shows:
- Claude CLI availability
- Git availability
- Archon server connectivity
- MCP server connectivity
### Port Conflicts
If port 8053 is in use:
```bash
# Change port
export AGENT_WORK_ORDERS_PORT=9053
./scripts/start-agent-service.sh
```
### Docker Service Discovery
If services can't reach each other in Docker:
```bash
# Verify network
docker network inspect archon_app-network
# Test connectivity
docker exec archon-agent-work-orders ping archon-server
docker exec archon-agent-work-orders curl http://archon-server:8181/health
```
## Testing
### Unit Tests
```bash
cd python
uv run pytest tests/agent_work_orders/ -m unit -v
```
### Integration Tests
```bash
uv run pytest tests/integration/test_agent_service_communication.py -v
```
### Manual Testing
```bash
# Create a work order
curl -X POST http://localhost:8053/api/agent-work-orders/ \
-H "Content-Type: application/json" \
-d '{
"repository_url": "https://github.com/test/repo",
"sandbox_type": "worktree",
"user_request": "Fix authentication bug",
"selected_commands": ["create-branch", "planning"]
}'
# List work orders
curl http://localhost:8053/api/agent-work-orders/
# Get specific work order
curl http://localhost:8053/api/agent-work-orders/<id>
```
## Monitoring
### Health Checks
The `/health` endpoint provides detailed status:
```json
{
"status": "healthy",
"service": "agent-work-orders",
"version": "0.1.0",
"dependencies": {
"claude_cli": { "available": true, "version": "2.0.21" },
"git": { "available": true },
"archon_server": { "available": true, "url": "..." },
"archon_mcp": { "available": true, "url": "..." }
}
}
```
### Logs
Structured logging with context:
```bash
# Docker logs
docker compose logs -f archon-agent-work-orders
# Local logs (stdout)
# Already visible in terminal running the service
```
## Architecture Details
### Dependencies
- **FastAPI**: Web framework
- **httpx**: HTTP client for service communication
- **Claude Code CLI**: Agent execution
- **Git**: Repository operations
- **GitHub CLI**: PR management
### File Structure
```
src/agent_work_orders/
├── server.py # Standalone server entry point
├── main.py # Legacy FastAPI app (deprecated)
├── config.py # Configuration management
├── api/
│ └── routes.py # API route handlers
├── agent_executor/ # Claude CLI execution
├── workflow_engine/ # Workflow orchestration
├── sandbox_manager/ # Git worktree management
└── github_integration/ # GitHub operations
```
## Future Improvements
- Claude Agent SDK migration (replace CLI with Python SDK)
- Direct MCP tool integration
- Multiple instance scaling with load balancing
- Prometheus metrics and distributed tracing
- WebSocket support for real-time log streaming
- Queue system (RabbitMQ/Redis) for work order management

View File

@@ -317,16 +317,25 @@ async def get_agent_work_order_steps(agent_work_order_id: str) -> StepHistory:
Returns detailed history of each step executed,
including success/failure, duration, and errors.
Returns empty history if work order exists but has no steps yet.
"""
logger.info("agent_step_history_get_started", agent_work_order_id=agent_work_order_id)
try:
# First check if work order exists
result = await state_repository.get(agent_work_order_id)
if not result:
raise HTTPException(status_code=404, detail="Work order not found")
step_history = await state_repository.get_step_history(agent_work_order_id)
if not step_history:
# Work order exists but no steps yet - return empty history
logger.info(
"agent_step_history_empty",
agent_work_order_id=agent_work_order_id,
)
return StepHistory(agent_work_order_id=agent_work_order_id, steps=[])
logger.info(
"agent_step_history_get_completed",

View File

@@ -29,6 +29,12 @@ class AgentWorkOrdersConfig:
LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
GH_CLI_PATH: str = os.getenv("GH_CLI_PATH", "gh")
# Service discovery configuration
SERVICE_DISCOVERY_MODE: str = os.getenv("SERVICE_DISCOVERY_MODE", "local")
# CORS configuration
CORS_ORIGINS: str = os.getenv("CORS_ORIGINS", "http://localhost:3737,http://host.docker.internal:3737,*")
# Claude CLI flags configuration
# --verbose: Required when using --print with --output-format=stream-json
CLAUDE_CLI_VERBOSE: bool = os.getenv("CLAUDE_CLI_VERBOSE", "true").lower() == "true"
@@ -69,6 +75,32 @@ class AgentWorkOrdersConfig:
temp_dir.mkdir(parents=True, exist_ok=True)
return temp_dir
@classmethod
def get_archon_server_url(cls) -> str:
"""Get Archon server URL based on service discovery mode"""
# Allow explicit override
explicit_url = os.getenv("ARCHON_SERVER_URL")
if explicit_url:
return explicit_url
# Otherwise use service discovery mode
if cls.SERVICE_DISCOVERY_MODE == "docker_compose":
return "http://archon-server:8181"
return "http://localhost:8181"
@classmethod
def get_archon_mcp_url(cls) -> str:
"""Get Archon MCP server URL based on service discovery mode"""
# Allow explicit override
explicit_url = os.getenv("ARCHON_MCP_URL")
if explicit_url:
return explicit_url
# Otherwise use service discovery mode
if cls.SERVICE_DISCOVERY_MODE == "docker_compose":
return "http://archon-mcp:8051"
return "http://localhost:8051"
# Global config instance
config = AgentWorkOrdersConfig()

View File

@@ -0,0 +1,214 @@
"""Standalone Server Entry Point
FastAPI server for independent agent work order service.
"""
import os
import shutil
import subprocess
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from typing import Any
import httpx
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .api.routes import router
from .config import config
from .utils.structured_logger import configure_structured_logging, get_logger
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
"""Lifespan context manager for startup and shutdown tasks"""
logger = get_logger(__name__)
logger.info(
"Starting Agent Work Orders service",
extra={
"port": os.getenv("AGENT_WORK_ORDERS_PORT", "8053"),
"service_discovery_mode": os.getenv("SERVICE_DISCOVERY_MODE", "local"),
},
)
# Validate Claude CLI is available
try:
result = subprocess.run(
[config.CLAUDE_CLI_PATH, "--version"],
capture_output=True,
text=True,
timeout=5,
)
if result.returncode == 0:
logger.info(
"Claude CLI validation successful",
extra={"version": result.stdout.strip()},
)
else:
logger.error(
"Claude CLI validation failed",
extra={"error": result.stderr},
)
except FileNotFoundError:
logger.error(
"Claude CLI not found",
extra={"path": config.CLAUDE_CLI_PATH},
)
except Exception as e:
logger.error(
"Claude CLI validation error",
extra={"error": str(e)},
)
# Validate git is available
if not shutil.which("git"):
logger.error("Git not found in PATH")
else:
logger.info("Git validation successful")
# Log service URLs
archon_server_url = os.getenv("ARCHON_SERVER_URL")
archon_mcp_url = os.getenv("ARCHON_MCP_URL")
if archon_server_url:
logger.info(
"Service discovery configured",
extra={
"archon_server_url": archon_server_url,
"archon_mcp_url": archon_mcp_url,
},
)
yield
logger.info("Shutting down Agent Work Orders service")
# Configure logging on startup
configure_structured_logging(config.LOG_LEVEL)
# Create FastAPI app with lifespan
app = FastAPI(
title="Agent Work Orders API",
description="Independent agent work order service for workflow-based agent execution",
version="0.1.0",
lifespan=lifespan,
)
# CORS middleware with permissive settings for development
cors_origins = os.getenv("CORS_ORIGINS", "*").split(",")
app.add_middleware(
CORSMiddleware,
allow_origins=cors_origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Include routes with /api/agent-work-orders prefix
app.include_router(router, prefix="/api/agent-work-orders")
@app.get("/health")
async def health_check() -> dict[str, Any]:
"""Health check endpoint with dependency validation"""
health_status: dict[str, Any] = {
"status": "healthy",
"service": "agent-work-orders",
"version": "0.1.0",
"dependencies": {},
}
# Check Claude CLI
try:
result = subprocess.run(
[config.CLAUDE_CLI_PATH, "--version"],
capture_output=True,
text=True,
timeout=5,
)
health_status["dependencies"]["claude_cli"] = {
"available": result.returncode == 0,
"version": result.stdout.strip() if result.returncode == 0 else None,
}
except Exception as e:
health_status["dependencies"]["claude_cli"] = {
"available": False,
"error": str(e),
}
# Check git
health_status["dependencies"]["git"] = {
"available": shutil.which("git") is not None,
}
# Check Archon server connectivity (if configured)
archon_server_url = os.getenv("ARCHON_SERVER_URL")
if archon_server_url:
try:
async with httpx.AsyncClient(timeout=5.0) as client:
response = await client.get(f"{archon_server_url}/health")
health_status["dependencies"]["archon_server"] = {
"available": response.status_code == 200,
"url": archon_server_url,
}
except Exception as e:
health_status["dependencies"]["archon_server"] = {
"available": False,
"url": archon_server_url,
"error": str(e),
}
# Check MCP server connectivity (if configured)
archon_mcp_url = os.getenv("ARCHON_MCP_URL")
if archon_mcp_url:
try:
async with httpx.AsyncClient(timeout=5.0) as client:
response = await client.get(f"{archon_mcp_url}/health")
health_status["dependencies"]["archon_mcp"] = {
"available": response.status_code == 200,
"url": archon_mcp_url,
}
except Exception as e:
health_status["dependencies"]["archon_mcp"] = {
"available": False,
"url": archon_mcp_url,
"error": str(e),
}
# Determine overall status
critical_deps_ok = (
health_status["dependencies"].get("claude_cli", {}).get("available", False)
and health_status["dependencies"].get("git", {}).get("available", False)
)
if not critical_deps_ok:
health_status["status"] = "degraded"
return health_status
@app.get("/")
async def root() -> dict:
"""Root endpoint with service information"""
return {
"service": "agent-work-orders",
"version": "0.1.0",
"description": "Independent agent work order service",
"docs": "/docs",
"health": "/health",
"api": "/api/agent-work-orders",
}
if __name__ == "__main__":
import uvicorn
port = int(os.getenv("AGENT_WORK_ORDERS_PORT", "8053"))
uvicorn.run(
"src.agent_work_orders.server:app",
host="0.0.0.0",
port=port,
reload=True,
)

View File

@@ -15,6 +15,7 @@ from ..models import (
from ..sandbox_manager.sandbox_factory import SandboxFactory
from ..state_manager.file_state_repository import FileStateRepository
from ..state_manager.work_order_repository import WorkOrderRepository
from ..utils.git_operations import get_commit_count, get_files_changed
from ..utils.id_generator import generate_sandbox_identifier
from ..utils.structured_logger import get_logger
from . import workflow_operations
@@ -158,16 +159,44 @@ class WorkflowOrchestrator:
agent_work_order_id, result.output or ""
)
elif command_name == "create-pr":
# Calculate git stats before marking as completed
# Branch name is stored in context from create-branch step
branch_name = context.get("create-branch")
git_stats = await self._calculate_git_stats(
branch_name,
sandbox.get_working_directory()
)
await self.state_repository.update_status(
agent_work_order_id,
AgentWorkOrderStatus.COMPLETED,
github_pull_request_url=result.output,
git_commit_count=git_stats["commit_count"],
git_files_changed=git_stats["files_changed"],
)
# Save final step history
await self.state_repository.save_step_history(agent_work_order_id, step_history)
bound_logger.info(
"agent_work_order_completed",
total_steps=len(step_history.steps),
git_commit_count=git_stats["commit_count"],
git_files_changed=git_stats["files_changed"],
)
return # Exit early if PR created
# Calculate git stats for workflows that complete without PR
branch_name = context.get("create-branch")
if branch_name:
git_stats = await self._calculate_git_stats(
branch_name, sandbox.get_working_directory()
)
await self.state_repository.update_status(
agent_work_order_id,
AgentWorkOrderStatus.COMPLETED,
git_commit_count=git_stats["commit_count"],
git_files_changed=git_stats["files_changed"],
)
# Save final step history
await self.state_repository.save_step_history(agent_work_order_id, step_history)
bound_logger.info("agent_work_order_completed", total_steps=len(step_history.steps))
@@ -197,3 +226,35 @@ class WorkflowOrchestrator:
error=str(cleanup_error),
exc_info=True,
)
async def _calculate_git_stats(
self, branch_name: str | None, repo_path: str
) -> dict[str, int]:
"""Calculate git statistics for a branch
Args:
branch_name: Name of the git branch
repo_path: Path to the repository
Returns:
Dictionary with commit_count and files_changed
"""
if not branch_name:
return {"commit_count": 0, "files_changed": 0}
try:
# Calculate stats compared to main branch
commit_count = await get_commit_count(branch_name, repo_path)
files_changed = await get_files_changed(branch_name, repo_path, base_branch="main")
return {
"commit_count": commit_count,
"files_changed": files_changed,
}
except Exception as e:
logger.warning(
"git_stats_calculation_failed",
branch_name=branch_name,
error=str(e),
)
return {"commit_count": 0, "files_changed": 0}

View File

@@ -0,0 +1,141 @@
"""Agent Work Orders API Gateway Proxy
Proxies requests from the main API to the independent agent work orders service.
This provides a single API entry point for the frontend while maintaining service independence.
"""
import logging
import httpx
from fastapi import APIRouter, HTTPException, Request, Response
from ..config.service_discovery import get_agent_work_orders_url
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/agent-work-orders", tags=["agent-work-orders"])
@router.api_route(
"/{path:path}",
methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
response_class=Response,
)
async def proxy_to_agent_work_orders(request: Request, path: str = "") -> Response:
"""Proxy all requests to the agent work orders microservice.
This acts as an API gateway, forwarding requests to the independent
agent work orders service while maintaining a single API entry point.
Args:
request: The incoming HTTP request
path: The path segment to proxy (captured from URL)
Returns:
Response from the agent work orders service with preserved headers and status
Raises:
HTTPException: 503 if service unavailable, 504 if timeout, 500 for other errors
"""
# Get service URL from service discovery (outside try block for error handlers)
service_url = get_agent_work_orders_url()
try:
# Build target URL
target_path = f"/api/agent-work-orders/{path}" if path else "/api/agent-work-orders/"
target_url = f"{service_url}{target_path}"
# Preserve query parameters
query_string = str(request.url.query) if request.url.query else ""
if query_string:
target_url = f"{target_url}?{query_string}"
# Read request body
body = await request.body()
# Prepare headers (exclude host and connection headers)
headers = {
key: value
for key, value in request.headers.items()
if key.lower() not in ["host", "connection"]
}
logger.debug(
f"Proxying {request.method} {request.url.path} to {target_url}",
extra={
"method": request.method,
"source_path": request.url.path,
"target_url": target_url,
"query_params": query_string,
},
)
# Forward request to agent work orders service
async with httpx.AsyncClient(timeout=30.0) as client:
response = await client.request(
method=request.method,
url=target_url,
content=body if body else None,
headers=headers,
)
logger.debug(
f"Proxy response: {response.status_code}",
extra={
"status_code": response.status_code,
"target_url": target_url,
},
)
# Return response with preserved headers and status
return Response(
content=response.content,
status_code=response.status_code,
headers=dict(response.headers),
media_type=response.headers.get("content-type"),
)
except httpx.ConnectError as e:
logger.error(
f"Agent work orders service unavailable at {service_url}",
extra={
"error": str(e),
"service_url": service_url,
},
exc_info=True,
)
raise HTTPException(
status_code=503,
detail="Agent work orders service is currently unavailable",
) from e
except httpx.TimeoutException as e:
logger.error(
f"Agent work orders service timeout",
extra={
"error": str(e),
"service_url": service_url,
"target_url": target_url,
},
exc_info=True,
)
raise HTTPException(
status_code=504,
detail="Agent work orders service request timed out",
) from e
except Exception as e:
logger.error(
f"Error proxying to agent work orders service",
extra={
"error": str(e),
"service_url": service_url,
"method": request.method,
"path": request.url.path,
},
exc_info=True,
)
raise HTTPException(
status_code=500,
detail="Internal server error while contacting agent work orders service",
) from e

View File

@@ -32,6 +32,7 @@ class ServiceDiscovery:
server_port = os.getenv("ARCHON_SERVER_PORT")
mcp_port = os.getenv("ARCHON_MCP_PORT")
agents_port = os.getenv("ARCHON_AGENTS_PORT")
agent_work_orders_port = os.getenv("AGENT_WORK_ORDERS_PORT")
if not server_port:
raise ValueError(
@@ -51,11 +52,18 @@ class ServiceDiscovery:
"Please set it in your .env file or environment. " "Please set it in your .env file or environment. "
"Default value: 8052" "Default value: 8052"
) )
if not agent_work_orders_port:
raise ValueError(
"AGENT_WORK_ORDERS_PORT environment variable is required. "
"Please set it in your .env file or environment. "
"Default value: 8053"
)
self.DEFAULT_PORTS = {
"api": int(server_port),
"mcp": int(mcp_port),
"agents": int(agents_port),
"agent_work_orders": int(agent_work_orders_port),
}
self.environment = self._detect_environment()
@@ -66,9 +74,11 @@ class ServiceDiscovery:
"api": "archon-server", "api": "archon-server",
"mcp": "archon-mcp", "mcp": "archon-mcp",
"agents": "archon-agents", "agents": "archon-agents",
"agent_work_orders": "archon-agent-work-orders",
"archon-server": "archon-server", "archon-server": "archon-server",
"archon-mcp": "archon-mcp", "archon-mcp": "archon-mcp",
"archon-agents": "archon-agents", "archon-agents": "archon-agents",
"archon-agent-work-orders": "archon-agent-work-orders",
} }
@staticmethod @staticmethod
@@ -225,6 +235,11 @@ def get_agents_url() -> str:
return get_discovery().get_service_url("agents") return get_discovery().get_service_url("agents")
def get_agent_work_orders_url() -> str:
"""Get the Agent Work Orders service URL"""
return get_discovery().get_service_url("agent_work_orders")
async def is_service_healthy(service: str) -> bool:
"""Check if a service is healthy"""
return await get_discovery().health_check(service)
@@ -238,5 +253,6 @@ __all__ = [
"get_api_url", "get_api_url",
"get_mcp_url", "get_mcp_url",
"get_agents_url", "get_agents_url",
"get_agent_work_orders_url",
"is_service_healthy", "is_service_healthy",
] ]

View File

@@ -19,6 +19,7 @@ from fastapi import FastAPI, Response
from fastapi.middleware.cors import CORSMiddleware
from .api_routes.agent_chat_api import router as agent_chat_router
from .api_routes.agent_work_orders_proxy import router as agent_work_orders_router
from .api_routes.bug_report_api import router as bug_report_router
from .api_routes.internal_api import router as internal_router
from .api_routes.knowledge_api import router as knowledge_router
@@ -189,17 +190,13 @@ app.include_router(ollama_router)
app.include_router(projects_router)
app.include_router(progress_router)
app.include_router(agent_chat_router)
app.include_router(agent_work_orders_router)  # Proxy to independent agent work orders service
app.include_router(internal_router)
app.include_router(bug_report_router)
app.include_router(providers_router)
app.include_router(version_router)
app.include_router(migration_router)
# Mount Agent Work Orders sub-application
from src.agent_work_orders.main import app as agent_work_orders_app
app.mount("/api/agent-work-orders", agent_work_orders_app)
# Root endpoint
@app.get("/")

View File

@@ -0,0 +1,161 @@
"""Tests for agent work orders configuration
Tests configuration loading, service discovery, and URL construction.
"""
import pytest
from unittest.mock import patch
@pytest.mark.unit
def test_config_default_values():
"""Test configuration default values"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.CLAUDE_CLI_PATH == "claude"
assert config.GH_CLI_PATH == "gh"
assert config.EXECUTION_TIMEOUT == 3600
assert config.LOG_LEVEL == "INFO"
assert config.SERVICE_DISCOVERY_MODE == "local"
@pytest.mark.unit
@patch.dict("os.environ", {"SERVICE_DISCOVERY_MODE": "local"})
def test_config_local_service_discovery():
"""Test local service discovery mode"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.SERVICE_DISCOVERY_MODE == "local"
assert config.get_archon_server_url() == "http://localhost:8181"
assert config.get_archon_mcp_url() == "http://localhost:8051"
@pytest.mark.unit
@patch.dict("os.environ", {"SERVICE_DISCOVERY_MODE": "docker_compose"})
def test_config_docker_service_discovery():
"""Test docker_compose service discovery mode"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.SERVICE_DISCOVERY_MODE == "docker_compose"
assert config.get_archon_server_url() == "http://archon-server:8181"
assert config.get_archon_mcp_url() == "http://archon-mcp:8051"
@pytest.mark.unit
@patch.dict("os.environ", {"ARCHON_SERVER_URL": "http://custom-server:9999"})
def test_config_explicit_server_url_override():
"""Test explicit ARCHON_SERVER_URL overrides service discovery"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.get_archon_server_url() == "http://custom-server:9999"
@pytest.mark.unit
@patch.dict("os.environ", {"ARCHON_MCP_URL": "http://custom-mcp:7777"})
def test_config_explicit_mcp_url_override():
"""Test explicit ARCHON_MCP_URL overrides service discovery"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.get_archon_mcp_url() == "http://custom-mcp:7777"
@pytest.mark.unit
@patch.dict("os.environ", {"CLAUDE_CLI_PATH": "/custom/path/to/claude"})
def test_config_claude_cli_path_override():
"""Test CLAUDE_CLI_PATH can be overridden"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.CLAUDE_CLI_PATH == "/custom/path/to/claude"
@pytest.mark.unit
@patch.dict("os.environ", {"LOG_LEVEL": "DEBUG"})
def test_config_log_level_override():
"""Test LOG_LEVEL can be overridden"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.LOG_LEVEL == "DEBUG"
@pytest.mark.unit
@patch.dict("os.environ", {"CORS_ORIGINS": "http://example.com,http://test.com"})
def test_config_cors_origins_override():
"""Test CORS_ORIGINS can be overridden"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.CORS_ORIGINS == "http://example.com,http://test.com"
@pytest.mark.unit
def test_config_ensure_temp_dir(tmp_path):
"""Test ensure_temp_dir creates directory"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
import os
# Use tmp_path for testing
test_temp_dir = str(tmp_path / "test-agent-work-orders")
with patch.dict("os.environ", {"AGENT_WORK_ORDER_TEMP_DIR": test_temp_dir}):
config = AgentWorkOrdersConfig()
temp_dir = config.ensure_temp_dir()
assert temp_dir.exists()
assert temp_dir.is_dir()
assert str(temp_dir) == test_temp_dir
@pytest.mark.unit
@patch.dict(
"os.environ",
{
"SERVICE_DISCOVERY_MODE": "docker_compose",
"ARCHON_SERVER_URL": "http://explicit-server:8888",
},
)
def test_config_explicit_url_overrides_discovery_mode():
"""Test explicit URL takes precedence over service discovery mode"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
# Even in docker_compose mode, explicit URL should win
assert config.SERVICE_DISCOVERY_MODE == "docker_compose"
assert config.get_archon_server_url() == "http://explicit-server:8888"
@pytest.mark.unit
@patch.dict("os.environ", {"STATE_STORAGE_TYPE": "file"})
def test_config_state_storage_type():
"""Test STATE_STORAGE_TYPE configuration"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.STATE_STORAGE_TYPE == "file"
@pytest.mark.unit
@patch.dict("os.environ", {"FILE_STATE_DIRECTORY": "/custom/state/dir"})
def test_config_file_state_directory():
"""Test FILE_STATE_DIRECTORY configuration"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
config = AgentWorkOrdersConfig()
assert config.FILE_STATE_DIRECTORY == "/custom/state/dir"
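Read together, these tests pin down the configuration contract. Below is a sketch of a config object that would satisfy them; the real AgentWorkOrdersConfig in src/agent_work_orders/config.py may differ, and defaults not asserted by any test (CORS_ORIGINS, STATE_STORAGE_TYPE, AGENT_WORK_ORDER_TEMP_DIR) are assumptions.

# Sketch only: a config object consistent with the tests above.
import os
from pathlib import Path


class AgentWorkOrdersConfig:
    def __init__(self) -> None:
        self.CLAUDE_CLI_PATH = os.getenv("CLAUDE_CLI_PATH", "claude")
        self.GH_CLI_PATH = os.getenv("GH_CLI_PATH", "gh")
        self.EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", "3600"))
        self.LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
        self.CORS_ORIGINS = os.getenv("CORS_ORIGINS", "*")  # default is an assumption
        self.SERVICE_DISCOVERY_MODE = os.getenv("SERVICE_DISCOVERY_MODE", "local")
        self.STATE_STORAGE_TYPE = os.getenv("STATE_STORAGE_TYPE", "memory")  # assumed default
        self.FILE_STATE_DIRECTORY = os.getenv("FILE_STATE_DIRECTORY", "agent-work-orders-state")
        self.AGENT_WORK_ORDER_TEMP_DIR = os.getenv("AGENT_WORK_ORDER_TEMP_DIR", "/tmp/agent-work-orders")

    def get_archon_server_url(self) -> str:
        # An explicit URL always wins over the discovery mode.
        explicit = os.getenv("ARCHON_SERVER_URL")
        if explicit:
            return explicit
        host = "archon-server" if self.SERVICE_DISCOVERY_MODE == "docker_compose" else "localhost"
        return f"http://{host}:8181"

    def get_archon_mcp_url(self) -> str:
        explicit = os.getenv("ARCHON_MCP_URL")
        if explicit:
            return explicit
        host = "archon-mcp" if self.SERVICE_DISCOVERY_MODE == "docker_compose" else "localhost"
        return f"http://{host}:8051"

    def ensure_temp_dir(self) -> Path:
        temp_dir = Path(self.AGENT_WORK_ORDER_TEMP_DIR)
        temp_dir.mkdir(parents=True, exist_ok=True)
        return temp_dir

Reading the override variables at call time keeps the explicit-URL precedence working even when they are set after construction, which is what the docker_compose-plus-override test exercises.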

View File

@@ -0,0 +1,199 @@
"""Tests for standalone agent work orders server
Tests the server entry point, health checks, and service discovery configuration.
"""
import pytest
from unittest.mock import Mock, patch, AsyncMock
from fastapi.testclient import TestClient
@pytest.mark.unit
def test_server_health_endpoint():
"""Test health check endpoint returns correct structure"""
from src.agent_work_orders.server import app
client = TestClient(app)
response = client.get("/health")
assert response.status_code == 200
data = response.json()
assert data["service"] == "agent-work-orders"
assert data["version"] == "0.1.0"
assert "status" in data
assert "dependencies" in data
@pytest.mark.unit
def test_server_root_endpoint():
"""Test root endpoint returns service information"""
from src.agent_work_orders.server import app
client = TestClient(app)
response = client.get("/")
assert response.status_code == 200
data = response.json()
assert data["service"] == "agent-work-orders"
assert data["version"] == "0.1.0"
assert "docs" in data
assert "health" in data
assert "api" in data
@pytest.mark.unit
@patch("src.agent_work_orders.server.subprocess.run")
def test_health_check_claude_cli_available(mock_run):
"""Test health check detects Claude CLI availability"""
from src.agent_work_orders.server import app
# Mock successful Claude CLI execution
mock_run.return_value = Mock(returncode=0, stdout="2.0.21\n", stderr="")
client = TestClient(app)
response = client.get("/health")
assert response.status_code == 200
data = response.json()
assert data["dependencies"]["claude_cli"]["available"] is True
assert "version" in data["dependencies"]["claude_cli"]
@pytest.mark.unit
@patch("src.agent_work_orders.server.subprocess.run")
def test_health_check_claude_cli_unavailable(mock_run):
"""Test health check handles missing Claude CLI"""
from src.agent_work_orders.server import app
# Mock Claude CLI not found
mock_run.side_effect = FileNotFoundError("claude not found")
client = TestClient(app)
response = client.get("/health")
assert response.status_code == 200
data = response.json()
assert data["dependencies"]["claude_cli"]["available"] is False
assert "error" in data["dependencies"]["claude_cli"]
@pytest.mark.unit
@patch("src.agent_work_orders.server.shutil.which")
def test_health_check_git_availability(mock_which):
"""Test health check detects git availability"""
from src.agent_work_orders.server import app
# Mock git available
mock_which.return_value = "/usr/bin/git"
client = TestClient(app)
response = client.get("/health")
assert response.status_code == 200
data = response.json()
assert data["dependencies"]["git"]["available"] is True
@pytest.mark.unit
@patch("src.agent_work_orders.server.httpx.AsyncClient")
@patch.dict("os.environ", {"ARCHON_SERVER_URL": "http://localhost:8181"})
async def test_health_check_server_connectivity(mock_client_class):
"""Test health check validates server connectivity"""
from src.agent_work_orders.server import health_check
# Mock successful server response
mock_response = Mock(status_code=200)
mock_client = AsyncMock()
mock_client.get.return_value = mock_response
mock_client_class.return_value.__aenter__.return_value = mock_client
result = await health_check()
assert result["dependencies"]["archon_server"]["available"] is True
assert result["dependencies"]["archon_server"]["url"] == "http://localhost:8181"
@pytest.mark.unit
@patch("src.agent_work_orders.server.httpx.AsyncClient")
@patch.dict("os.environ", {"ARCHON_MCP_URL": "http://localhost:8051"})
async def test_health_check_mcp_connectivity(mock_client_class):
"""Test health check validates MCP connectivity"""
from src.agent_work_orders.server import health_check
# Mock successful MCP response
mock_response = Mock(status_code=200)
mock_client = AsyncMock()
mock_client.get.return_value = mock_response
mock_client_class.return_value.__aenter__.return_value = mock_client
result = await health_check()
assert result["dependencies"]["archon_mcp"]["available"] is True
assert result["dependencies"]["archon_mcp"]["url"] == "http://localhost:8051"
@pytest.mark.unit
@patch("src.agent_work_orders.server.httpx.AsyncClient")
@patch.dict("os.environ", {"ARCHON_SERVER_URL": "http://localhost:8181"})
async def test_health_check_server_unavailable(mock_client_class):
"""Test health check handles unavailable server"""
from src.agent_work_orders.server import health_check
# Mock connection error
mock_client = AsyncMock()
mock_client.get.side_effect = Exception("Connection refused")
mock_client_class.return_value.__aenter__.return_value = mock_client
result = await health_check()
assert result["dependencies"]["archon_server"]["available"] is False
assert "error" in result["dependencies"]["archon_server"]
@pytest.mark.unit
def test_cors_middleware_configured():
"""Test CORS middleware is properly configured"""
from src.agent_work_orders.server import app
# Check CORS middleware is in middleware stack
middleware_classes = [m.cls.__name__ for m in app.user_middleware]
assert "CORSMiddleware" in middleware_classes
@pytest.mark.unit
def test_router_included_with_prefix():
"""Test API routes are included with correct prefix"""
from src.agent_work_orders.server import app
# Check routes are mounted with /api/agent-work-orders prefix
routes = [route.path for route in app.routes]
assert any("/api/agent-work-orders" in route for route in routes)
@pytest.mark.unit
@patch.dict("os.environ", {"SERVICE_DISCOVERY_MODE": "local"})
def test_startup_logs_local_mode(caplog):
"""Test startup logs service discovery mode"""
from src.agent_work_orders.server import app
from src.agent_work_orders.config import config
# Verify config is set to local mode
assert config.SERVICE_DISCOVERY_MODE == "local"
@pytest.mark.unit
@patch.dict("os.environ", {"SERVICE_DISCOVERY_MODE": "docker_compose"})
def test_startup_logs_docker_mode(caplog):
"""Test startup logs docker_compose mode"""
from src.agent_work_orders.config import AgentWorkOrdersConfig
# Create fresh config instance with env var
config = AgentWorkOrdersConfig()
# Verify config is set to docker_compose mode
assert config.SERVICE_DISCOVERY_MODE == "docker_compose"
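The tests above imply a health endpoint shaped roughly like the sketch below. The actual src/agent_work_orders/server.py is not part of this excerpt; the "healthy"/"degraded" status values and the module-level config singleton are assumptions consistent with the tests.

# Sketch of the /health endpoint the tests exercise; not the committed code.
import shutil
import subprocess

import httpx
from fastapi import FastAPI

from src.agent_work_orders.config import config  # singleton, as imported in the tests

app = FastAPI(title="Agent Work Orders")


@app.get("/health")
async def health_check() -> dict:
    dependencies: dict[str, dict] = {}

    # Claude CLI: probe the binary with --version.
    try:
        result = subprocess.run(
            [config.CLAUDE_CLI_PATH, "--version"], capture_output=True, text=True, timeout=10
        )
        dependencies["claude_cli"] = {
            "available": result.returncode == 0,
            "version": result.stdout.strip(),
        }
    except (FileNotFoundError, subprocess.TimeoutExpired) as exc:
        dependencies["claude_cli"] = {"available": False, "error": str(exc)}

    # git: a PATH lookup is enough.
    dependencies["git"] = {"available": shutil.which("git") is not None}

    # Upstream Archon services: HTTP probe of their /health endpoints.
    for name, url in (
        ("archon_server", config.get_archon_server_url()),
        ("archon_mcp", config.get_archon_mcp_url()),
    ):
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{url}/health")
            dependencies[name] = {"available": response.status_code == 200, "url": url}
        except Exception as exc:
            dependencies[name] = {"available": False, "url": url, "error": str(exc)}

    all_ok = all(dep.get("available") for dep in dependencies.values())
    return {
        "service": "agent-work-orders",
        "version": "0.1.0",
        "status": "healthy" if all_ok else "degraded",
        "dependencies": dependencies,
    }

Returning 200 with per-dependency availability, rather than an error status, matches the tests, which always expect a successful response and then inspect the dependencies payload.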

143
python/uv.lock generated
View File

@@ -168,6 +168,14 @@ dependencies = [
]
[package.dev-dependencies]
agent-work-orders = [
{ name = "fastapi" },
{ name = "httpx" },
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "structlog" },
{ name = "uvicorn" },
]
agents = [
{ name = "fastapi" },
{ name = "httpx" },
@@ -264,6 +272,14 @@ server-reranking = [
requires-dist = [{ name = "structlog", specifier = ">=25.4.0" }] requires-dist = [{ name = "structlog", specifier = ">=25.4.0" }]
[package.metadata.requires-dev] [package.metadata.requires-dev]
agent-work-orders = [
{ name = "fastapi", specifier = ">=0.119.1" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "pydantic", specifier = ">=2.12.3" },
{ name = "python-dotenv", specifier = ">=1.1.1" },
{ name = "structlog", specifier = ">=25.4.0" },
{ name = "uvicorn", specifier = ">=0.38.0" },
]
agents = [
{ name = "fastapi", specifier = ">=0.104.0" },
{ name = "httpx", specifier = ">=0.24.0" },
@@ -886,16 +902,16 @@ wheels = [
[[package]]
name = "fastapi"
version = "0.119.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "starlette" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a6/f4/152127681182e6413e7a89684c434e19e7414ed7ac0c632999c3c6980640/fastapi-0.119.1.tar.gz", hash = "sha256:a5e3426edce3fe221af4e1992c6d79011b247e3b03cc57999d697fe76cbf8ae0", size = 338616 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b1/26/e6d959b4ac959fdb3e9c4154656fc160794db6af8e64673d52759456bf07/fastapi-0.119.1-py3-none-any.whl", hash = "sha256:0b8c2a2cce853216e150e9bd4faaed88227f8eb37de21cb200771f491586a27f", size = 108123 },
]
[[package]]
@@ -2104,7 +2120,7 @@ wheels = [
[[package]]
name = "pydantic"
version = "2.12.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
@@ -2112,9 +2128,9 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431 },
]
[[package]]
@@ -2184,44 +2200,69 @@ vertexai = [
[[package]]
name = "pydantic-core"
version = "2.41.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043 },
{ url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699 },
{ url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121 },
{ url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590 },
{ url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869 },
{ url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169 },
{ url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165 },
{ url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067 },
{ url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997 },
{ url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187 },
{ url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204 },
{ url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536 },
{ url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132 },
{ url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483 },
{ url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688 },
{ url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807 },
{ url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669 },
{ url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629 },
{ url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049 },
{ url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409 },
{ url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635 },
{ url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284 },
{ url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566 },
{ url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809 },
{ url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119 },
{ url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398 },
{ url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735 },
{ url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209 },
{ url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324 },
{ url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515 },
{ url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819 },
{ url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866 },
{ url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034 },
{ url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022 },
{ url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495 },
{ url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131 },
{ url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236 },
{ url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573 },
{ url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467 },
{ url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754 },
{ url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754 },
{ url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115 },
{ url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400 },
{ url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070 },
{ url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277 },
{ url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608 },
{ url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614 },
{ url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904 },
{ url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538 },
{ url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183 },
{ url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542 },
{ url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897 },
{ url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087 },
{ url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387 },
{ url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495 },
{ url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008 },
]
[[package]]
@@ -2434,11 +2475,11 @@ wheels = [
[[package]]
name = "python-dotenv"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 },
]
[[package]]
@@ -3230,23 +3271,23 @@ wheels = [
[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 },
]
[[package]]
name = "typing-inspection"
version = "0.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611 },
]
[[package]]
@@ -3269,15 +3310,15 @@ wheels = [
[[package]]
name = "uvicorn"
version = "0.38.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109 },
]
[[package]]