From 3800280f2eceaa281c528338b6aeb1201d2aa3c6 Mon Sep 17 00:00:00 2001 From: Rasmus Widing Date: Sat, 16 Aug 2025 00:10:23 +0300 Subject: [PATCH 1/3] Add Supabase key validation and simplify frontend state management - Add backend validation to detect and warn about anon vs service keys - Prevent startup with incorrect Supabase key configuration - Consolidate frontend state management following KISS principles - Remove duplicate state tracking and sessionStorage polling - Add clear error display when backend fails to start - Improve .env.example documentation with detailed key selection guide - Add comprehensive test coverage for validation logic - Remove unused test results checking to eliminate 404 errors The implementation now warns users about key misconfiguration while maintaining backward compatibility. Frontend state is simplified with MainLayout as the single source of truth for backend status. --- .env.example | 19 +- README.md | 55 ++-- archon-ui-main/Dockerfile | 6 +- .../src/components/BackendStartupError.tsx | 74 +++++ .../src/components/layouts/MainLayout.tsx | 32 +- .../components/onboarding/ProviderStep.tsx | 138 ++++----- .../src/components/settings/TestStatus.tsx | 14 +- archon-ui-main/src/pages/SettingsPage.tsx | 126 ++++---- .../src/services/credentialsService.ts | 277 +++++++++++------- archon-ui-main/src/services/testService.ts | 12 - archon-ui-main/test/errors.test.tsx | 39 ++- archon-ui-main/test/pages.test.tsx | 2 +- archon-ui-main/test/setup.ts | 5 +- migration/complete_setup.sql | 56 ++-- python/src/server/config/config.py | 66 ++++- python/src/server/main.py | 5 + .../src/server/services/credential_service.py | 16 +- python/tests/test_settings_api.py | 2 + python/tests/test_supabase_validation.py | 221 ++++++++++++++ 19 files changed, 848 insertions(+), 317 deletions(-) create mode 100644 archon-ui-main/src/components/BackendStartupError.tsx create mode 100644 python/tests/test_supabase_validation.py diff --git a/.env.example b/.env.example index f2cf73e5..7f2a1ad0 100644 --- a/.env.example +++ b/.env.example @@ -5,9 +5,22 @@ # https://supabase.com/dashboard/project//settings/api SUPABASE_URL= -# Get your SUPABASE_SERVICE_KEY from the API Keys section of your Supabase project settings - -# https://supabase.com/dashboard/project//settings/api-keys -# On this page it is called the service_role secret. +# ⚠️ CRITICAL: You MUST use the SERVICE ROLE key, NOT the Anon key! ⚠️ +# +# COMMON MISTAKE: Using the anon (public) key will cause ALL saves to fail with "permission denied"! +# +# How to get the CORRECT key: +# 1. Go to: https://supabase.com/dashboard/project//settings/api +# 2. In the Settings menu, click on "API keys" +# 3. Find "Project API keys" section +# 4. You will see TWO keys - choose carefully: +# ❌ anon (public): WRONG - This is shorter, starts with "eyJhbGc..." and contains "anon" in the JWT +# ✅ service_role (secret): CORRECT - This is longer and contains "service_role" in the JWT +# +# The service_role key is typically much longer than the anon key. +# If you see errors like "Failed to save" or "Permission denied", you're using the wrong key! +# +# On the Supabase dashboard, it's labeled as "service_role" under "Project API keys" SUPABASE_SERVICE_KEY= # Optional: Set log level for debugging diff --git a/README.md b/README.md index 43e83bdc..5eccbcdf 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ Archon is the **command center** for AI coding assistants. 
For you, it's a sleek interface to manage knowledge, context, and tasks for your projects. For the AI coding assistant(s), it's a **Model Context Protocol (MCP) server** to collaborate on and leverage the same knowledge, context, and tasks. Connect Claude Code, Kiro, Cursor, Windsurf, etc. to give your AI agents access to: - **Your documentation** (crawled websites, uploaded PDFs/docs) -- **Smart search capabilities** with advanced RAG strategies +- **Smart search capabilities** with advanced RAG strategies - **Task management** integrated with your knowledge base - **Real-time updates** as you add new content and collaborate with your coding assistant on tasks - **Much more** coming soon to build Archon into an integrated environment for all context engineering @@ -40,6 +40,7 @@ This new vision for Archon replaces the old one (the agenteer). Archon used to b ## Quick Start ### Prerequisites + - [Docker Desktop](https://www.docker.com/products/docker-desktop/) - [Supabase](https://supabase.com/) account (free tier or local Supabase both work) - [OpenAI API key](https://platform.openai.com/api-keys) (Gemini and Ollama are supported too!) @@ -47,12 +48,14 @@ This new vision for Archon replaces the old one (the agenteer). Archon used to b ### Setup Instructions 1. **Clone Repository**: + ```bash git clone https://github.com/coleam00/archon.git cd archon ``` 2. **Environment Configuration**: + ```bash cp .env.example .env # Edit .env and add your Supabase credentials: @@ -65,10 +68,11 @@ This new vision for Archon replaces the old one (the agenteer). Archon used to b 3. **Database Setup**: In your [Supabase project](https://supabase.com/dashboard) SQL Editor, copy, paste, and execute the contents of `migration/complete_setup.sql` 4. **Start Services**: + ```bash docker-compose up --build -d ``` - + This starts the core microservices: - **Server**: Core API and business logic (Port: 8181) - **MCP Server**: Protocol interface for AI clients (Port: 8051) @@ -90,17 +94,18 @@ If you need to completely reset your database and start fresh: ⚠️ Reset Database - This will delete ALL data for Archon! 1. **Run Reset Script**: In your Supabase SQL Editor, run the contents of `migration/RESET_DB.sql` - + ⚠️ WARNING: This will delete all Archon specific tables and data! Nothing else will be touched in your DB though. 2. **Rebuild Database**: After reset, run `migration/complete_setup.sql` to create all the tables again. 3. **Restart Services**: + ```bash docker-compose up -d ``` -4. **Reconfigure**: +4. 
**Reconfigure**: - Select your LLM/embedding provider and set the API key again - Re-upload any documents or re-crawl websites @@ -121,23 +126,25 @@ Once everything is running: ### Core Services -| Service | Container Name | Default URL | Purpose | -|---------|---------------|-------------|---------| -| **Web Interface** | archon-ui | http://localhost:3737 | Main dashboard and controls | -| **API Service** | archon-server | http://localhost:8181 | Web crawling, document processing | -| **MCP Server** | archon-mcp | http://localhost:8051 | Model Context Protocol interface | -| **Agents Service** | archon-agents | http://localhost:8052 | AI/ML operations, reranking | +| Service | Container Name | Default URL | Purpose | +| ------------------ | -------------- | --------------------- | --------------------------------- | +| **Web Interface** | archon-ui | http://localhost:3737 | Main dashboard and controls | +| **API Service** | archon-server | http://localhost:8181 | Web crawling, document processing | +| **MCP Server** | archon-mcp | http://localhost:8051 | Model Context Protocol interface | +| **Agents Service** | archon-agents | http://localhost:8052 | AI/ML operations, reranking | ## What's Included ### 🧠 Knowledge Management + - **Smart Web Crawling**: Automatically detects and crawls entire documentation sites, sitemaps, and individual pages - **Document Processing**: Upload and process PDFs, Word docs, markdown files, and text documents with intelligent chunking - **Code Example Extraction**: Automatically identifies and indexes code examples from documentation for enhanced search - **Vector Search**: Advanced semantic search with contextual embeddings for precise knowledge retrieval - **Source Management**: Organize knowledge by source, type, and tags for easy filtering -### 🤖 AI Integration +### 🤖 AI Integration + - **Model Context Protocol (MCP)**: Connect any MCP-compatible client (Claude Code, Cursor, even non-AI coding assistants like Claude Desktop) - **10 MCP Tools**: Comprehensive yet simple set of tools for RAG queries, task management, and project operations - **Multi-LLM Support**: Works with OpenAI, Ollama, and Google Gemini models @@ -145,12 +152,14 @@ Once everything is running: - **Real-time Streaming**: Live responses from AI agents with progress tracking ### 📋 Project & Task Management + - **Hierarchical Projects**: Organize work with projects, features, and tasks in a structured workflow -- **AI-Assisted Creation**: Generate project requirements and tasks using integrated AI agents +- **AI-Assisted Creation**: Generate project requirements and tasks using integrated AI agents - **Document Management**: Version-controlled documents with collaborative editing capabilities - **Progress Tracking**: Real-time updates and status management across all project activities ### 🔄 Real-time Collaboration + - **WebSocket Updates**: Live progress tracking for crawling, processing, and AI operations - **Multi-user Support**: Collaborative knowledge building and project management - **Background Processing**: Asynchronous operations that don't block the user interface @@ -184,17 +193,17 @@ Archon uses true microservices architecture with clear separation of concerns: ### Service Responsibilities -| Service | Location | Purpose | Key Features | -|---------|----------|---------|--------------| -| **Frontend** | `archon-ui-main/` | Web interface and dashboard | React, TypeScript, TailwindCSS, Socket.IO client | -| **Server** | `python/src/server/` | Core business logic and APIs | 
FastAPI, service layer, Socket.IO broadcasts, all ML/AI operations | -| **MCP Server** | `python/src/mcp/` | MCP protocol interface | Lightweight HTTP wrapper, 10 MCP tools, session management | -| **Agents** | `python/src/agents/` | PydanticAI agent hosting | Document and RAG agents, streaming responses | +| Service | Location | Purpose | Key Features | +| -------------- | -------------------- | ---------------------------- | ------------------------------------------------------------------ | +| **Frontend** | `archon-ui-main/` | Web interface and dashboard | React, TypeScript, TailwindCSS, Socket.IO client | +| **Server** | `python/src/server/` | Core business logic and APIs | FastAPI, service layer, Socket.IO broadcasts, all ML/AI operations | +| **MCP Server** | `python/src/mcp/` | MCP protocol interface | Lightweight HTTP wrapper, 10 MCP tools, session management | +| **Agents** | `python/src/agents/` | PydanticAI agent hosting | Document and RAG agents, streaming responses | ### Communication Patterns - **HTTP-based**: All inter-service communication uses HTTP APIs -- **Socket.IO**: Real-time updates from Server to Frontend +- **Socket.IO**: Real-time updates from Server to Frontend - **MCP Protocol**: AI clients connect to MCP Server via SSE or stdio - **No Direct Imports**: Services are truly independent with no shared code dependencies @@ -208,8 +217,9 @@ Archon uses true microservices architecture with clear separation of concerns: ## 🔧 Configuring Custom Ports & Hostname By default, Archon services run on the following ports: + - **Archon-UI**: 3737 -- **Archon-Server**: 8181 +- **Archon-Server**: 8181 - **Archon-MCP**: 8051 - **Archon-Agents**: 8052 - **Archon-Docs**: 3838 (optional) @@ -228,6 +238,7 @@ ARCHON_DOCS_PORT=3838 ``` Example: Running on different ports: + ```bash ARCHON_SERVER_PORT=8282 ARCHON_MCP_PORT=8151 @@ -248,11 +259,13 @@ HOST=myserver.com # Use public domain ``` This is useful when: + - Running Archon on a different machine and accessing it remotely - Using a custom domain name for your installation - Deploying in a network environment where `localhost` isn't accessible After changing hostname or ports: + 1. Restart Docker containers: `docker-compose down && docker-compose up -d` 2. Access the UI at: `http://${HOST}:${ARCHON_UI_PORT}` 3. Update your AI client configuration with the new hostname and MCP port @@ -265,7 +278,7 @@ For development with hot reload: # Backend services (with auto-reload) docker-compose up archon-server archon-mcp archon-agents --build -# Frontend (with hot reload) +# Frontend (with hot reload) cd archon-ui-main && npm run dev # Documentation (with hot reload) diff --git a/archon-ui-main/Dockerfile b/archon-ui-main/Dockerfile index a413744a..f36ed4c1 100644 --- a/archon-ui-main/Dockerfile +++ b/archon-ui-main/Dockerfile @@ -21,5 +21,9 @@ COPY . . 
# Expose Vite's default port EXPOSE 5173 +# Add a small startup script to wait a moment before starting Vite +# This helps ensure the backend is fully ready even after healthcheck passes +RUN echo '#!/bin/sh\nsleep 3\nexec npm run dev -- --host 0.0.0.0' > /app/start.sh && chmod +x /app/start.sh + # Start Vite dev server with host binding for Docker -CMD ["npm", "run", "dev", "--", "--host", "0.0.0.0"] \ No newline at end of file +CMD ["/app/start.sh"] \ No newline at end of file diff --git a/archon-ui-main/src/components/BackendStartupError.tsx b/archon-ui-main/src/components/BackendStartupError.tsx new file mode 100644 index 00000000..a7a1e782 --- /dev/null +++ b/archon-ui-main/src/components/BackendStartupError.tsx @@ -0,0 +1,74 @@ +import React from 'react'; +import { AlertCircle, Terminal, RefreshCw } from 'lucide-react'; + +export const BackendStartupError: React.FC = () => { + const handleRetry = () => { + // Reload the page to retry + window.location.reload(); + }; + + return ( +
+    <div className="fixed inset-0 z-50 flex items-center justify-center bg-black/80 backdrop-blur-sm">
+      <div className="max-w-2xl w-full mx-4 p-8 rounded-lg border border-red-500/30 bg-gray-900 shadow-xl">
+        <div className="flex items-center gap-3 mb-4">
+          <AlertCircle className="w-8 h-8 text-red-500" />
+          <h1 className="text-xl font-semibold text-white">
+            Backend Service Startup Failure
+          </h1>
+        </div>
+
+        <p className="text-gray-300 mb-6">
+          The Archon backend service failed to start. This is typically due to a configuration issue.
+        </p>
+
+        <div className="mb-6">
+          <div className="flex items-center gap-2 mb-2">
+            <Terminal className="w-5 h-5 text-gray-400" />
+            <span className="font-medium text-white">Check Docker Logs</span>
+          </div>
+          <p className="text-gray-400 mb-2">
+            Check the Archon-Server logs in Docker Desktop for detailed error information.
+          </p>
+          <ol className="text-gray-400 space-y-1">
+            <li>1. Open Docker Desktop</li>
+            <li>2. Go to Containers tab</li>
+            <li>3. Click on Archon-Server</li>
+            <li>4. View the logs for the specific error message</li>
+          </ol>
+        </div>
+
+        <div className="mb-6 p-3 rounded border border-yellow-500/30 bg-yellow-500/10">
+          <p className="text-yellow-300">
+            Common issue: Using an ANON key instead of SERVICE key in your .env file
+          </p>
+        </div>
+
+        <div className="mb-6">
+          <p className="text-gray-300 mb-2">
+            After fixing the issue in your .env file, recreate the Docker containers:
+          </p>
+          <code className="block p-2 rounded bg-black text-green-400 font-mono text-sm">
+            docker compose down && docker compose up -d
+          </code>
+          <p className="text-gray-500 text-sm mt-2">
+            Note: Use 'down' and 'up', not 'restart' - containers need to be recreated to load new environment variables
+          </p>
+        </div>
+
+        <button
+          onClick={handleRetry}
+          className="flex items-center gap-2 px-4 py-2 rounded bg-red-600 hover:bg-red-700 text-white"
+        >
+          <RefreshCw className="w-4 h-4" />
+          Retry
+        </button>
+      </div>
+    </div>
+ ); +}; \ No newline at end of file diff --git a/archon-ui-main/src/components/layouts/MainLayout.tsx b/archon-ui-main/src/components/layouts/MainLayout.tsx index fca31387..d1acd52e 100644 --- a/archon-ui-main/src/components/layouts/MainLayout.tsx +++ b/archon-ui-main/src/components/layouts/MainLayout.tsx @@ -6,6 +6,7 @@ import { X } from 'lucide-react'; import { useToast } from '../../contexts/ToastContext'; import { credentialsService } from '../../services/credentialsService'; import { isLmConfigured } from '../../utils/onboarding'; +import { BackendStartupError } from '../BackendStartupError'; /** * Props for the MainLayout component */ @@ -29,13 +30,14 @@ export const MainLayout: React.FC = ({ const navigate = useNavigate(); const location = useLocation(); const [backendReady, setBackendReady] = useState(false); + const [backendStartupFailed, setBackendStartupFailed] = useState(false); // Check backend readiness useEffect(() => { const checkBackendHealth = async (retryCount = 0) => { - const maxRetries = 10; // Increased retries for initialization - const retryDelay = 1000; + const maxRetries = 3; // 3 retries total + const retryDelay = 1500; // 1.5 seconds between retries try { // Create AbortController for proper timeout handling @@ -58,6 +60,7 @@ export const MainLayout: React.FC = ({ if (healthData.ready === true) { console.log('✅ Backend is fully initialized'); setBackendReady(true); + setBackendStartupFailed(false); } else { // Backend is starting up but not ready yet console.log(`🔄 Backend initializing... (attempt ${retryCount + 1}/${maxRetries}):`, healthData.message || 'Loading credentials...'); @@ -66,9 +69,10 @@ export const MainLayout: React.FC = ({ if (retryCount < maxRetries) { setTimeout(() => { checkBackendHealth(retryCount + 1); - }, retryDelay); // Constant 1s retry during initialization + }, retryDelay); // Constant 1.5s retry during initialization } else { - console.warn('Backend initialization taking too long - skipping credential check'); + console.warn('Backend initialization taking too long - proceeding anyway'); + // Don't mark as failed yet, just not fully ready setBackendReady(false); } } @@ -80,7 +84,10 @@ export const MainLayout: React.FC = ({ const errorMessage = error instanceof Error ? (error.name === 'AbortError' ? 
'Request timeout (5s)' : error.message) : 'Unknown error'; - console.log(`Backend not ready yet (attempt ${retryCount + 1}/${maxRetries}):`, errorMessage); + // Only log after first attempt to reduce noise during normal startup + if (retryCount > 0) { + console.log(`Backend not ready yet (attempt ${retryCount + 1}/${maxRetries}):`, errorMessage); + } // Retry if we haven't exceeded max retries if (retryCount < maxRetries) { @@ -88,8 +95,9 @@ export const MainLayout: React.FC = ({ checkBackendHealth(retryCount + 1); }, retryDelay * Math.pow(1.5, retryCount)); // Exponential backoff for connection errors } else { - console.warn('Backend not ready after maximum retries - skipping credential check'); + console.error('Backend startup failed after maximum retries - showing error message'); setBackendReady(false); + setBackendStartupFailed(true); } } }; @@ -99,11 +107,16 @@ export const MainLayout: React.FC = ({ setTimeout(() => { checkBackendHealth(); }, 1000); // Wait 1 second for initial app startup - }, [showToast, navigate]); // Removed backendReady from dependencies to prevent double execution + }, []); // Empty deps - only run once on mount // Check for onboarding redirect after backend is ready useEffect(() => { const checkOnboarding = async () => { + // Skip if backend failed to start + if (backendStartupFailed) { + return; + } + // Skip if not ready, already on onboarding, or already dismissed if (!backendReady || location.pathname === '/onboarding') { return; @@ -152,9 +165,12 @@ export const MainLayout: React.FC = ({ }; checkOnboarding(); - }, [backendReady, location.pathname, navigate, showToast]); + }, [backendReady, backendStartupFailed, location.pathname, navigate, showToast]); return
+      {/* Show backend startup error if backend failed to start */}
+      {backendStartupFailed && <BackendStartupError />}
+
      {/* Fixed full-page background grid that doesn't scroll */}
{/* Floating Navigation */} diff --git a/archon-ui-main/src/components/onboarding/ProviderStep.tsx b/archon-ui-main/src/components/onboarding/ProviderStep.tsx index 0f56d158..cb6469da 100644 --- a/archon-ui-main/src/components/onboarding/ProviderStep.tsx +++ b/archon-ui-main/src/components/onboarding/ProviderStep.tsx @@ -1,10 +1,10 @@ -import { useState } from 'react'; -import { Key, ExternalLink, Save, Loader } from 'lucide-react'; -import { Input } from '../ui/Input'; -import { Button } from '../ui/Button'; -import { Select } from '../ui/Select'; -import { useToast } from '../../contexts/ToastContext'; -import { credentialsService } from '../../services/credentialsService'; +import { useState } from "react"; +import { Key, ExternalLink, Save, Loader } from "lucide-react"; +import { Input } from "../ui/Input"; +import { Button } from "../ui/Button"; +import { Select } from "../ui/Select"; +import { useToast } from "../../contexts/ToastContext"; +import { credentialsService } from "../../services/credentialsService"; interface ProviderStepProps { onSaved: () => void; @@ -12,14 +12,14 @@ interface ProviderStepProps { } export const ProviderStep = ({ onSaved, onSkip }: ProviderStepProps) => { - const [provider, setProvider] = useState('openai'); - const [apiKey, setApiKey] = useState(''); + const [provider, setProvider] = useState("openai"); + const [apiKey, setApiKey] = useState(""); const [saving, setSaving] = useState(false); const { showToast } = useToast(); const handleSave = async () => { if (!apiKey.trim()) { - showToast('Please enter an API key', 'error'); + showToast("Please enter an API key", "error"); return; } @@ -27,60 +27,50 @@ export const ProviderStep = ({ onSaved, onSkip }: ProviderStepProps) => { try { // Save the API key await credentialsService.createCredential({ - key: 'OPENAI_API_KEY', + key: "OPENAI_API_KEY", value: apiKey, is_encrypted: true, - category: 'api_keys' + category: "api_keys", }); // Update the provider setting if needed await credentialsService.updateCredential({ - key: 'LLM_PROVIDER', - value: 'openai', + key: "LLM_PROVIDER", + value: "openai", is_encrypted: false, - category: 'rag_strategy' + category: "rag_strategy", }); - showToast('API key saved successfully!', 'success'); + showToast("API key saved successfully!", "success"); // Mark onboarding as dismissed when API key is saved - localStorage.setItem('onboardingDismissed', 'true'); + localStorage.setItem("onboardingDismissed", "true"); onSaved(); } catch (error) { - // Detailed error handling for critical configuration per alpha principles - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - const errorDetails = { - context: 'API key configuration', - operation: 'save_openai_key', - provider: 'openai', - error: errorMessage, - timestamp: new Date().toISOString() - }; - - // Log with full context and stack trace - console.error('API_KEY_SAVE_FAILED:', errorDetails, error); - + // Log error for debugging per alpha principles + const errorMessage = + error instanceof Error ? error.message : "Unknown error"; + console.error("Failed to save API key:", error); + // Show specific error details to help user resolve the issue - if (errorMessage.includes('duplicate') || errorMessage.includes('already exists')) { + if ( + errorMessage.includes("duplicate") || + errorMessage.includes("already exists") + ) { showToast( - 'API key already exists. Please update it in Settings if you want to change it.', - 'warning' + "API key already exists. 
Please update it in Settings if you want to change it.", + "warning", ); - } else if (errorMessage.includes('network') || errorMessage.includes('fetch')) { + } else if ( + errorMessage.includes("network") || + errorMessage.includes("fetch") + ) { showToast( `Network error while saving API key: ${errorMessage}. Please check your connection.`, - 'error' - ); - } else if (errorMessage.includes('unauthorized') || errorMessage.includes('forbidden')) { - showToast( - `Permission error: ${errorMessage}. Please check backend configuration.`, - 'error' + "error", ); } else { // Show the actual error for unknown issues - showToast( - `Failed to save API key: ${errorMessage}`, - 'error' - ); + showToast(`Failed to save API key: ${errorMessage}`, "error"); } } finally { setSaving(false); @@ -88,9 +78,9 @@ export const ProviderStep = ({ onSaved, onSkip }: ProviderStepProps) => { }; const handleSkip = () => { - showToast('You can configure your provider in Settings', 'info'); + showToast("You can configure your provider in Settings", "info"); // Mark onboarding as dismissed when skipping - localStorage.setItem('onboardingDismissed', 'true'); + localStorage.setItem("onboardingDismissed", "true"); onSkip(); }; @@ -103,21 +93,24 @@ export const ProviderStep = ({ onSaved, onSkip }: ProviderStepProps) => { value={provider} onChange={(e) => setProvider(e.target.value)} options={[ - { value: 'openai', label: 'OpenAI' }, - { value: 'google', label: 'Google Gemini' }, - { value: 'ollama', label: 'Ollama (Local)' }, + { value: "openai", label: "OpenAI" }, + { value: "google", label: "Google Gemini" }, + { value: "ollama", label: "Ollama (Local)" }, ]} accentColor="green" />

- {provider === 'openai' && 'OpenAI provides powerful models like GPT-4. You\'ll need an API key from OpenAI.'} - {provider === 'google' && 'Google Gemini offers advanced AI capabilities. Configure in Settings after setup.'} - {provider === 'ollama' && 'Ollama runs models locally on your machine. Configure in Settings after setup.'} + {provider === "openai" && + "OpenAI provides powerful models like GPT-4. You'll need an API key from OpenAI."} + {provider === "google" && + "Google Gemini offers advanced AI capabilities. Configure in Settings after setup."} + {provider === "ollama" && + "Ollama runs models locally on your machine. Configure in Settings after setup."}

{/* OpenAI API Key Input */} - {provider === 'openai' && ( + {provider === "openai" && ( <>
{ size="lg" onClick={handleSave} disabled={saving || !apiKey.trim()} - icon={saving ? : } + icon={ + saving ? ( + + ) : ( + + ) + } className="flex-1" > - {saving ? 'Saving...' : 'Save & Continue'} + {saving ? "Saving..." : "Save & Continue"}
)} ); -}; \ No newline at end of file +}; diff --git a/archon-ui-main/src/components/settings/TestStatus.tsx b/archon-ui-main/src/components/settings/TestStatus.tsx index 7484bdf9..143a9cab 100644 --- a/archon-ui-main/src/components/settings/TestStatus.tsx +++ b/archon-ui-main/src/components/settings/TestStatus.tsx @@ -70,23 +70,15 @@ export const TestStatus = () => { }; }, []); - // Check for test results availability + // Test results availability - not implemented yet useEffect(() => { - const checkResults = async () => { - const hasTestResults = await testService.hasTestResults(); - setHasResults(hasTestResults); - }; - checkResults(); + setHasResults(false); }, []); // Check for results when UI tests complete useEffect(() => { if (!uiTest.isRunning && uiTest.exitCode === 0) { - // Small delay to ensure files are written - setTimeout(async () => { - const hasTestResults = await testService.hasTestResults(); - setHasResults(hasTestResults); - }, 2000); + setHasResults(false); } }, [uiTest.isRunning, uiTest.exitCode]); diff --git a/archon-ui-main/src/pages/SettingsPage.tsx b/archon-ui-main/src/pages/SettingsPage.tsx index 46a2466a..e524836f 100644 --- a/archon-ui-main/src/pages/SettingsPage.tsx +++ b/archon-ui-main/src/pages/SettingsPage.tsx @@ -1,19 +1,35 @@ -import { useState, useEffect } from 'react'; -import { Loader, Settings, ChevronDown, ChevronUp, Palette, Key, Brain, Code, Activity, FileCode, Bug } from 'lucide-react'; -import { motion, AnimatePresence } from 'framer-motion'; -import { useToast } from '../contexts/ToastContext'; -import { useSettings } from '../contexts/SettingsContext'; -import { useStaggeredEntrance } from '../hooks/useStaggeredEntrance'; -import { FeaturesSection } from '../components/settings/FeaturesSection'; -import { APIKeysSection } from '../components/settings/APIKeysSection'; -import { RAGSettings } from '../components/settings/RAGSettings'; -import { CodeExtractionSettings } from '../components/settings/CodeExtractionSettings'; -import { TestStatus } from '../components/settings/TestStatus'; -import { IDEGlobalRules } from '../components/settings/IDEGlobalRules'; -import { ButtonPlayground } from '../components/settings/ButtonPlayground'; -import { CollapsibleSettingsCard } from '../components/ui/CollapsibleSettingsCard'; -import { BugReportButton } from '../components/bug-report/BugReportButton'; -import { credentialsService, RagSettings, CodeExtractionSettings as CodeExtractionSettingsType } from '../services/credentialsService'; +import { useState, useEffect } from "react"; +import { + Loader, + Settings, + ChevronDown, + ChevronUp, + Palette, + Key, + Brain, + Code, + Activity, + FileCode, + Bug, +} from "lucide-react"; +import { motion, AnimatePresence } from "framer-motion"; +import { useToast } from "../contexts/ToastContext"; +import { useSettings } from "../contexts/SettingsContext"; +import { useStaggeredEntrance } from "../hooks/useStaggeredEntrance"; +import { FeaturesSection } from "../components/settings/FeaturesSection"; +import { APIKeysSection } from "../components/settings/APIKeysSection"; +import { RAGSettings } from "../components/settings/RAGSettings"; +import { CodeExtractionSettings } from "../components/settings/CodeExtractionSettings"; +import { TestStatus } from "../components/settings/TestStatus"; +import { IDEGlobalRules } from "../components/settings/IDEGlobalRules"; +import { ButtonPlayground } from "../components/settings/ButtonPlayground"; +import { CollapsibleSettingsCard } from 
"../components/ui/CollapsibleSettingsCard"; +import { BugReportButton } from "../components/bug-report/BugReportButton"; +import { + credentialsService, + RagSettings, + CodeExtractionSettings as CodeExtractionSettingsType, +} from "../services/credentialsService"; export const SettingsPage = () => { const [ragSettings, setRagSettings] = useState({ @@ -22,56 +38,56 @@ export const SettingsPage = () => { USE_HYBRID_SEARCH: true, USE_AGENTIC_RAG: true, USE_RERANKING: true, - MODEL_CHOICE: 'gpt-4.1-nano' - }); - const [codeExtractionSettings, setCodeExtractionSettings] = useState({ - MIN_CODE_BLOCK_LENGTH: 250, - MAX_CODE_BLOCK_LENGTH: 5000, - ENABLE_COMPLETE_BLOCK_DETECTION: true, - ENABLE_LANGUAGE_SPECIFIC_PATTERNS: true, - ENABLE_PROSE_FILTERING: true, - MAX_PROSE_RATIO: 0.15, - MIN_CODE_INDICATORS: 3, - ENABLE_DIAGRAM_FILTERING: true, - ENABLE_CONTEXTUAL_LENGTH: true, - CODE_EXTRACTION_MAX_WORKERS: 3, - CONTEXT_WINDOW_SIZE: 1000, - ENABLE_CODE_SUMMARIES: true + MODEL_CHOICE: "gpt-4.1-nano", }); + const [codeExtractionSettings, setCodeExtractionSettings] = + useState({ + MIN_CODE_BLOCK_LENGTH: 250, + MAX_CODE_BLOCK_LENGTH: 5000, + ENABLE_COMPLETE_BLOCK_DETECTION: true, + ENABLE_LANGUAGE_SPECIFIC_PATTERNS: true, + ENABLE_PROSE_FILTERING: true, + MAX_PROSE_RATIO: 0.15, + MIN_CODE_INDICATORS: 3, + ENABLE_DIAGRAM_FILTERING: true, + ENABLE_CONTEXTUAL_LENGTH: true, + CODE_EXTRACTION_MAX_WORKERS: 3, + CONTEXT_WINDOW_SIZE: 1000, + ENABLE_CODE_SUMMARIES: true, + }); const [loading, setLoading] = useState(true); const [error, setError] = useState(null); const [showButtonPlayground, setShowButtonPlayground] = useState(false); const { showToast } = useToast(); const { projectsEnabled } = useSettings(); - + // Use staggered entrance animation - const { isVisible, containerVariants, itemVariants, titleVariants } = useStaggeredEntrance( - [1, 2, 3, 4], - 0.15 - ); + const { isVisible, containerVariants, itemVariants, titleVariants } = + useStaggeredEntrance([1, 2, 3, 4], 0.15); // Load settings on mount useEffect(() => { loadSettings(); }, []); - const loadSettings = async () => { + const loadSettings = async (isRetry = false) => { try { setLoading(true); setError(null); - + // Load RAG settings const ragSettingsData = await credentialsService.getRagSettings(); setRagSettings(ragSettingsData); - + // Load Code Extraction settings - const codeExtractionSettingsData = await credentialsService.getCodeExtractionSettings(); + const codeExtractionSettingsData = + await credentialsService.getCodeExtractionSettings(); setCodeExtractionSettings(codeExtractionSettingsData); } catch (err) { - setError('Failed to load settings'); + setError("Failed to load settings"); console.error(err); - showToast('Failed to load settings', 'error'); + showToast("Failed to load settings", "error"); } finally { setLoading(false); } @@ -88,12 +104,15 @@ export const SettingsPage = () => { return ( {/* Header */} - + { + {/* Main content with two-column layout */}
{/* Left Column */} @@ -165,7 +185,10 @@ export const SettingsPage = () => { storageKey="rag-settings" defaultExpanded={true} > - + @@ -176,9 +199,9 @@ export const SettingsPage = () => { storageKey="code-extraction" defaultExpanded={true} > - @@ -194,7 +217,8 @@ export const SettingsPage = () => { >

- Found a bug or issue? Report it to help improve Archon V2 Alpha. + Found a bug or issue? Report it to help improve Archon V2 + Alpha.

@@ -234,7 +258,7 @@ export const SettingsPage = () => { {showButtonPlayground && ( { )} ); -}; \ No newline at end of file +}; diff --git a/archon-ui-main/src/services/credentialsService.ts b/archon-ui-main/src/services/credentialsService.ts index d3e10962..bb14b489 100644 --- a/archon-ui-main/src/services/credentialsService.ts +++ b/archon-ui-main/src/services/credentialsService.ts @@ -53,56 +53,81 @@ export interface CodeExtractionSettings { ENABLE_CODE_SUMMARIES: boolean; } -import { getApiUrl } from '../config/api'; +import { getApiUrl } from "../config/api"; class CredentialsService { private baseUrl = getApiUrl(); + private handleCredentialError(error: any, context: string): Error { + const errorMessage = error instanceof Error ? error.message : String(error); + + // Check for network errors + if ( + errorMessage.toLowerCase().includes("network") || + errorMessage.includes("fetch") || + errorMessage.includes("Failed to fetch") + ) { + return new Error( + `Network error while ${context.toLowerCase()}: ${errorMessage}. ` + + `Please check your connection and server status.`, + ); + } + + // Return original error with context + return new Error(`${context} failed: ${errorMessage}`); + } + async getAllCredentials(): Promise { const response = await fetch(`${this.baseUrl}/api/credentials`); if (!response.ok) { - throw new Error('Failed to fetch credentials'); + throw new Error("Failed to fetch credentials"); } return response.json(); } async getCredentialsByCategory(category: string): Promise { - const response = await fetch(`${this.baseUrl}/api/credentials/categories/${category}`); + const response = await fetch( + `${this.baseUrl}/api/credentials/categories/${category}`, + ); if (!response.ok) { throw new Error(`Failed to fetch credentials for category: ${category}`); } const result = await response.json(); - + // The API returns {credentials: {...}} where credentials is a dict // Convert to array format expected by frontend - if (result.credentials && typeof result.credentials === 'object') { - return Object.entries(result.credentials).map(([key, value]: [string, any]) => { - if (value && typeof value === 'object' && value.is_encrypted) { - return { - key, - value: undefined, - encrypted_value: value.encrypted_value, - is_encrypted: true, - category, - description: value.description - }; - } else { - return { - key, - value: value, - encrypted_value: undefined, - is_encrypted: false, - category, - description: '' - }; - } - }); + if (result.credentials && typeof result.credentials === "object") { + return Object.entries(result.credentials).map( + ([key, value]: [string, any]) => { + if (value && typeof value === "object" && value.is_encrypted) { + return { + key, + value: undefined, + encrypted_value: value.encrypted_value, + is_encrypted: true, + category, + description: value.description, + }; + } else { + return { + key, + value: value, + encrypted_value: undefined, + is_encrypted: false, + category, + description: "", + }; + } + }, + ); } - + return []; } - async getCredential(key: string): Promise<{ key: string; value?: string; is_encrypted?: boolean }> { + async getCredential( + key: string, + ): Promise<{ key: string; value?: string; is_encrypted?: boolean }> { const response = await fetch(`${this.baseUrl}/api/credentials/${key}`); if (!response.ok) { if (response.status === 404) { @@ -115,24 +140,24 @@ class CredentialsService { } async getRagSettings(): Promise { - const ragCredentials = await this.getCredentialsByCategory('rag_strategy'); - const apiKeysCredentials = await 
this.getCredentialsByCategory('api_keys'); - + const ragCredentials = await this.getCredentialsByCategory("rag_strategy"); + const apiKeysCredentials = await this.getCredentialsByCategory("api_keys"); + const settings: RagSettings = { USE_CONTEXTUAL_EMBEDDINGS: false, CONTEXTUAL_EMBEDDINGS_MAX_WORKERS: 3, USE_HYBRID_SEARCH: true, USE_AGENTIC_RAG: true, USE_RERANKING: true, - MODEL_CHOICE: 'gpt-4.1-nano', - LLM_PROVIDER: 'openai', - LLM_BASE_URL: '', - EMBEDDING_MODEL: '', + MODEL_CHOICE: "gpt-4.1-nano", + LLM_PROVIDER: "openai", + LLM_BASE_URL: "", + EMBEDDING_MODEL: "", // Crawling Performance Settings defaults CRAWL_BATCH_SIZE: 50, CRAWL_MAX_CONCURRENT: 10, - CRAWL_WAIT_STRATEGY: 'domcontentloaded', - CRAWL_PAGE_TIMEOUT: 60000, // Increased from 30s to 60s for documentation sites + CRAWL_WAIT_STRATEGY: "domcontentloaded", + CRAWL_PAGE_TIMEOUT: 60000, // Increased from 30s to 60s for documentation sites CRAWL_DELAY_BEFORE_HTML: 0.5, // Storage Performance Settings defaults DOCUMENT_STORAGE_BATCH_SIZE: 50, @@ -143,30 +168,50 @@ class CredentialsService { MEMORY_THRESHOLD_PERCENT: 80, DISPATCHER_CHECK_INTERVAL: 30, CODE_EXTRACTION_BATCH_SIZE: 50, - CODE_SUMMARY_MAX_WORKERS: 3 + CODE_SUMMARY_MAX_WORKERS: 3, }; // Map credentials to settings - [...ragCredentials, ...apiKeysCredentials].forEach(cred => { + [...ragCredentials, ...apiKeysCredentials].forEach((cred) => { if (cred.key in settings) { // String fields - if (['MODEL_CHOICE', 'LLM_PROVIDER', 'LLM_BASE_URL', 'EMBEDDING_MODEL', 'CRAWL_WAIT_STRATEGY'].includes(cred.key)) { - (settings as any)[cred.key] = cred.value || ''; - } + if ( + [ + "MODEL_CHOICE", + "LLM_PROVIDER", + "LLM_BASE_URL", + "EMBEDDING_MODEL", + "CRAWL_WAIT_STRATEGY", + ].includes(cred.key) + ) { + (settings as any)[cred.key] = cred.value || ""; + } // Number fields - else if (['CONTEXTUAL_EMBEDDINGS_MAX_WORKERS', 'CRAWL_BATCH_SIZE', 'CRAWL_MAX_CONCURRENT', - 'CRAWL_PAGE_TIMEOUT', 'DOCUMENT_STORAGE_BATCH_SIZE', 'EMBEDDING_BATCH_SIZE', - 'DELETE_BATCH_SIZE', 'MEMORY_THRESHOLD_PERCENT', 'DISPATCHER_CHECK_INTERVAL', - 'CODE_EXTRACTION_BATCH_SIZE', 'CODE_SUMMARY_MAX_WORKERS'].includes(cred.key)) { - (settings as any)[cred.key] = parseInt(cred.value || '0', 10) || (settings as any)[cred.key]; + else if ( + [ + "CONTEXTUAL_EMBEDDINGS_MAX_WORKERS", + "CRAWL_BATCH_SIZE", + "CRAWL_MAX_CONCURRENT", + "CRAWL_PAGE_TIMEOUT", + "DOCUMENT_STORAGE_BATCH_SIZE", + "EMBEDDING_BATCH_SIZE", + "DELETE_BATCH_SIZE", + "MEMORY_THRESHOLD_PERCENT", + "DISPATCHER_CHECK_INTERVAL", + "CODE_EXTRACTION_BATCH_SIZE", + "CODE_SUMMARY_MAX_WORKERS", + ].includes(cred.key) + ) { + (settings as any)[cred.key] = + parseInt(cred.value || "0", 10) || (settings as any)[cred.key]; } // Float fields - else if (cred.key === 'CRAWL_DELAY_BEFORE_HTML') { - settings[cred.key] = parseFloat(cred.value || '0.5') || 0.5; + else if (cred.key === "CRAWL_DELAY_BEFORE_HTML") { + settings[cred.key] = parseFloat(cred.value || "0.5") || 0.5; } // Boolean fields else { - (settings as any)[cred.key] = cred.value === 'true'; + (settings as any)[cred.key] = cred.value === "true"; } } }); @@ -175,71 +220,96 @@ class CredentialsService { } async updateCredential(credential: Credential): Promise { - const response = await fetch(`${this.baseUrl}/api/credentials/${credential.key}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(credential), - }); - - if (!response.ok) { - throw new Error('Failed to update credential'); + try { + const response = await fetch( + 
`${this.baseUrl}/api/credentials/${credential.key}`, + { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(credential), + }, + ); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`HTTP ${response.status}: ${errorText}`); + } + + return response.json(); + } catch (error) { + throw this.handleCredentialError( + error, + `Updating credential '${credential.key}'`, + ); } - - return response.json(); } async createCredential(credential: Credential): Promise { - const response = await fetch(`${this.baseUrl}/api/credentials`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(credential), - }); - - if (!response.ok) { - throw new Error('Failed to create credential'); + try { + const response = await fetch(`${this.baseUrl}/api/credentials`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(credential), + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`HTTP ${response.status}: ${errorText}`); + } + + return response.json(); + } catch (error) { + throw this.handleCredentialError( + error, + `Creating credential '${credential.key}'`, + ); } - - return response.json(); } async deleteCredential(key: string): Promise { - const response = await fetch(`${this.baseUrl}/api/credentials/${key}`, { - method: 'DELETE', - }); - - if (!response.ok) { - throw new Error('Failed to delete credential'); + try { + const response = await fetch(`${this.baseUrl}/api/credentials/${key}`, { + method: "DELETE", + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error(`HTTP ${response.status}: ${errorText}`); + } + } catch (error) { + throw this.handleCredentialError(error, `Deleting credential '${key}'`); } } async updateRagSettings(settings: RagSettings): Promise { const promises = []; - + // Update all RAG strategy settings for (const [key, value] of Object.entries(settings)) { // Skip undefined values if (value === undefined) continue; - + promises.push( this.updateCredential({ key, value: value.toString(), is_encrypted: false, - category: 'rag_strategy', - }) + category: "rag_strategy", + }), ); } - + await Promise.all(promises); } async getCodeExtractionSettings(): Promise { - const codeExtractionCredentials = await this.getCredentialsByCategory('code_extraction'); - + const codeExtractionCredentials = + await this.getCredentialsByCategory("code_extraction"); + const settings: CodeExtractionSettings = { MIN_CODE_BLOCK_LENGTH: 250, MAX_CODE_BLOCK_LENGTH: 5000, @@ -252,21 +322,24 @@ class CredentialsService { ENABLE_CONTEXTUAL_LENGTH: true, CODE_EXTRACTION_MAX_WORKERS: 3, CONTEXT_WINDOW_SIZE: 1000, - ENABLE_CODE_SUMMARIES: true + ENABLE_CODE_SUMMARIES: true, }; // Map credentials to settings - codeExtractionCredentials.forEach(cred => { + codeExtractionCredentials.forEach((cred) => { if (cred.key in settings) { const key = cred.key as keyof CodeExtractionSettings; - if (typeof settings[key] === 'number') { - if (key === 'MAX_PROSE_RATIO') { - settings[key] = parseFloat(cred.value || '0.15'); + if (typeof settings[key] === "number") { + if (key === "MAX_PROSE_RATIO") { + settings[key] = parseFloat(cred.value || "0.15"); } else { - settings[key] = parseInt(cred.value || settings[key].toString(), 10); + settings[key] = parseInt( + cred.value || settings[key].toString(), + 10, + ); } - } else if (typeof settings[key] === 'boolean') { - settings[key] = cred.value === 'true'; 
+ } else if (typeof settings[key] === "boolean") { + settings[key] = cred.value === "true"; } } }); @@ -274,9 +347,11 @@ class CredentialsService { return settings; } - async updateCodeExtractionSettings(settings: CodeExtractionSettings): Promise { + async updateCodeExtractionSettings( + settings: CodeExtractionSettings, + ): Promise { const promises = []; - + // Update all code extraction settings for (const [key, value] of Object.entries(settings)) { promises.push( @@ -284,13 +359,13 @@ class CredentialsService { key, value: value.toString(), is_encrypted: false, - category: 'code_extraction', - }) + category: "code_extraction", + }), ); } - + await Promise.all(promises); } } -export const credentialsService = new CredentialsService(); \ No newline at end of file +export const credentialsService = new CredentialsService(); diff --git a/archon-ui-main/src/services/testService.ts b/archon-ui-main/src/services/testService.ts index 9b99957b..cf4b15d1 100644 --- a/archon-ui-main/src/services/testService.ts +++ b/archon-ui-main/src/services/testService.ts @@ -242,18 +242,6 @@ class TestService { } } - /** - * Check if test results are available - */ - async hasTestResults(): Promise { - try { - // Check for latest test results via API - const response = await fetch(`${API_BASE_URL}/api/tests/latest-results`); - return response.ok; - } catch { - return false; - } - } /** * Get coverage data for Test Results Modal from new API endpoints with fallback diff --git a/archon-ui-main/test/errors.test.tsx b/archon-ui-main/test/errors.test.tsx index 7ec739ef..3971f4af 100644 --- a/archon-ui-main/test/errors.test.tsx +++ b/archon-ui-main/test/errors.test.tsx @@ -1,6 +1,7 @@ import { render, screen, fireEvent } from '@testing-library/react' -import { describe, test, expect, vi } from 'vitest' +import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest' import React from 'react' +import { credentialsService } from '../src/services/credentialsService' describe('Error Handling Tests', () => { test('api error simulation', () => { @@ -196,4 +197,40 @@ describe('Error Handling Tests', () => { fireEvent.click(screen.getByText('500 Error')) expect(screen.getByRole('alert')).toHaveTextContent('Something went wrong on our end') }) +}) + +describe('CredentialsService Error Handling', () => { + const originalFetch = global.fetch + + beforeEach(() => { + global.fetch = vi.fn() as any + }) + + afterEach(() => { + global.fetch = originalFetch + }) + + test('should handle network errors with context', async () => { + const mockError = new Error('Network request failed') + ;(global.fetch as any).mockRejectedValueOnce(mockError) + + await expect(credentialsService.createCredential({ + key: 'TEST_KEY', + value: 'test', + is_encrypted: false, + category: 'test' + })).rejects.toThrow(/Network error while creating credential 'test_key'/) + }) + + test('should preserve context in error messages', async () => { + const mockError = new Error('database error') + ;(global.fetch as any).mockRejectedValueOnce(mockError) + + await expect(credentialsService.updateCredential({ + key: 'OPENAI_API_KEY', + value: 'sk-test', + is_encrypted: true, + category: 'api_keys' + })).rejects.toThrow(/Updating credential 'OPENAI_API_KEY' failed/) + }) }) \ No newline at end of file diff --git a/archon-ui-main/test/pages.test.tsx b/archon-ui-main/test/pages.test.tsx index 5666ff9d..bd7111be 100644 --- a/archon-ui-main/test/pages.test.tsx +++ b/archon-ui-main/test/pages.test.tsx @@ -74,7 +74,7 @@ describe('Onboarding Detection Tests', () 
=> { { key: 'LLM_PROVIDER', value: 'openai', category: 'rag_strategy' } ] const apiKeyCreds: NormalizedCredential[] = [ - { key: 'OPENAI_API_KEY', is_encrypted: true, category: 'api_keys' } + { key: 'OPENAI_API_KEY', is_encrypted: true, encrypted_value: 'encrypted_sk-test123', category: 'api_keys' } ] expect(isLmConfigured(ragCreds, apiKeyCreds)).toBe(true) diff --git a/archon-ui-main/test/setup.ts b/archon-ui-main/test/setup.ts index 71f76571..e5c1480d 100644 --- a/archon-ui-main/test/setup.ts +++ b/archon-ui-main/test/setup.ts @@ -2,6 +2,9 @@ import { expect, afterEach, vi } from 'vitest' import { cleanup } from '@testing-library/react' import '@testing-library/jest-dom/vitest' +// Set required environment variables for tests +process.env.ARCHON_SERVER_PORT = '8181' + // Clean up after each test afterEach(() => { cleanup() @@ -15,7 +18,7 @@ global.fetch = vi.fn(() => text: () => Promise.resolve(''), status: 200, } as Response) -) +) as any // Mock WebSocket class MockWebSocket { diff --git a/migration/complete_setup.sql b/migration/complete_setup.sql index 58144eb8..94a1778f 100644 --- a/migration/complete_setup.sql +++ b/migration/complete_setup.sql @@ -3,7 +3,7 @@ -- ===================================================== -- This script combines all migrations into a single file -- for easy one-time database initialization --- +-- -- Run this script in your Supabase SQL Editor to set up -- the complete Archon database schema and initial data -- ===================================================== @@ -47,9 +47,9 @@ BEGIN END; $$ language 'plpgsql'; -CREATE TRIGGER update_archon_settings_updated_at - BEFORE UPDATE ON archon_settings - FOR EACH ROW +CREATE TRIGGER update_archon_settings_updated_at + BEFORE UPDATE ON archon_settings + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -- Create RLS (Row Level Security) policies for settings @@ -197,10 +197,10 @@ CREATE TABLE IF NOT EXISTS archon_crawled_pages ( source_id TEXT NOT NULL, embedding VECTOR(1536), -- OpenAI embeddings are 1536 dimensions created_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL, - + -- Add a unique constraint to prevent duplicate chunks for the same URL UNIQUE(url, chunk_number), - + -- Add foreign key constraint to sources table FOREIGN KEY (source_id) REFERENCES archon_sources(source_id) ); @@ -221,10 +221,10 @@ CREATE TABLE IF NOT EXISTS archon_code_examples ( source_id TEXT NOT NULL, embedding VECTOR(1536), -- OpenAI embeddings are 1536 dimensions created_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL, - + -- Add a unique constraint to prevent duplicate chunks for the same URL UNIQUE(url, chunk_number), - + -- Add foreign key constraint to sources table FOREIGN KEY (source_id) REFERENCES archon_sources(source_id) ); @@ -416,7 +416,7 @@ CREATE TABLE IF NOT EXISTS archon_document_versions ( created_at TIMESTAMPTZ DEFAULT NOW(), -- Ensure we have either project_id OR task_id, not both CONSTRAINT chk_project_or_task CHECK ( - (project_id IS NOT NULL AND task_id IS NULL) OR + (project_id IS NOT NULL AND task_id IS NULL) OR (project_id IS NULL AND task_id IS NOT NULL) ), -- Unique constraint to prevent duplicate version numbers per field @@ -439,51 +439,51 @@ CREATE INDEX IF NOT EXISTS idx_archon_document_versions_version_number ON archon CREATE INDEX IF NOT EXISTS idx_archon_document_versions_created_at ON archon_document_versions(created_at); -- Apply triggers to tables -CREATE OR REPLACE TRIGGER update_archon_projects_updated_at - BEFORE UPDATE ON 
archon_projects +CREATE OR REPLACE TRIGGER update_archon_projects_updated_at + BEFORE UPDATE ON archon_projects FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -CREATE OR REPLACE TRIGGER update_archon_tasks_updated_at - BEFORE UPDATE ON archon_tasks +CREATE OR REPLACE TRIGGER update_archon_tasks_updated_at + BEFORE UPDATE ON archon_tasks FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -- Soft delete function for tasks CREATE OR REPLACE FUNCTION archive_task( task_id_param UUID, archived_by_param TEXT DEFAULT 'system' -) +) RETURNS BOOLEAN AS $$ DECLARE task_exists BOOLEAN; BEGIN -- Check if task exists and is not already archived SELECT EXISTS( - SELECT 1 FROM archon_tasks + SELECT 1 FROM archon_tasks WHERE id = task_id_param AND archived = FALSE ) INTO task_exists; - + IF NOT task_exists THEN RETURN FALSE; END IF; - + -- Archive the task - UPDATE archon_tasks - SET + UPDATE archon_tasks + SET archived = TRUE, archived_at = NOW(), archived_by = archived_by_param, updated_at = NOW() WHERE id = task_id_param; - + -- Also archive all subtasks - UPDATE archon_tasks - SET + UPDATE archon_tasks + SET archived = TRUE, - archived_at = NOW(), + archived_at = NOW(), archived_by = archived_by_param, updated_at = NOW() WHERE parent_task_id = task_id_param AND archived = FALSE; - + RETURN TRUE; END; $$ LANGUAGE plpgsql; @@ -520,8 +520,8 @@ CREATE TABLE IF NOT EXISTS archon_prompts ( CREATE INDEX IF NOT EXISTS idx_archon_prompts_name ON archon_prompts(prompt_name); -- Add trigger to automatically update updated_at timestamp -CREATE OR REPLACE TRIGGER update_archon_prompts_updated_at - BEFORE UPDATE ON archon_prompts +CREATE OR REPLACE TRIGGER update_archon_prompts_updated_at + BEFORE UPDATE ON archon_prompts FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -- ===================================================== @@ -787,9 +787,9 @@ Remember: Create production-ready data models.', 'System prompt for creating dat -- SETUP COMPLETE -- ===================================================== -- Your Archon database is now fully configured! --- +-- -- Next steps: -- 1. Add your OpenAI API key via the Settings UI -- 2. Enable Projects feature if needed -- 3. Start crawling websites or uploading documents --- ===================================================== \ No newline at end of file +-- ===================================================== diff --git a/python/src/server/config/config.py b/python/src/server/config/config.py index a86a13e0..c225aebe 100644 --- a/python/src/server/config/config.py +++ b/python/src/server/config/config.py @@ -6,6 +6,8 @@ import os from dataclasses import dataclass from urllib.parse import urlparse +import jwt + class ConfigurationError(Exception): """Raised when there's an error in configuration.""" @@ -46,6 +48,38 @@ def validate_openai_api_key(api_key: str) -> bool: return True +def validate_supabase_key(supabase_key: str) -> tuple[bool, str]: + """Validate Supabase key type and return validation result. 
+ + Returns: + tuple[bool, str]: (is_valid, message) + - (False, "ANON_KEY_DETECTED") if anon key detected + - (True, "VALID_SERVICE_KEY") if service key detected + - (False, "UNKNOWN_KEY_TYPE:{role}") for unknown roles + - (True, "UNABLE_TO_VALIDATE") if JWT cannot be decoded + """ + if not supabase_key: + return False, "EMPTY_KEY" + + try: + # Decode JWT without verification to check the 'role' claim + # We don't verify the signature since we only need to check the role + decoded = jwt.decode(supabase_key, options={"verify_signature": False}) + role = decoded.get("role") + + if role == "anon": + return False, "ANON_KEY_DETECTED" + elif role == "service_role": + return True, "VALID_SERVICE_KEY" + else: + return False, f"UNKNOWN_KEY_TYPE:{role}" + + except Exception: + # If we can't decode the JWT, we'll allow it to proceed + # This handles new key formats or non-JWT keys + return True, "UNABLE_TO_VALIDATE" + + def validate_supabase_url(url: str) -> bool: """Validate Supabase URL format.""" if not url: @@ -80,6 +114,29 @@ def load_environment_config() -> EnvironmentConfig: validate_openai_api_key(openai_api_key) validate_supabase_url(supabase_url) + # Validate Supabase key type + is_valid_key, key_message = validate_supabase_key(supabase_service_key) + if not is_valid_key: + if key_message == "ANON_KEY_DETECTED": + raise ConfigurationError( + "CRITICAL: You are using a Supabase ANON key instead of a SERVICE key.\n\n" + "The ANON key is a public key with read-only permissions that cannot write to the database.\n" + "This will cause all database operations to fail with 'permission denied' errors.\n\n" + "To fix this:\n" + "1. Go to your Supabase project dashboard\n" + "2. Navigate to Settings > API keys\n" + "3. Find the 'service_role' key (NOT the 'anon' key)\n" + "4. Update your SUPABASE_SERVICE_KEY environment variable\n\n" + "Key characteristics:\n" + "- ANON key: Starts with 'eyJ...' and has role='anon' (public, read-only)\n" + "- SERVICE key: Starts with 'eyJ...' and has role='service_role' (private, full access)\n\n" + "Current key role detected: anon" + ) + elif key_message.startswith("UNKNOWN_KEY_TYPE:"): + role = key_message.split(":", 1)[1] + print(f"WARNING: Unknown Supabase key role '{role}'. 
Proceeding but may cause issues.") + # For UNABLE_TO_VALIDATE, we continue silently + # Optional environment variables with defaults host = os.getenv("HOST", "0.0.0.0") port_str = os.getenv("PORT") @@ -97,8 +154,8 @@ def load_environment_config() -> EnvironmentConfig: # Validate and convert port try: port = int(port_str) - except ValueError: - raise ConfigurationError(f"PORT must be a valid integer, got: {port_str}") + except ValueError as e: + raise ConfigurationError(f"PORT must be a valid integer, got: {port_str}") from e return EnvironmentConfig( openai_api_key=openai_api_key, @@ -110,6 +167,11 @@ def load_environment_config() -> EnvironmentConfig: ) +def get_config() -> EnvironmentConfig: + """Get environment configuration with validation.""" + return load_environment_config() + + def get_rag_strategy_config() -> RAGStrategyConfig: """Load RAG strategy configuration from environment variables.""" diff --git a/python/src/server/main.py b/python/src/server/main.py index 8612f225..bb70427b 100644 --- a/python/src/server/main.py +++ b/python/src/server/main.py @@ -80,6 +80,11 @@ async def lifespan(app: FastAPI): logger.info("🚀 Starting Archon backend...") try: + # Validate configuration FIRST - check for anon vs service key + from .config.config import get_config + + get_config() # This will raise ConfigurationError if anon key detected + # Initialize credentials from database FIRST - this is the foundation for everything else await initialize_credentials() diff --git a/python/src/server/services/credential_service.py b/python/src/server/services/credential_service.py index e8f3ad63..d49aa81c 100644 --- a/python/src/server/services/credential_service.py +++ b/python/src/server/services/credential_service.py @@ -227,14 +227,11 @@ class CredentialService: self._cache[key] = value # Upsert to database with proper conflict handling - result = ( - supabase.table("archon_settings") - .upsert( - data, - on_conflict="key", # Specify the unique column for conflict resolution - ) - .execute() - ) + # Since we validate service key at startup, permission errors here indicate actual database issues + supabase.table("archon_settings").upsert( + data, + on_conflict="key", # Specify the unique column for conflict resolution + ).execute() # Invalidate RAG settings cache if this is a rag_strategy setting if category == "rag_strategy": @@ -256,7 +253,8 @@ class CredentialService: try: supabase = self._get_supabase_client() - result = supabase.table("archon_settings").delete().eq("key", key).execute() + # Since we validate service key at startup, we can directly execute + supabase.table("archon_settings").delete().eq("key", key).execute() # Remove from cache if key in self._cache: diff --git a/python/tests/test_settings_api.py b/python/tests/test_settings_api.py index b51de701..b2c2d9b4 100644 --- a/python/tests/test_settings_api.py +++ b/python/tests/test_settings_api.py @@ -56,3 +56,5 @@ def test_existing_credential_returns_normally(client, mock_supabase_client): assert data["is_encrypted"] is False # Should not have is_default flag for real credentials assert "is_default" not in data + + diff --git a/python/tests/test_supabase_validation.py b/python/tests/test_supabase_validation.py new file mode 100644 index 00000000..9035dee2 --- /dev/null +++ b/python/tests/test_supabase_validation.py @@ -0,0 +1,221 @@ +""" +Unit tests for Supabase key validation functionality. +Tests the JWT-based validation of anon vs service keys. 
+""" + +import pytest +import jwt +from unittest.mock import patch, MagicMock + +from src.server.config.config import ( + validate_supabase_key, + ConfigurationError, + load_environment_config, +) + + +def test_validate_anon_key(): + """Test validation detects anon key correctly.""" + # Create mock anon key JWT + anon_payload = {"role": "anon", "iss": "supabase"} + anon_token = jwt.encode(anon_payload, "secret", algorithm="HS256") + + is_valid, msg = validate_supabase_key(anon_token) + + assert is_valid == False + assert msg == "ANON_KEY_DETECTED" + + +def test_validate_service_key(): + """Test validation detects service key correctly.""" + # Create mock service key JWT + service_payload = {"role": "service_role", "iss": "supabase"} + service_token = jwt.encode(service_payload, "secret", algorithm="HS256") + + is_valid, msg = validate_supabase_key(service_token) + + assert is_valid == True + assert msg == "VALID_SERVICE_KEY" + + +def test_validate_unknown_key(): + """Test validation handles unknown key roles.""" + # Create mock key with unknown role + unknown_payload = {"role": "custom", "iss": "supabase"} + unknown_token = jwt.encode(unknown_payload, "secret", algorithm="HS256") + + is_valid, msg = validate_supabase_key(unknown_token) + + assert is_valid == False + assert "UNKNOWN_KEY_TYPE" in msg + assert "custom" in msg + + +def test_validate_invalid_jwt(): + """Test validation handles invalid JWT format gracefully.""" + is_valid, msg = validate_supabase_key("not-a-jwt") + + # Should allow invalid JWT to proceed (might be new format) + assert is_valid == True + assert msg == "UNABLE_TO_VALIDATE" + + +def test_validate_empty_key(): + """Test validation handles empty key.""" + is_valid, msg = validate_supabase_key("") + + assert is_valid == False + assert msg == "EMPTY_KEY" + + +def test_config_raises_on_anon_key(): + """Test that configuration loading raises error when anon key detected.""" + # Create a mock anon key JWT + anon_payload = {"role": "anon", "iss": "supabase"} + mock_anon_key = jwt.encode(anon_payload, "secret", algorithm="HS256") + + with patch.dict( + "os.environ", + { + "SUPABASE_URL": "https://test.supabase.co", + "SUPABASE_SERVICE_KEY": mock_anon_key, + "OPENAI_API_KEY": "" # Clear any existing key + }, + clear=True # Clear all env vars to ensure isolation + ): + with pytest.raises(ConfigurationError) as exc_info: + load_environment_config() + + error_message = str(exc_info.value) + assert "CRITICAL: You are using a Supabase ANON key" in error_message + assert "service_role" in error_message + assert "permission denied" in error_message + + +def test_config_accepts_service_key(): + """Test that configuration loading accepts service key.""" + # Create a mock service key JWT + service_payload = {"role": "service_role", "iss": "supabase"} + mock_service_key = jwt.encode(service_payload, "secret", algorithm="HS256") + + with patch.dict( + "os.environ", + { + "SUPABASE_URL": "https://test.supabase.co", + "SUPABASE_SERVICE_KEY": mock_service_key, + "PORT": "8051", # Required for config + "OPENAI_API_KEY": "" # Clear any existing key + }, + clear=True # Clear all env vars to ensure isolation + ): + # Should not raise an exception + config = load_environment_config() + assert config.supabase_service_key == mock_service_key + + +def test_config_handles_invalid_jwt(): + """Test that configuration loading handles invalid JWT gracefully.""" + with patch.dict( + "os.environ", + { + "SUPABASE_URL": "https://test.supabase.co", + "SUPABASE_SERVICE_KEY": "invalid-jwt-key", + "PORT": 
"8051", # Required for config + "OPENAI_API_KEY": "" # Clear any existing key + }, + clear=True # Clear all env vars to ensure isolation + ): + with patch("builtins.print") as mock_print: + # Should not raise an exception for invalid JWT + config = load_environment_config() + assert config.supabase_service_key == "invalid-jwt-key" + + +def test_config_warns_on_unknown_role(): + """Test that configuration loading warns for unknown roles. + + NOTE: This currently prints a warning but doesn't fail. + TODO: Per alpha principles, unknown key types should fail fast, not just warn. + """ + # Create a mock key with unknown role + unknown_payload = {"role": "custom_role", "iss": "supabase"} + mock_unknown_key = jwt.encode(unknown_payload, "secret", algorithm="HS256") + + with patch.dict( + "os.environ", + { + "SUPABASE_URL": "https://test.supabase.co", + "SUPABASE_SERVICE_KEY": mock_unknown_key, + "PORT": "8051", # Required for config + "OPENAI_API_KEY": "" # Clear any existing key + }, + clear=True # Clear all env vars to ensure isolation + ): + with patch("builtins.print") as mock_print: + # Should not raise an exception but should print warning + config = load_environment_config() + assert config.supabase_service_key == mock_unknown_key + + # Check that warning was printed + mock_print.assert_called_once() + call_args = mock_print.call_args[0][0] + assert "WARNING: Unknown Supabase key role 'custom_role'" in call_args + + +def test_config_raises_on_anon_key_with_port(): + """Test that anon key detection works properly with all required env vars.""" + # Create a mock anon key JWT + anon_payload = {"role": "anon", "iss": "supabase"} + mock_anon_key = jwt.encode(anon_payload, "secret", algorithm="HS256") + + with patch.dict( + "os.environ", + { + "SUPABASE_URL": "https://test.supabase.co", + "SUPABASE_SERVICE_KEY": mock_anon_key, + "PORT": "8051", + "OPENAI_API_KEY": "sk-test123" # Valid OpenAI key + }, + clear=True + ): + # Should still raise ConfigurationError for anon key even with valid OpenAI key + with pytest.raises(ConfigurationError) as exc_info: + load_environment_config() + + error_message = str(exc_info.value) + assert "CRITICAL: You are using a Supabase ANON key" in error_message + + +def test_jwt_decoding_with_real_structure(): + """Test JWT decoding with realistic Supabase JWT structure.""" + # More realistic Supabase JWT payload structure + realistic_anon_payload = { + "aud": "authenticated", + "exp": 1999999999, + "iat": 1234567890, + "iss": "supabase", + "ref": "abcdefghij", + "role": "anon", + } + + realistic_service_payload = { + "aud": "authenticated", + "exp": 1999999999, + "iat": 1234567890, + "iss": "supabase", + "ref": "abcdefghij", + "role": "service_role", + } + + anon_token = jwt.encode(realistic_anon_payload, "secret", algorithm="HS256") + service_token = jwt.encode(realistic_service_payload, "secret", algorithm="HS256") + + # Test anon key detection + is_valid_anon, msg_anon = validate_supabase_key(anon_token) + assert is_valid_anon == False + assert msg_anon == "ANON_KEY_DETECTED" + + # Test service key detection + is_valid_service, msg_service = validate_supabase_key(service_token) + assert is_valid_service == True + assert msg_service == "VALID_SERVICE_KEY" From 4004090b4539808bfa5cd1fca8c417c3ff5134ac Mon Sep 17 00:00:00 2001 From: Rasmus Widing Date: Sat, 16 Aug 2025 00:23:37 +0300 Subject: [PATCH 2/3] Fix critical issues from code review - Use python-jose (already in dependencies) instead of PyJWT for JWT decoding - Make unknown Supabase key roles fail fast per 
alpha principles - Skip all JWT validations (not just signature) when checking role - Update tests to expect failure for unknown roles Fixes: - No need to add PyJWT dependency - python-jose provides JWT functionality - Unknown key types now raise ConfigurationError instead of warning - JWT decode properly skips all validations to only check role claim --- python/src/server/config/config.py | 22 +++++++++++++++++++--- python/tests/test_supabase_validation.py | 24 +++++++++--------------- 2 files changed, 28 insertions(+), 18 deletions(-) diff --git a/python/src/server/config/config.py b/python/src/server/config/config.py index c225aebe..560fbd77 100644 --- a/python/src/server/config/config.py +++ b/python/src/server/config/config.py @@ -6,7 +6,7 @@ import os from dataclasses import dataclass from urllib.parse import urlparse -import jwt +from jose import jwt class ConfigurationError(Exception): @@ -64,7 +64,18 @@ def validate_supabase_key(supabase_key: str) -> tuple[bool, str]: try: # Decode JWT without verification to check the 'role' claim # We don't verify the signature since we only need to check the role - decoded = jwt.decode(supabase_key, options={"verify_signature": False}) + # Also skip all other validations (aud, exp, etc) since we only care about the role + decoded = jwt.decode( + supabase_key, + '', + options={ + "verify_signature": False, + "verify_aud": False, + "verify_exp": False, + "verify_nbf": False, + "verify_iat": False + } + ) role = decoded.get("role") if role == "anon": @@ -134,7 +145,12 @@ def load_environment_config() -> EnvironmentConfig: ) elif key_message.startswith("UNKNOWN_KEY_TYPE:"): role = key_message.split(":", 1)[1] - print(f"WARNING: Unknown Supabase key role '{role}'. Proceeding but may cause issues.") + raise ConfigurationError( + f"CRITICAL: Unknown Supabase key role '{role}'.\n\n" + f"Expected 'service_role' but found '{role}'.\n" + f"This key type is not supported and will likely cause failures.\n\n" + f"Please use a valid service_role key from your Supabase dashboard." + ) # For UNABLE_TO_VALIDATE, we continue silently # Optional environment variables with defaults diff --git a/python/tests/test_supabase_validation.py b/python/tests/test_supabase_validation.py index 9035dee2..1e24e91a 100644 --- a/python/tests/test_supabase_validation.py +++ b/python/tests/test_supabase_validation.py @@ -4,7 +4,7 @@ Tests the JWT-based validation of anon vs service keys. """ import pytest -import jwt +from jose import jwt from unittest.mock import patch, MagicMock from src.server.config.config import ( @@ -131,12 +131,8 @@ def test_config_handles_invalid_jwt(): assert config.supabase_service_key == "invalid-jwt-key" -def test_config_warns_on_unknown_role(): - """Test that configuration loading warns for unknown roles. - - NOTE: This currently prints a warning but doesn't fail. - TODO: Per alpha principles, unknown key types should fail fast, not just warn. 
- """ +def test_config_fails_on_unknown_role(): + """Test that configuration loading fails fast for unknown roles per alpha principles.""" # Create a mock key with unknown role unknown_payload = {"role": "custom_role", "iss": "supabase"} mock_unknown_key = jwt.encode(unknown_payload, "secret", algorithm="HS256") @@ -151,15 +147,13 @@ def test_config_warns_on_unknown_role(): }, clear=True # Clear all env vars to ensure isolation ): - with patch("builtins.print") as mock_print: - # Should not raise an exception but should print warning - config = load_environment_config() - assert config.supabase_service_key == mock_unknown_key + # Should raise ConfigurationError for unknown role + with pytest.raises(ConfigurationError) as exc_info: + load_environment_config() - # Check that warning was printed - mock_print.assert_called_once() - call_args = mock_print.call_args[0][0] - assert "WARNING: Unknown Supabase key role 'custom_role'" in call_args + error_message = str(exc_info.value) + assert "Unknown Supabase key role 'custom_role'" in error_message + assert "Expected 'service_role'" in error_message def test_config_raises_on_anon_key_with_port(): From 120eae09b2f27aee663ab98cd227b2f056b92e00 Mon Sep 17 00:00:00 2001 From: Rasmus Widing Date: Mon, 18 Aug 2025 12:35:41 +0300 Subject: [PATCH 3/3] Remove unnecessary startup delay script from frontend Dockerfile - Rolled back to match main branch Dockerfile - Removed 3-second sleep script that was added for backend readiness - Container now runs npm directly without intermediate script - Tested and verified all services start correctly without the delay --- archon-ui-main/Dockerfile | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/archon-ui-main/Dockerfile b/archon-ui-main/Dockerfile index f36ed4c1..2ad5d5ff 100644 --- a/archon-ui-main/Dockerfile +++ b/archon-ui-main/Dockerfile @@ -21,9 +21,5 @@ COPY . . # Expose Vite's default port EXPOSE 5173 -# Add a small startup script to wait a moment before starting Vite -# This helps ensure the backend is fully ready even after healthcheck passes -RUN echo '#!/bin/sh\nsleep 3\nexec npm run dev -- --host 0.0.0.0' > /app/start.sh && chmod +x /app/start.sh - # Start Vite dev server with host binding for Docker -CMD ["/app/start.sh"] \ No newline at end of file +CMD ["npm", "run", "dev", "--", "--host", "0.0.0.0"]
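
Note: the key-type check introduced in config.py above can also be exercised standalone to confirm which Supabase key is configured before starting the stack. The snippet below is an illustrative sketch only (it is not part of any patch above); it assumes python-jose is installed and that SUPABASE_SERVICE_KEY is exported in the current shell, and it mirrors the unverified decode used by validate_supabase_key().

# Illustrative sketch only -- not part of the patches above.
# Mirrors validate_supabase_key() in python/src/server/config/config.py:
# decode the JWT without any verification and inspect the 'role' claim.
# Assumes python-jose is installed and SUPABASE_SERVICE_KEY is set.
import os

from jose import jwt

key = os.environ.get("SUPABASE_SERVICE_KEY", "")
if not key:
    print("SUPABASE_SERVICE_KEY is not set")
else:
    try:
        claims = jwt.decode(
            key,
            "",
            options={
                "verify_signature": False,
                "verify_aud": False,
                "verify_exp": False,
                "verify_nbf": False,
                "verify_iat": False,
            },
        )
        role = claims.get("role")
        if role == "service_role":
            print("OK: service_role key detected")
        elif role == "anon":
            print("Wrong key: this is the anon (public) key; use the service_role key instead")
        else:
            print(f"Unknown role claim: {role!r}")
    except Exception:
        # Non-JWT or new-format keys cannot be classified; the server treats
        # these as UNABLE_TO_VALIDATE and proceeds.
        print("Key could not be decoded as a JWT; role cannot be determined")

An anon key here corresponds to the ANON_KEY_DETECTED case that aborts startup with the ConfigurationError added in PATCH 1/3; an unrecognized role corresponds to the fail-fast case added in PATCH 2/3; an undecodable key corresponds to UNABLE_TO_VALIDATE, which the server accepts.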