+ Released{" "}
+ {new Date(data.published_at).toLocaleDateString()}
+ </span>
+ )}
+ </div>
+
+ {error && (
+ <div>
+ <p>
+ {data?.check_error || "Failed to check for updates. Please try again later."}
+ </p>
+ </div>
+ )}
+ </div>
+ );
+}
diff --git a/archon-ui-main/src/features/settings/version/hooks/useVersionQueries.ts b/archon-ui-main/src/features/settings/version/hooks/useVersionQueries.ts
new file mode 100644
index 00000000..e1aefbd8
--- /dev/null
+++ b/archon-ui-main/src/features/settings/version/hooks/useVersionQueries.ts
@@ -0,0 +1,59 @@
+/**
+ * TanStack Query hooks for version checking
+ */
+
+import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
+import { STALE_TIMES } from "@/features/shared/queryPatterns";
+import { useSmartPolling } from "@/features/ui/hooks/useSmartPolling";
+import { versionService } from "../services/versionService";
+import type { VersionCheckResponse } from "../types";
+
+// Query key factory
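+// e.g. versionKeys.check() -> ["version", "check"]; versionKeys.current() -> ["version", "current"]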
+export const versionKeys = {
+ all: ["version"] as const,
+ check: () => [...versionKeys.all, "check"] as const,
+ current: () => [...versionKeys.all, "current"] as const,
+};
+
+/**
+ * Hook to check for version updates
+ * Polls every 5 minutes when tab is visible
+ */
+export function useVersionCheck() {
+ // Smart polling: check every 5 minutes when tab is visible
+ const { refetchInterval } = useSmartPolling(300000); // 5 minutes
+
+ return useQuery<VersionCheckResponse>({
+ queryKey: versionKeys.check(),
+ queryFn: () => versionService.checkVersion(),
+ staleTime: STALE_TIMES.rare, // 5 minutes
+ refetchInterval,
+ retry: false, // Don't retry on 404 or network errors
+ });
+}
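+
+/*
+ * Illustrative usage (a sketch, not part of this PR):
+ *
+ *   const { data, isLoading } = useVersionCheck();
+ *   if (data?.update_available) {
+ *     // render an update banner using data.latest and data.release_url
+ *   }
+ */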
+
+/**
+ * Hook to get current version without checking for updates
+ */
+export function useCurrentVersion() {
+ return useQuery({
+ queryKey: versionKeys.current(),
+ queryFn: () => versionService.getCurrentVersion(),
+ staleTime: STALE_TIMES.static, // Never stale
+ });
+}
+
+/**
+ * Hook to clear version cache and force fresh check
+ */
+export function useClearVersionCache() {
+ const queryClient = useQueryClient();
+
+ return useMutation({
+ mutationFn: () => versionService.clearCache(),
+ onSuccess: () => {
+ // Invalidate version queries to force fresh check
+ queryClient.invalidateQueries({ queryKey: versionKeys.all });
+ },
+ });
+}
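+
+/*
+ * Illustrative usage (a sketch): wire the mutation to a "Check for updates"
+ * button; on success the invalidation above triggers a fresh version check.
+ *
+ *   const clearCache = useClearVersionCache();
+ *   // <button onClick={() => clearCache.mutate()}>Check for updates</button>
+ */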
diff --git a/archon-ui-main/src/features/settings/version/services/versionService.ts b/archon-ui-main/src/features/settings/version/services/versionService.ts
new file mode 100644
index 00000000..4ef45b82
--- /dev/null
+++ b/archon-ui-main/src/features/settings/version/services/versionService.ts
@@ -0,0 +1,49 @@
+/**
+ * Service for version checking and update management
+ */
+
+import { callAPIWithETag } from "@/features/shared/apiWithEtag";
+import type { CurrentVersionResponse, VersionCheckResponse } from "../types";
+
+export const versionService = {
+ /**
+ * Check for available Archon updates
+ */
+ async checkVersion(): Promise<VersionCheckResponse> {
+ try {
+ const response = await callAPIWithETag("/api/version/check");
+ return response as VersionCheckResponse;
+ } catch (error) {
+ console.error("Error checking version:", error);
+ throw error;
+ }
+ },
+
+ /**
+ * Get current Archon version without checking for updates
+ */
+ async getCurrentVersion(): Promise<CurrentVersionResponse> {
+ try {
+ const response = await callAPIWithETag("/api/version/current");
+ return response as CurrentVersionResponse;
+ } catch (error) {
+ console.error("Error getting current version:", error);
+ throw error;
+ }
+ },
+
+ /**
+ * Clear version cache to force fresh check
+ */
+ async clearCache(): Promise<{ message: string; success: boolean }> {
+ try {
+ const response = await callAPIWithETag("/api/version/clear-cache", {
+ method: "POST",
+ });
+ return response as { message: string; success: boolean };
+ } catch (error) {
+ console.error("Error clearing version cache:", error);
+ throw error;
+ }
+ },
+};
diff --git a/archon-ui-main/src/features/settings/version/types/index.ts b/archon-ui-main/src/features/settings/version/types/index.ts
new file mode 100644
index 00000000..04da0860
--- /dev/null
+++ b/archon-ui-main/src/features/settings/version/types/index.ts
@@ -0,0 +1,35 @@
+/**
+ * Type definitions for version checking and update management
+ */
+
+export interface ReleaseAsset {
+ name: string;
+ size: number;
+ download_count: number;
+ browser_download_url: string;
+ content_type: string;
+}
+
+export interface VersionCheckResponse {
+ current: string;
+ latest: string | null;
+ update_available: boolean;
+ release_url: string | null;
+ release_notes: string | null;
+ published_at: string | null;
+ check_error?: string | null;
+ assets?: ReleaseAsset[] | null;
+ author?: string | null;
+}
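+
+// Illustrative response (example values only):
+// {
+//   current: "0.1.0", latest: "0.2.0", update_available: true,
+//   release_url: "https://github.com/...", release_notes: "...",
+//   published_at: "2025-01-01T00:00:00Z"
+// }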
+
+export interface CurrentVersionResponse {
+ version: string;
+ timestamp: string;
+}
+
+export interface VersionStatus {
+ isLoading: boolean;
+ error: Error | null;
+ data: VersionCheckResponse | null;
+ lastChecked: Date | null;
+}
diff --git a/archon-ui-main/src/pages/SettingsPage.tsx b/archon-ui-main/src/pages/SettingsPage.tsx
index ad186e87..20c3c412 100644
--- a/archon-ui-main/src/pages/SettingsPage.tsx
+++ b/archon-ui-main/src/pages/SettingsPage.tsx
@@ -10,6 +10,8 @@ import {
Code,
FileCode,
Bug,
+ Info,
+ Database,
} from "lucide-react";
import { motion, AnimatePresence } from "framer-motion";
import { useToast } from "../features/ui/hooks/useToast";
@@ -28,6 +30,9 @@ import {
RagSettings,
CodeExtractionSettings as CodeExtractionSettingsType,
} from "../services/credentialsService";
+import { UpdateBanner } from "../features/settings/version/components/UpdateBanner";
+import { VersionStatusCard } from "../features/settings/version/components/VersionStatusCard";
+import { MigrationStatusCard } from "../features/settings/migrations/components/MigrationStatusCard";
export const SettingsPage = () => {
const [ragSettings, setRagSettings] = useState({
@@ -106,6 +111,9 @@ export const SettingsPage = () => {
variants={containerVariants}
className="w-full"
>
+ {/* Update Banner */}
+ <UpdateBanner />
+
{/* Header */}
{
+
+ {/* Version Status */}
+ <VersionStatusCard />
+
+ {/* Migration Status */}
+ <MigrationStatusCard />
+
{projectsEnabled && (
diff --git a/migration/0.1.0/004_ollama_migrate_data.sql b/migration/0.1.0/004_ollama_migrate_data.sql
new file mode 100644
--- /dev/null
+++ b/migration/0.1.0/004_ollama_migrate_data.sql
+-- ======================================================================
+-- Migration 004: Ollama Implementation - Migrate Data
+-- Migrates existing embeddings to dimension-specific columns
+-- ======================================================================
+
+BEGIN;
+
+DO $$
+DECLARE
+ crawled_pages_count INTEGER;
+ code_examples_count INTEGER;
+ dimension_detected INTEGER;
+BEGIN
+ -- Check if the legacy 'embedding' column still exists
+ SELECT COUNT(*) INTO crawled_pages_count
+ FROM information_schema.columns
+ WHERE table_name = 'archon_crawled_pages'
+ AND column_name = 'embedding';
+
+ IF crawled_pages_count > 0 THEN
+ -- Detect dimension
+ SELECT vector_dims(embedding) INTO dimension_detected
+ FROM archon_crawled_pages
+ WHERE embedding IS NOT NULL
+ LIMIT 1;
+
+ IF dimension_detected = 1536 THEN
+ UPDATE archon_crawled_pages
+ SET embedding_1536 = embedding,
+ embedding_dimension = 1536,
+ embedding_model = COALESCE(embedding_model, 'text-embedding-3-small')
+ WHERE embedding IS NOT NULL AND embedding_1536 IS NULL;
+ END IF;
+
+ -- Drop old column
+ ALTER TABLE archon_crawled_pages DROP COLUMN IF EXISTS embedding;
+ END IF;
+
+ -- Same for code_examples
+ SELECT COUNT(*) INTO code_examples_count
+ FROM information_schema.columns
+ WHERE table_name = 'archon_code_examples'
+ AND column_name = 'embedding';
+
+ IF code_examples_count > 0 THEN
+ SELECT vector_dims(embedding) INTO dimension_detected
+ FROM archon_code_examples
+ WHERE embedding IS NOT NULL
+ LIMIT 1;
+
+ IF dimension_detected = 1536 THEN
+ UPDATE archon_code_examples
+ SET embedding_1536 = embedding,
+ embedding_dimension = 1536,
+ embedding_model = COALESCE(embedding_model, 'text-embedding-3-small')
+ WHERE embedding IS NOT NULL AND embedding_1536 IS NULL;
+ END IF;
+
+ ALTER TABLE archon_code_examples DROP COLUMN IF EXISTS embedding;
+ END IF;
+END $$;
+
+-- Drop old indexes if they exist
+DROP INDEX IF EXISTS idx_archon_crawled_pages_embedding;
+DROP INDEX IF EXISTS idx_archon_code_examples_embedding;
+
+COMMIT;
+
+SELECT 'Ollama data migrated successfully' AS status;
\ No newline at end of file
diff --git a/migration/0.1.0/005_ollama_create_functions.sql b/migration/0.1.0/005_ollama_create_functions.sql
new file mode 100644
index 00000000..0426cdf6
--- /dev/null
+++ b/migration/0.1.0/005_ollama_create_functions.sql
@@ -0,0 +1,172 @@
+-- ======================================================================
+-- Migration 005: Ollama Implementation - Create Functions
+-- Creates search functions for multi-dimensional embeddings
+-- ======================================================================
+
+BEGIN;
+
+-- Helper function to detect embedding dimension
+CREATE OR REPLACE FUNCTION detect_embedding_dimension(embedding_vector vector)
+RETURNS INTEGER AS $$
+BEGIN
+ RETURN vector_dims(embedding_vector);
+END;
+$$ LANGUAGE plpgsql IMMUTABLE;
+
+-- Helper function to get column name for dimension
+CREATE OR REPLACE FUNCTION get_embedding_column_name(dimension INTEGER)
+RETURNS TEXT AS $$
+BEGIN
+ CASE dimension
+ WHEN 384 THEN RETURN 'embedding_384';
+ WHEN 768 THEN RETURN 'embedding_768';
+ WHEN 1024 THEN RETURN 'embedding_1024';
+ WHEN 1536 THEN RETURN 'embedding_1536';
+ WHEN 3072 THEN RETURN 'embedding_3072';
+ ELSE RAISE EXCEPTION 'Unsupported embedding dimension: %', dimension;
+ END CASE;
+END;
+$$ LANGUAGE plpgsql IMMUTABLE;
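+
+-- Illustrative usage: SELECT get_embedding_column_name(768);  -- returns 'embedding_768'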
+
+-- Multi-dimensional search for crawled pages
+CREATE OR REPLACE FUNCTION match_archon_crawled_pages_multi (
+ query_embedding VECTOR,
+ embedding_dimension INTEGER,
+ match_count INT DEFAULT 10,
+ filter JSONB DEFAULT '{}'::jsonb,
+ source_filter TEXT DEFAULT NULL
+) RETURNS TABLE (
+ id BIGINT,
+ url VARCHAR,
+ chunk_number INTEGER,
+ content TEXT,
+ metadata JSONB,
+ source_id TEXT,
+ similarity FLOAT
+)
+LANGUAGE plpgsql
+AS $$
+#variable_conflict use_column
+DECLARE
+ sql_query TEXT;
+ embedding_column TEXT;
+BEGIN
+ CASE embedding_dimension
+ WHEN 384 THEN embedding_column := 'embedding_384';
+ WHEN 768 THEN embedding_column := 'embedding_768';
+ WHEN 1024 THEN embedding_column := 'embedding_1024';
+ WHEN 1536 THEN embedding_column := 'embedding_1536';
+ WHEN 3072 THEN embedding_column := 'embedding_3072';
+ ELSE RAISE EXCEPTION 'Unsupported embedding dimension: %', embedding_dimension;
+ END CASE;
+
+ sql_query := format('
+ SELECT id, url, chunk_number, content, metadata, source_id,
+ 1 - (%I <=> $1) AS similarity
+ FROM archon_crawled_pages
+ WHERE (%I IS NOT NULL)
+ AND metadata @> $3
+ AND ($4 IS NULL OR source_id = $4)
+ ORDER BY %I <=> $1
+ LIMIT $2',
+ embedding_column, embedding_column, embedding_column);
+
+ RETURN QUERY EXECUTE sql_query USING query_embedding, match_count, filter, source_filter;
+END;
+$$;
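+
+-- Illustrative call (vector literal abbreviated; its dimension must match the
+-- second argument): SELECT * FROM match_archon_crawled_pages_multi('[0.1, ...]'::vector, 768, 5);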
+
+-- Multi-dimensional search for code examples
+CREATE OR REPLACE FUNCTION match_archon_code_examples_multi (
+ query_embedding VECTOR,
+ embedding_dimension INTEGER,
+ match_count INT DEFAULT 10,
+ filter JSONB DEFAULT '{}'::jsonb,
+ source_filter TEXT DEFAULT NULL
+) RETURNS TABLE (
+ id BIGINT,
+ url VARCHAR,
+ chunk_number INTEGER,
+ content TEXT,
+ summary TEXT,
+ metadata JSONB,
+ source_id TEXT,
+ similarity FLOAT
+)
+LANGUAGE plpgsql
+AS $$
+#variable_conflict use_column
+DECLARE
+ sql_query TEXT;
+ embedding_column TEXT;
+BEGIN
+ CASE embedding_dimension
+ WHEN 384 THEN embedding_column := 'embedding_384';
+ WHEN 768 THEN embedding_column := 'embedding_768';
+ WHEN 1024 THEN embedding_column := 'embedding_1024';
+ WHEN 1536 THEN embedding_column := 'embedding_1536';
+ WHEN 3072 THEN embedding_column := 'embedding_3072';
+ ELSE RAISE EXCEPTION 'Unsupported embedding dimension: %', embedding_dimension;
+ END CASE;
+
+ sql_query := format('
+ SELECT id, url, chunk_number, content, summary, metadata, source_id,
+ 1 - (%I <=> $1) AS similarity
+ FROM archon_code_examples
+ WHERE (%I IS NOT NULL)
+ AND metadata @> $3
+ AND ($4 IS NULL OR source_id = $4)
+ ORDER BY %I <=> $1
+ LIMIT $2',
+ embedding_column, embedding_column, embedding_column);
+
+ RETURN QUERY EXECUTE sql_query USING query_embedding, match_count, filter, source_filter;
+END;
+$$;
+
+-- Legacy compatibility (defaults to 1536D)
+CREATE OR REPLACE FUNCTION match_archon_crawled_pages (
+ query_embedding VECTOR(1536),
+ match_count INT DEFAULT 10,
+ filter JSONB DEFAULT '{}'::jsonb,
+ source_filter TEXT DEFAULT NULL
+) RETURNS TABLE (
+ id BIGINT,
+ url VARCHAR,
+ chunk_number INTEGER,
+ content TEXT,
+ metadata JSONB,
+ source_id TEXT,
+ similarity FLOAT
+)
+LANGUAGE plpgsql
+AS $$
+BEGIN
+ RETURN QUERY SELECT * FROM match_archon_crawled_pages_multi(query_embedding, 1536, match_count, filter, source_filter);
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION match_archon_code_examples (
+ query_embedding VECTOR(1536),
+ match_count INT DEFAULT 10,
+ filter JSONB DEFAULT '{}'::jsonb,
+ source_filter TEXT DEFAULT NULL
+) RETURNS TABLE (
+ id BIGINT,
+ url VARCHAR,
+ chunk_number INTEGER,
+ content TEXT,
+ summary TEXT,
+ metadata JSONB,
+ source_id TEXT,
+ similarity FLOAT
+)
+LANGUAGE plpgsql
+AS $$
+BEGIN
+ RETURN QUERY SELECT * FROM match_archon_code_examples_multi(query_embedding, 1536, match_count, filter, source_filter);
+END;
+$$;
+
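+-- Illustrative: existing 1536-D callers keep working unchanged, e.g.
+-- SELECT * FROM match_archon_crawled_pages('[...]'::vector(1536), 10);
+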
+COMMIT;
+
+SELECT 'Ollama functions created successfully' AS status;
\ No newline at end of file
diff --git a/migration/0.1.0/006_ollama_create_indexes_optional.sql b/migration/0.1.0/006_ollama_create_indexes_optional.sql
new file mode 100644
index 00000000..d8a38080
--- /dev/null
+++ b/migration/0.1.0/006_ollama_create_indexes_optional.sql
@@ -0,0 +1,67 @@
+-- ======================================================================
+-- Migration 006: Ollama Implementation - Create Indexes (Optional)
+-- Creates vector indexes for performance (may timeout on large datasets)
+-- ======================================================================
+
+-- IMPORTANT: This migration creates vector indexes which are memory-intensive
+-- If this fails, you can skip it and the system will use brute-force search
+-- You can create these indexes later via direct database connection
+
+SET maintenance_work_mem = '512MB';
+SET statement_timeout = '10min';
+
+-- Create ONE index at a time to avoid memory issues
+-- Comment out any that fail and continue with the next
+
+-- Index 1 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_1536
+ON archon_crawled_pages USING ivfflat (embedding_1536 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Index 2 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_1536
+ON archon_code_examples USING ivfflat (embedding_1536 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Index 3 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_768
+ON archon_crawled_pages USING ivfflat (embedding_768 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Index 4 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_768
+ON archon_code_examples USING ivfflat (embedding_768 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Index 5 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_384
+ON archon_crawled_pages USING ivfflat (embedding_384 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Index 6 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_384
+ON archon_code_examples USING ivfflat (embedding_384 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Index 7 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_1024
+ON archon_crawled_pages USING ivfflat (embedding_1024 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Index 8 of 8
+CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_1024
+ON archon_code_examples USING ivfflat (embedding_1024 vector_cosine_ops)
+WITH (lists = 100);
+
+-- Simple B-tree indexes (these are fast)
+CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_model ON archon_crawled_pages (embedding_model);
+CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_dimension ON archon_crawled_pages (embedding_dimension);
+CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_llm_chat_model ON archon_crawled_pages (llm_chat_model);
+CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_model ON archon_code_examples (embedding_model);
+CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_dimension ON archon_code_examples (embedding_dimension);
+CREATE INDEX IF NOT EXISTS idx_archon_code_examples_llm_chat_model ON archon_code_examples (llm_chat_model);
+
+RESET maintenance_work_mem;
+RESET statement_timeout;
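+
+-- Illustrative check: list which vector indexes were actually created
+-- SELECT indexname FROM pg_indexes
+-- WHERE tablename IN ('archon_crawled_pages', 'archon_code_examples')
+-- AND indexname LIKE 'idx_archon_%embedding%';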
+
+SELECT 'Ollama indexes created (any that timed out were skipped; the error will be visible in the Supabase output)' AS status;
\ No newline at end of file
diff --git a/migration/add_priority_column_to_tasks.sql b/migration/0.1.0/007_add_priority_column_to_tasks.sql
similarity index 100%
rename from migration/add_priority_column_to_tasks.sql
rename to migration/0.1.0/007_add_priority_column_to_tasks.sql
diff --git a/migration/0.1.0/008_add_migration_tracking.sql b/migration/0.1.0/008_add_migration_tracking.sql
new file mode 100644
index 00000000..5cac0c72
--- /dev/null
+++ b/migration/0.1.0/008_add_migration_tracking.sql
@@ -0,0 +1,65 @@
+-- Migration: 008_add_migration_tracking.sql
+-- Description: Create archon_migrations table for tracking applied database migrations
+-- Version: 0.1.0
+-- Author: Archon Team
+-- Date: 2025
+
+-- Create archon_migrations table for tracking applied migrations
+CREATE TABLE IF NOT EXISTS archon_migrations (
+ id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
+ version VARCHAR(20) NOT NULL,
+ migration_name VARCHAR(255) NOT NULL,
+ applied_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+ checksum VARCHAR(32),
+ UNIQUE(version, migration_name)
+);
+
+-- Add index for fast lookups by version
+CREATE INDEX IF NOT EXISTS idx_archon_migrations_version ON archon_migrations(version);
+
+-- Add index for sorting by applied date
+CREATE INDEX IF NOT EXISTS idx_archon_migrations_applied_at ON archon_migrations(applied_at DESC);
+
+-- Add comment describing table purpose
+COMMENT ON TABLE archon_migrations IS 'Tracks database migrations that have been applied to maintain schema version consistency';
+COMMENT ON COLUMN archon_migrations.version IS 'Archon version that introduced this migration';
+COMMENT ON COLUMN archon_migrations.migration_name IS 'Filename of the migration SQL file';
+COMMENT ON COLUMN archon_migrations.applied_at IS 'Timestamp when migration was applied';
+COMMENT ON COLUMN archon_migrations.checksum IS 'Optional MD5 checksum of migration file content';
+
+-- Record this migration as applied (self-recording pattern)
+-- This allows the migration system to bootstrap itself
+INSERT INTO archon_migrations (version, migration_name)
+VALUES ('0.1.0', '008_add_migration_tracking')
+ON CONFLICT (version, migration_name) DO NOTHING;
+
+-- Retroactively record previously applied migrations (001-007)
+-- Since these migrations couldn't self-record (table didn't exist yet),
+-- we record them here to ensure the migration system knows they've been applied
+INSERT INTO archon_migrations (version, migration_name)
+VALUES
+ ('0.1.0', '001_add_source_url_display_name'),
+ ('0.1.0', '002_add_hybrid_search_tsvector'),
+ ('0.1.0', '003_ollama_add_columns'),
+ ('0.1.0', '004_ollama_migrate_data'),
+ ('0.1.0', '005_ollama_create_functions'),
+ ('0.1.0', '006_ollama_create_indexes_optional'),
+ ('0.1.0', '007_add_priority_column_to_tasks')
+ON CONFLICT (version, migration_name) DO NOTHING;
+
+-- Enable Row Level Security on migrations table
+ALTER TABLE archon_migrations ENABLE ROW LEVEL SECURITY;
+
+-- Drop existing policies if they exist (makes this idempotent)
+DROP POLICY IF EXISTS "Allow service role full access to archon_migrations" ON archon_migrations;
+DROP POLICY IF EXISTS "Allow authenticated users to read archon_migrations" ON archon_migrations;
+
+-- Create RLS policies for migrations table
+-- Service role has full access
+CREATE POLICY "Allow service role full access to archon_migrations" ON archon_migrations
+ FOR ALL USING (auth.role() = 'service_role');
+
+-- Authenticated users can only read migrations (they cannot modify migration history)
+CREATE POLICY "Allow authenticated users to read archon_migrations" ON archon_migrations
+ FOR SELECT TO authenticated
+ USING (true);
\ No newline at end of file
diff --git a/migration/0.1.0/DB_UPGRADE_INSTRUCTIONS.md b/migration/0.1.0/DB_UPGRADE_INSTRUCTIONS.md
new file mode 100644
index 00000000..5523d26a
--- /dev/null
+++ b/migration/0.1.0/DB_UPGRADE_INSTRUCTIONS.md
@@ -0,0 +1,157 @@
+# Archon Database Migrations
+
+This folder contains database migration scripts for upgrading existing Archon installations.
+
+## Available Migration Scripts
+
+### 1. `backup_database.sql` - Pre-Migration Backup
+**Always run this FIRST before any migration!**
+
+Creates timestamped backup tables of all your existing data:
+- ✅ Complete backup of `archon_crawled_pages`
+- ✅ Complete backup of `archon_code_examples`
+- ✅ Complete backup of `archon_sources`
+- ✅ Easy restore commands provided
+- ✅ Row count verification
+
+### 2. Migration Scripts (Run in Order)
+
+You only need to run the scripts you haven't already applied. If you aren't sure which ones, it is safe to rerun them (see Migration Safety below).
+
+**2.1. `001_add_source_url_display_name.sql`**
+- Adds display name field to sources table
+- Improves UI presentation of crawled sources
+
+**2.2. `002_add_hybrid_search_tsvector.sql`**
+- Adds full-text search capabilities
+- Implements hybrid search with tsvector columns
+- Creates optimized search indexes
+
+**2.3. `003_ollama_add_columns.sql`**
+- Adds multi-dimensional embedding columns (384, 768, 1024, 1536, 3072 dimensions)
+- Adds model tracking fields (`llm_chat_model`, `embedding_model`, `embedding_dimension`)
+
+**2.4. `004_ollama_migrate_data.sql`**
+- Migrates existing embeddings to new multi-dimensional columns
+- Drops old embedding column after migration
+- Removes obsolete indexes
+
+**2.5. `005_ollama_create_functions.sql`**
+- Creates search functions for multi-dimensional embeddings
+- Adds helper functions for dimension detection
+- Maintains backward compatibility with legacy search functions
+
+**2.6. `006_ollama_create_indexes_optional.sql`**
+- Creates vector indexes for performance (may timeout on large datasets)
+- Creates B-tree indexes for model fields
+- Can be skipped if timeout occurs (system will use brute-force search)
+
+**2.7. `007_add_priority_column_to_tasks.sql`**
+- Adds priority field to tasks table
+- Enables task prioritization in project management
+
+**2.8. `008_add_migration_tracking.sql`**
+- Creates migration tracking table
+- Records all applied migrations
+- Enables migration version control
+
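+Once `008_add_migration_tracking.sql` has been applied, you can confirm what is recorded with a quick query (illustrative):
+
+```sql
+SELECT version, migration_name, applied_at
+FROM archon_migrations
+ORDER BY applied_at;
+```
+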
+## Migration Process (Follow This Order!)
+
+### Step 1: Backup Your Data
+```sql
+-- Run: backup_database.sql
+-- This creates timestamped backup tables of all your data
+```
+
+### Step 2: Run All Migration Scripts (In Order!)
+```sql
+-- Run each script in sequence:
+-- 1. Run: 001_add_source_url_display_name.sql
+-- 2. Run: 002_add_hybrid_search_tsvector.sql
+-- 3. Run: 003_ollama_add_columns.sql
+-- 4. Run: 004_ollama_migrate_data.sql
+-- 5. Run: 005_ollama_create_functions.sql
+-- 6. Run: 006_ollama_create_indexes_optional.sql (optional - may timeout)
+-- 7. Run: 007_add_priority_column_to_tasks.sql
+-- 8. Run: 008_add_migration_tracking.sql
+```
+
+### Step 3: Restart Services
+```bash
+docker compose restart
+```
+
+## How to Run Migrations
+
+### Method 1: Using Supabase Dashboard (Recommended)
+1. Open your Supabase project dashboard
+2. Go to **SQL Editor**
+3. Copy and paste the contents of the migration file
+4. Click **Run** to execute the migration
+5. **Important**: Supabase only shows the result of the last query - all our scripts end with a status summary table that shows the complete results
+
+### Method 2: Using psql Command Line
+```bash
+# Connect to your database
+psql -h your-supabase-host -p 5432 -U postgres -d postgres
+
+# Run the migrations in order
+\i /path/to/001_add_source_url_display_name.sql
+\i /path/to/002_add_hybrid_search_tsvector.sql
+\i /path/to/003_ollama_add_columns.sql
+\i /path/to/004_ollama_migrate_data.sql
+\i /path/to/005_ollama_create_functions.sql
+\i /path/to/006_ollama_create_indexes_optional.sql
+\i /path/to/007_add_priority_column_to_tasks.sql
+\i /path/to/008_add_migration_tracking.sql
+
+# Exit
+\q
+```
+
+### Method 3: Using Docker (if using local Supabase)
+```bash
+# Copy migrations to container
+docker cp 001_add_source_url_display_name.sql supabase-db:/tmp/
+docker cp 002_add_hybrid_search_tsvector.sql supabase-db:/tmp/
+docker cp 003_ollama_add_columns.sql supabase-db:/tmp/
+docker cp 004_ollama_migrate_data.sql supabase-db:/tmp/
+docker cp 005_ollama_create_functions.sql supabase-db:/tmp/
+docker cp 006_ollama_create_indexes_optional.sql supabase-db:/tmp/
+docker cp 007_add_priority_column_to_tasks.sql supabase-db:/tmp/
+docker cp 008_add_migration_tracking.sql supabase-db:/tmp/
+
+# Execute migrations in order
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/001_add_source_url_display_name.sql
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/002_add_hybrid_search_tsvector.sql
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/003_ollama_add_columns.sql
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/004_ollama_migrate_data.sql
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/005_ollama_create_functions.sql
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/006_ollama_create_indexes_optional.sql
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/007_add_priority_column_to_tasks.sql
+docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/008_add_migration_tracking.sql
+```
+
+## Migration Safety
+
+- ✅ **Safe to run multiple times** - Uses `IF NOT EXISTS` checks
+- ✅ **Non-destructive** - Preserves all existing data
+- ✅ **Automatic rollback** - Uses database transactions
+- ✅ **Comprehensive logging** - Detailed progress notifications
+
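+For example, the Ollama migrations add columns with the idempotent pattern below (snippet adapted from the former `upgrade_database.sql`), so rerunning a script is harmless:
+
+```sql
+ALTER TABLE archon_crawled_pages
+ADD COLUMN IF NOT EXISTS embedding_768 VECTOR(768);
+```
+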
+## After Migration
+
+1. **Restart Archon Services:**
+ ```bash
+ docker-compose restart
+ ```
+
+2. **Verify Migration:**
+ - Check the Archon logs for any errors
+ - Try running a test crawl
+ - Verify search functionality works (see the example query after this list)
+
+3. **Configure New Features:**
+ - Go to Settings page in Archon UI
+ - Configure your preferred LLM and embedding models
+ - New crawls will automatically use model tracking
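+
+Example verification query (adapted from the previous validation script) to confirm the new columns exist:
+
+```sql
+SELECT column_name
+FROM information_schema.columns
+WHERE table_name = 'archon_crawled_pages'
+AND column_name IN ('llm_chat_model', 'embedding_model', 'embedding_dimension', 'embedding_384', 'embedding_768');
+```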
diff --git a/migration/DB_UPGRADE_INSTRUCTIONS.md b/migration/DB_UPGRADE_INSTRUCTIONS.md
deleted file mode 100644
index 5ce32524..00000000
--- a/migration/DB_UPGRADE_INSTRUCTIONS.md
+++ /dev/null
@@ -1,167 +0,0 @@
-# Archon Database Migrations
-
-This folder contains database migration scripts for upgrading existing Archon installations.
-
-## Available Migration Scripts
-
-### 1. `backup_database.sql` - Pre-Migration Backup
-**Always run this FIRST before any migration!**
-
-Creates timestamped backup tables of all your existing data:
-- ✅ Complete backup of `archon_crawled_pages`
-- ✅ Complete backup of `archon_code_examples`
-- ✅ Complete backup of `archon_sources`
-- ✅ Easy restore commands provided
-- ✅ Row count verification
-
-### 2. `upgrade_database.sql` - Main Migration Script
-**Use this migration if you:**
-- Have an existing Archon installation from before multi-dimensional embedding support
-- Want to upgrade to the latest features including model tracking
-- Need to migrate existing embedding data to the new schema
-
-**Features added:**
-- ✅ Multi-dimensional embedding support (384, 768, 1024, 1536, 3072 dimensions)
-- ✅ Model tracking fields (`llm_chat_model`, `embedding_model`, `embedding_dimension`)
-- ✅ Optimized indexes for improved search performance
-- ✅ Enhanced search functions with dimension-aware querying
-- ✅ Automatic migration of existing embedding data
-- ✅ Legacy compatibility maintained
-
-### 3. `validate_migration.sql` - Post-Migration Validation
-**Run this after the migration to verify everything worked correctly**
-
-Validates your migration results:
-- ✅ Verifies all required columns were added
-- ✅ Checks that database indexes were created
-- ✅ Tests that all functions are working
-- ✅ Shows sample data with new fields
-- ✅ Provides clear success/failure reporting
-
-## Migration Process (Follow This Order!)
-
-### Step 1: Backup Your Data
-```sql
--- Run: backup_database.sql
--- This creates timestamped backup tables of all your data
-```
-
-### Step 2: Run the Main Migration
-```sql
--- Run: upgrade_database.sql
--- This adds all the new features and migrates existing data
-```
-
-### Step 3: Validate the Results
-```sql
--- Run: validate_migration.sql
--- This verifies everything worked correctly
-```
-
-### Step 4: Restart Services
-```bash
-docker compose restart
-```
-
-## How to Run Migrations
-
-### Method 1: Using Supabase Dashboard (Recommended)
-1. Open your Supabase project dashboard
-2. Go to **SQL Editor**
-3. Copy and paste the contents of the migration file
-4. Click **Run** to execute the migration
-5. **Important**: Supabase only shows the result of the last query - all our scripts end with a status summary table that shows the complete results
-
-### Method 2: Using psql Command Line
-```bash
-# Connect to your database
-psql -h your-supabase-host -p 5432 -U postgres -d postgres
-
-# Run the migration
-\i /path/to/upgrade_database.sql
-
-# Exit
-\q
-```
-
-### Method 3: Using Docker (if using local Supabase)
-```bash
-# Copy migration to container
-docker cp upgrade_database.sql supabase-db:/tmp/
-
-# Execute migration
-docker exec -it supabase-db psql -U postgres -d postgres -f /tmp/upgrade_database.sql
-```
-
-## Migration Safety
-
-- ✅ **Safe to run multiple times** - Uses `IF NOT EXISTS` checks
-- ✅ **Non-destructive** - Preserves all existing data
-- ✅ **Automatic rollback** - Uses database transactions
-- ✅ **Comprehensive logging** - Detailed progress notifications
-
-## After Migration
-
-1. **Restart Archon Services:**
- ```bash
- docker-compose restart
- ```
-
-2. **Verify Migration:**
- - Check the Archon logs for any errors
- - Try running a test crawl
- - Verify search functionality works
-
-3. **Configure New Features:**
- - Go to Settings page in Archon UI
- - Configure your preferred LLM and embedding models
- - New crawls will automatically use model tracking
-
-## Troubleshooting
-
-### Permission Errors
-If you get permission errors, ensure your database user has sufficient privileges:
-```sql
-GRANT ALL PRIVILEGES ON DATABASE postgres TO your_user;
-GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO your_user;
-```
-
-### Index Creation Failures
-If index creation fails due to resource constraints, the migration will continue. You can create indexes manually later:
-```sql
--- Example: Create missing index for 768-dimensional embeddings
-CREATE INDEX idx_archon_crawled_pages_embedding_768
-ON archon_crawled_pages USING ivfflat (embedding_768 vector_cosine_ops)
-WITH (lists = 100);
-```
-
-### Migration Verification
-Check that the migration completed successfully:
-```sql
--- Verify new columns exist
-SELECT column_name
-FROM information_schema.columns
-WHERE table_name = 'archon_crawled_pages'
-AND column_name IN ('llm_chat_model', 'embedding_model', 'embedding_dimension', 'embedding_384', 'embedding_768');
-
--- Verify functions exist
-SELECT routine_name
-FROM information_schema.routines
-WHERE routine_name IN ('match_archon_crawled_pages_multi', 'detect_embedding_dimension');
-```
-
-## Support
-
-If you encounter issues with the migration:
-
-1. Check the console output for detailed error messages
-2. Verify your database connection and permissions
-3. Ensure you have sufficient disk space for index creation
-4. Create a GitHub issue with the error details if problems persist
-
-## Version Compatibility
-
-- **Archon v2.0+**: Use `upgrade_database.sql`
-- **Earlier versions**: Use `complete_setup.sql` for fresh installations
-
-This migration is designed to bring any Archon installation up to the latest schema standards while preserving all existing data and functionality.
\ No newline at end of file
diff --git a/migration/RESET_DB.sql b/migration/RESET_DB.sql
index 775464f5..ef0066a9 100644
--- a/migration/RESET_DB.sql
+++ b/migration/RESET_DB.sql
@@ -63,7 +63,11 @@ BEGIN
-- Prompts policies
DROP POLICY IF EXISTS "Allow service role full access to archon_prompts" ON archon_prompts;
DROP POLICY IF EXISTS "Allow authenticated users to read archon_prompts" ON archon_prompts;
-
+
+ -- Migration tracking policies
+ DROP POLICY IF EXISTS "Allow service role full access to archon_migrations" ON archon_migrations;
+ DROP POLICY IF EXISTS "Allow authenticated users to read archon_migrations" ON archon_migrations;
+
-- Legacy table policies (for migration from old schema)
DROP POLICY IF EXISTS "Allow service role full access" ON settings;
DROP POLICY IF EXISTS "Allow authenticated users to read and update" ON settings;
@@ -174,7 +178,10 @@ BEGIN
-- Configuration System - new archon_ prefixed table
DROP TABLE IF EXISTS archon_settings CASCADE;
-
+
+ -- Migration tracking table
+ DROP TABLE IF EXISTS archon_migrations CASCADE;
+
-- Legacy tables (without archon_ prefix) - for migration purposes
DROP TABLE IF EXISTS document_versions CASCADE;
DROP TABLE IF EXISTS project_sources CASCADE;
diff --git a/migration/complete_setup.sql b/migration/complete_setup.sql
index 322e0b2f..1609060c 100644
--- a/migration/complete_setup.sql
+++ b/migration/complete_setup.sql
@@ -951,6 +951,62 @@ COMMENT ON COLUMN archon_document_versions.change_type IS 'Type of change: creat
COMMENT ON COLUMN archon_document_versions.document_id IS 'For docs arrays, the specific document ID that was changed';
COMMENT ON COLUMN archon_document_versions.task_id IS 'DEPRECATED: No longer used for new versions, kept for historical task version data';
+-- =====================================================
+-- SECTION 7: MIGRATION TRACKING
+-- =====================================================
+
+-- Create archon_migrations table for tracking applied database migrations
+CREATE TABLE IF NOT EXISTS archon_migrations (
+ id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
+ version VARCHAR(20) NOT NULL,
+ migration_name VARCHAR(255) NOT NULL,
+ applied_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+ checksum VARCHAR(32),
+ UNIQUE(version, migration_name)
+);
+
+-- Add indexes for fast lookups
+CREATE INDEX IF NOT EXISTS idx_archon_migrations_version ON archon_migrations(version);
+CREATE INDEX IF NOT EXISTS idx_archon_migrations_applied_at ON archon_migrations(applied_at DESC);
+
+-- Add comments describing table purpose
+COMMENT ON TABLE archon_migrations IS 'Tracks database migrations that have been applied to maintain schema version consistency';
+COMMENT ON COLUMN archon_migrations.version IS 'Archon version that introduced this migration';
+COMMENT ON COLUMN archon_migrations.migration_name IS 'Filename of the migration SQL file';
+COMMENT ON COLUMN archon_migrations.applied_at IS 'Timestamp when migration was applied';
+COMMENT ON COLUMN archon_migrations.checksum IS 'Optional MD5 checksum of migration file content';
+
+-- Record all migrations as applied since this is a complete setup
+-- This ensures the migration system knows the database is fully up-to-date
+INSERT INTO archon_migrations (version, migration_name)
+VALUES
+ ('0.1.0', '001_add_source_url_display_name'),
+ ('0.1.0', '002_add_hybrid_search_tsvector'),
+ ('0.1.0', '003_ollama_add_columns'),
+ ('0.1.0', '004_ollama_migrate_data'),
+ ('0.1.0', '005_ollama_create_functions'),
+ ('0.1.0', '006_ollama_create_indexes_optional'),
+ ('0.1.0', '007_add_priority_column_to_tasks'),
+ ('0.1.0', '008_add_migration_tracking')
+ON CONFLICT (version, migration_name) DO NOTHING;
+
+-- Enable Row Level Security on migrations table
+ALTER TABLE archon_migrations ENABLE ROW LEVEL SECURITY;
+
+-- Drop existing policies if they exist (makes this idempotent)
+DROP POLICY IF EXISTS "Allow service role full access to archon_migrations" ON archon_migrations;
+DROP POLICY IF EXISTS "Allow authenticated users to read archon_migrations" ON archon_migrations;
+
+-- Create RLS policies for migrations table
+-- Service role has full access
+CREATE POLICY "Allow service role full access to archon_migrations" ON archon_migrations
+ FOR ALL USING (auth.role() = 'service_role');
+
+-- Authenticated users can only read migrations (they cannot modify migration history)
+CREATE POLICY "Allow authenticated users to read archon_migrations" ON archon_migrations
+ FOR SELECT TO authenticated
+ USING (true);
+
-- =====================================================
-- SECTION 8: PROMPTS TABLE
-- =====================================================
diff --git a/migration/upgrade_database.sql b/migration/upgrade_database.sql
deleted file mode 100644
index 30a4f486..00000000
--- a/migration/upgrade_database.sql
+++ /dev/null
@@ -1,518 +0,0 @@
--- ======================================================================
--- UPGRADE TO MODEL TRACKING AND MULTI-DIMENSIONAL EMBEDDINGS
--- ======================================================================
--- This migration upgrades existing Archon installations to support:
--- 1. Multi-dimensional embedding columns (768, 1024, 1536, 3072)
--- 2. Model tracking fields (llm_chat_model, embedding_model, embedding_dimension)
--- 3. 384-dimension support for smaller embedding models
--- 4. Enhanced search functions for multi-dimensional support
--- ======================================================================
---
--- IMPORTANT: Run this ONLY if you have an existing Archon installation
--- that was created BEFORE the multi-dimensional embedding support.
---
--- This script is SAFE to run multiple times - it uses IF NOT EXISTS checks.
--- ======================================================================
-
-BEGIN;
-
--- ======================================================================
--- SECTION 1: ADD MULTI-DIMENSIONAL EMBEDDING COLUMNS
--- ======================================================================
-
--- Add multi-dimensional embedding columns to archon_crawled_pages
-ALTER TABLE archon_crawled_pages
-ADD COLUMN IF NOT EXISTS embedding_384 VECTOR(384), -- Small embedding models
-ADD COLUMN IF NOT EXISTS embedding_768 VECTOR(768), -- Google/Ollama models
-ADD COLUMN IF NOT EXISTS embedding_1024 VECTOR(1024), -- Ollama large models
-ADD COLUMN IF NOT EXISTS embedding_1536 VECTOR(1536), -- OpenAI standard models
-ADD COLUMN IF NOT EXISTS embedding_3072 VECTOR(3072); -- OpenAI large models
-
--- Add multi-dimensional embedding columns to archon_code_examples
-ALTER TABLE archon_code_examples
-ADD COLUMN IF NOT EXISTS embedding_384 VECTOR(384), -- Small embedding models
-ADD COLUMN IF NOT EXISTS embedding_768 VECTOR(768), -- Google/Ollama models
-ADD COLUMN IF NOT EXISTS embedding_1024 VECTOR(1024), -- Ollama large models
-ADD COLUMN IF NOT EXISTS embedding_1536 VECTOR(1536), -- OpenAI standard models
-ADD COLUMN IF NOT EXISTS embedding_3072 VECTOR(3072); -- OpenAI large models
-
--- ======================================================================
--- SECTION 2: ADD MODEL TRACKING COLUMNS
--- ======================================================================
-
--- Add model tracking columns to archon_crawled_pages
-ALTER TABLE archon_crawled_pages
-ADD COLUMN IF NOT EXISTS llm_chat_model TEXT, -- LLM model used for processing (e.g., 'gpt-4', 'llama3:8b')
-ADD COLUMN IF NOT EXISTS embedding_model TEXT, -- Embedding model used (e.g., 'text-embedding-3-large', 'all-MiniLM-L6-v2')
-ADD COLUMN IF NOT EXISTS embedding_dimension INTEGER; -- Dimension of the embedding used (384, 768, 1024, 1536, 3072)
-
--- Add model tracking columns to archon_code_examples
-ALTER TABLE archon_code_examples
-ADD COLUMN IF NOT EXISTS llm_chat_model TEXT, -- LLM model used for processing (e.g., 'gpt-4', 'llama3:8b')
-ADD COLUMN IF NOT EXISTS embedding_model TEXT, -- Embedding model used (e.g., 'text-embedding-3-large', 'all-MiniLM-L6-v2')
-ADD COLUMN IF NOT EXISTS embedding_dimension INTEGER; -- Dimension of the embedding used (384, 768, 1024, 1536, 3072)
-
--- ======================================================================
--- SECTION 3: MIGRATE EXISTING EMBEDDING DATA
--- ======================================================================
-
--- Check if there's existing embedding data in old 'embedding' column
-DO $$
-DECLARE
- crawled_pages_count INTEGER;
- code_examples_count INTEGER;
- dimension_detected INTEGER;
-BEGIN
- -- Check if old embedding column exists and has data
- SELECT COUNT(*) INTO crawled_pages_count
- FROM information_schema.columns
- WHERE table_name = 'archon_crawled_pages'
- AND column_name = 'embedding';
-
- SELECT COUNT(*) INTO code_examples_count
- FROM information_schema.columns
- WHERE table_name = 'archon_code_examples'
- AND column_name = 'embedding';
-
- -- If old embedding columns exist, migrate the data
- IF crawled_pages_count > 0 THEN
- RAISE NOTICE 'Found existing embedding column in archon_crawled_pages - migrating data...';
-
- -- Detect dimension from first non-null embedding
- SELECT vector_dims(embedding) INTO dimension_detected
- FROM archon_crawled_pages
- WHERE embedding IS NOT NULL
- LIMIT 1;
-
- IF dimension_detected IS NOT NULL THEN
- RAISE NOTICE 'Detected embedding dimension: %', dimension_detected;
-
- -- Migrate based on detected dimension
- CASE dimension_detected
- WHEN 384 THEN
- UPDATE archon_crawled_pages
- SET embedding_384 = embedding,
- embedding_dimension = 384,
- embedding_model = COALESCE(embedding_model, 'legacy-384d-model')
- WHERE embedding IS NOT NULL AND embedding_384 IS NULL;
-
- WHEN 768 THEN
- UPDATE archon_crawled_pages
- SET embedding_768 = embedding,
- embedding_dimension = 768,
- embedding_model = COALESCE(embedding_model, 'legacy-768d-model')
- WHERE embedding IS NOT NULL AND embedding_768 IS NULL;
-
- WHEN 1024 THEN
- UPDATE archon_crawled_pages
- SET embedding_1024 = embedding,
- embedding_dimension = 1024,
- embedding_model = COALESCE(embedding_model, 'legacy-1024d-model')
- WHERE embedding IS NOT NULL AND embedding_1024 IS NULL;
-
- WHEN 1536 THEN
- UPDATE archon_crawled_pages
- SET embedding_1536 = embedding,
- embedding_dimension = 1536,
- embedding_model = COALESCE(embedding_model, 'text-embedding-3-small')
- WHERE embedding IS NOT NULL AND embedding_1536 IS NULL;
-
- WHEN 3072 THEN
- UPDATE archon_crawled_pages
- SET embedding_3072 = embedding,
- embedding_dimension = 3072,
- embedding_model = COALESCE(embedding_model, 'text-embedding-3-large')
- WHERE embedding IS NOT NULL AND embedding_3072 IS NULL;
-
- ELSE
- RAISE NOTICE 'Unsupported embedding dimension detected: %. Skipping migration.', dimension_detected;
- END CASE;
-
- RAISE NOTICE 'Migrated existing embeddings to dimension-specific columns';
- END IF;
- END IF;
-
- -- Migrate code examples if they exist
- IF code_examples_count > 0 THEN
- RAISE NOTICE 'Found existing embedding column in archon_code_examples - migrating data...';
-
- -- Detect dimension from first non-null embedding
- SELECT vector_dims(embedding) INTO dimension_detected
- FROM archon_code_examples
- WHERE embedding IS NOT NULL
- LIMIT 1;
-
- IF dimension_detected IS NOT NULL THEN
- RAISE NOTICE 'Detected code examples embedding dimension: %', dimension_detected;
-
- -- Migrate based on detected dimension
- CASE dimension_detected
- WHEN 384 THEN
- UPDATE archon_code_examples
- SET embedding_384 = embedding,
- embedding_dimension = 384,
- embedding_model = COALESCE(embedding_model, 'legacy-384d-model')
- WHERE embedding IS NOT NULL AND embedding_384 IS NULL;
-
- WHEN 768 THEN
- UPDATE archon_code_examples
- SET embedding_768 = embedding,
- embedding_dimension = 768,
- embedding_model = COALESCE(embedding_model, 'legacy-768d-model')
- WHERE embedding IS NOT NULL AND embedding_768 IS NULL;
-
- WHEN 1024 THEN
- UPDATE archon_code_examples
- SET embedding_1024 = embedding,
- embedding_dimension = 1024,
- embedding_model = COALESCE(embedding_model, 'legacy-1024d-model')
- WHERE embedding IS NOT NULL AND embedding_1024 IS NULL;
-
- WHEN 1536 THEN
- UPDATE archon_code_examples
- SET embedding_1536 = embedding,
- embedding_dimension = 1536,
- embedding_model = COALESCE(embedding_model, 'text-embedding-3-small')
- WHERE embedding IS NOT NULL AND embedding_1536 IS NULL;
-
- WHEN 3072 THEN
- UPDATE archon_code_examples
- SET embedding_3072 = embedding,
- embedding_dimension = 3072,
- embedding_model = COALESCE(embedding_model, 'text-embedding-3-large')
- WHERE embedding IS NOT NULL AND embedding_3072 IS NULL;
-
- ELSE
- RAISE NOTICE 'Unsupported code examples embedding dimension: %. Skipping migration.', dimension_detected;
- END CASE;
-
- RAISE NOTICE 'Migrated existing code example embeddings to dimension-specific columns';
- END IF;
- END IF;
-END $$;
-
--- ======================================================================
--- SECTION 4: CLEANUP LEGACY EMBEDDING COLUMNS
--- ======================================================================
-
--- Remove old embedding columns after successful migration
-DO $$
-DECLARE
- crawled_pages_count INTEGER;
- code_examples_count INTEGER;
-BEGIN
- -- Check if old embedding column exists in crawled pages
- SELECT COUNT(*) INTO crawled_pages_count
- FROM information_schema.columns
- WHERE table_name = 'archon_crawled_pages'
- AND column_name = 'embedding';
-
- -- Check if old embedding column exists in code examples
- SELECT COUNT(*) INTO code_examples_count
- FROM information_schema.columns
- WHERE table_name = 'archon_code_examples'
- AND column_name = 'embedding';
-
- -- Drop old embedding column from crawled pages if it exists
- IF crawled_pages_count > 0 THEN
- RAISE NOTICE 'Dropping legacy embedding column from archon_crawled_pages...';
- ALTER TABLE archon_crawled_pages DROP COLUMN embedding;
- RAISE NOTICE 'Successfully removed legacy embedding column from archon_crawled_pages';
- END IF;
-
- -- Drop old embedding column from code examples if it exists
- IF code_examples_count > 0 THEN
- RAISE NOTICE 'Dropping legacy embedding column from archon_code_examples...';
- ALTER TABLE archon_code_examples DROP COLUMN embedding;
- RAISE NOTICE 'Successfully removed legacy embedding column from archon_code_examples';
- END IF;
-
- -- Drop any indexes on the old embedding column if they exist
- DROP INDEX IF EXISTS idx_archon_crawled_pages_embedding;
- DROP INDEX IF EXISTS idx_archon_code_examples_embedding;
-
- RAISE NOTICE 'Legacy column cleanup completed';
-END $$;
-
--- ======================================================================
--- SECTION 5: CREATE OPTIMIZED INDEXES
--- ======================================================================
-
--- Create indexes for archon_crawled_pages (multi-dimensional support)
-CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_384
-ON archon_crawled_pages USING ivfflat (embedding_384 vector_cosine_ops)
-WITH (lists = 100);
-
-CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_768
-ON archon_crawled_pages USING ivfflat (embedding_768 vector_cosine_ops)
-WITH (lists = 100);
-
-CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_1024
-ON archon_crawled_pages USING ivfflat (embedding_1024 vector_cosine_ops)
-WITH (lists = 100);
-
-CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_1536
-ON archon_crawled_pages USING ivfflat (embedding_1536 vector_cosine_ops)
-WITH (lists = 100);
-
--- Note: 3072-dimensional embeddings cannot have vector indexes due to PostgreSQL vector extension 2000 dimension limit
--- The embedding_3072 column exists but cannot be indexed with current pgvector version
--- Brute force search will be used for 3072-dimensional vectors
--- CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_3072
--- ON archon_crawled_pages USING hnsw (embedding_3072 vector_cosine_ops);
-
--- Create indexes for archon_code_examples (multi-dimensional support)
-CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_384
-ON archon_code_examples USING ivfflat (embedding_384 vector_cosine_ops)
-WITH (lists = 100);
-
-CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_768
-ON archon_code_examples USING ivfflat (embedding_768 vector_cosine_ops)
-WITH (lists = 100);
-
-CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_1024
-ON archon_code_examples USING ivfflat (embedding_1024 vector_cosine_ops)
-WITH (lists = 100);
-
-CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_1536
-ON archon_code_examples USING ivfflat (embedding_1536 vector_cosine_ops)
-WITH (lists = 100);
-
--- Note: 3072-dimensional embeddings cannot have vector indexes due to PostgreSQL vector extension 2000 dimension limit
--- The embedding_3072 column exists but cannot be indexed with current pgvector version
--- Brute force search will be used for 3072-dimensional vectors
--- CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_3072
--- ON archon_code_examples USING hnsw (embedding_3072 vector_cosine_ops);
-
--- Create indexes for model tracking columns
-CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_model
-ON archon_crawled_pages (embedding_model);
-
-CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_embedding_dimension
-ON archon_crawled_pages (embedding_dimension);
-
-CREATE INDEX IF NOT EXISTS idx_archon_crawled_pages_llm_chat_model
-ON archon_crawled_pages (llm_chat_model);
-
-CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_model
-ON archon_code_examples (embedding_model);
-
-CREATE INDEX IF NOT EXISTS idx_archon_code_examples_embedding_dimension
-ON archon_code_examples (embedding_dimension);
-
-CREATE INDEX IF NOT EXISTS idx_archon_code_examples_llm_chat_model
-ON archon_code_examples (llm_chat_model);
-
--- ======================================================================
--- SECTION 6: HELPER FUNCTIONS FOR MULTI-DIMENSIONAL SUPPORT
--- ======================================================================
-
--- Function to detect embedding dimension from vector
-CREATE OR REPLACE FUNCTION detect_embedding_dimension(embedding_vector vector)
-RETURNS INTEGER AS $$
-BEGIN
- RETURN vector_dims(embedding_vector);
-END;
-$$ LANGUAGE plpgsql IMMUTABLE;
-
--- Function to get the appropriate column name for a dimension
-CREATE OR REPLACE FUNCTION get_embedding_column_name(dimension INTEGER)
-RETURNS TEXT AS $$
-BEGIN
- CASE dimension
- WHEN 384 THEN RETURN 'embedding_384';
- WHEN 768 THEN RETURN 'embedding_768';
- WHEN 1024 THEN RETURN 'embedding_1024';
- WHEN 1536 THEN RETURN 'embedding_1536';
- WHEN 3072 THEN RETURN 'embedding_3072';
- ELSE RAISE EXCEPTION 'Unsupported embedding dimension: %. Supported dimensions are: 384, 768, 1024, 1536, 3072', dimension;
- END CASE;
-END;
-$$ LANGUAGE plpgsql IMMUTABLE;
-
--- ======================================================================
--- SECTION 7: ENHANCED SEARCH FUNCTIONS
--- ======================================================================
-
--- Create multi-dimensional function to search for documentation chunks
-CREATE OR REPLACE FUNCTION match_archon_crawled_pages_multi (
- query_embedding VECTOR,
- embedding_dimension INTEGER,
- match_count INT DEFAULT 10,
- filter JSONB DEFAULT '{}'::jsonb,
- source_filter TEXT DEFAULT NULL
-) RETURNS TABLE (
- id BIGINT,
- url VARCHAR,
- chunk_number INTEGER,
- content TEXT,
- metadata JSONB,
- source_id TEXT,
- similarity FLOAT
-)
-LANGUAGE plpgsql
-AS $$
-#variable_conflict use_column
-DECLARE
- sql_query TEXT;
- embedding_column TEXT;
-BEGIN
- -- Determine which embedding column to use based on dimension
- CASE embedding_dimension
- WHEN 384 THEN embedding_column := 'embedding_384';
- WHEN 768 THEN embedding_column := 'embedding_768';
- WHEN 1024 THEN embedding_column := 'embedding_1024';
- WHEN 1536 THEN embedding_column := 'embedding_1536';
- WHEN 3072 THEN embedding_column := 'embedding_3072';
- ELSE RAISE EXCEPTION 'Unsupported embedding dimension: %', embedding_dimension;
- END CASE;
-
- -- Build dynamic query
- sql_query := format('
- SELECT id, url, chunk_number, content, metadata, source_id,
- 1 - (%I <=> $1) AS similarity
- FROM archon_crawled_pages
- WHERE (%I IS NOT NULL)
- AND metadata @> $3
- AND ($4 IS NULL OR source_id = $4)
- ORDER BY %I <=> $1
- LIMIT $2',
- embedding_column, embedding_column, embedding_column);
-
- -- Execute dynamic query
- RETURN QUERY EXECUTE sql_query USING query_embedding, match_count, filter, source_filter;
-END;
-$$;
-
--- Create multi-dimensional function to search for code examples
-CREATE OR REPLACE FUNCTION match_archon_code_examples_multi (
- query_embedding VECTOR,
- embedding_dimension INTEGER,
- match_count INT DEFAULT 10,
- filter JSONB DEFAULT '{}'::jsonb,
- source_filter TEXT DEFAULT NULL
-) RETURNS TABLE (
- id BIGINT,
- url VARCHAR,
- chunk_number INTEGER,
- content TEXT,
- summary TEXT,
- metadata JSONB,
- source_id TEXT,
- similarity FLOAT
-)
-LANGUAGE plpgsql
-AS $$
-#variable_conflict use_column
-DECLARE
- sql_query TEXT;
- embedding_column TEXT;
-BEGIN
- -- Determine which embedding column to use based on dimension
- CASE embedding_dimension
- WHEN 384 THEN embedding_column := 'embedding_384';
- WHEN 768 THEN embedding_column := 'embedding_768';
- WHEN 1024 THEN embedding_column := 'embedding_1024';
- WHEN 1536 THEN embedding_column := 'embedding_1536';
- WHEN 3072 THEN embedding_column := 'embedding_3072';
- ELSE RAISE EXCEPTION 'Unsupported embedding dimension: %', embedding_dimension;
- END CASE;
-
- -- Build dynamic query
- sql_query := format('
- SELECT id, url, chunk_number, content, summary, metadata, source_id,
- 1 - (%I <=> $1) AS similarity
- FROM archon_code_examples
- WHERE (%I IS NOT NULL)
- AND metadata @> $3
- AND ($4 IS NULL OR source_id = $4)
- ORDER BY %I <=> $1
- LIMIT $2',
- embedding_column, embedding_column, embedding_column);
-
- -- Execute dynamic query
- RETURN QUERY EXECUTE sql_query USING query_embedding, match_count, filter, source_filter;
-END;
-$$;
-
--- ======================================================================
--- SECTION 8: LEGACY COMPATIBILITY FUNCTIONS
--- ======================================================================
-
--- Legacy compatibility function for crawled pages (defaults to 1536D)
-CREATE OR REPLACE FUNCTION match_archon_crawled_pages (
- query_embedding VECTOR(1536),
- match_count INT DEFAULT 10,
- filter JSONB DEFAULT '{}'::jsonb,
- source_filter TEXT DEFAULT NULL
-) RETURNS TABLE (
- id BIGINT,
- url VARCHAR,
- chunk_number INTEGER,
- content TEXT,
- metadata JSONB,
- source_id TEXT,
- similarity FLOAT
-)
-LANGUAGE plpgsql
-AS $$
-BEGIN
- RETURN QUERY SELECT * FROM match_archon_crawled_pages_multi(query_embedding, 1536, match_count, filter, source_filter);
-END;
-$$;
-
--- Legacy compatibility function for code examples (defaults to 1536D)
-CREATE OR REPLACE FUNCTION match_archon_code_examples (
- query_embedding VECTOR(1536),
- match_count INT DEFAULT 10,
- filter JSONB DEFAULT '{}'::jsonb,
- source_filter TEXT DEFAULT NULL
-) RETURNS TABLE (
- id BIGINT,
- url VARCHAR,
- chunk_number INTEGER,
- content TEXT,
- summary TEXT,
- metadata JSONB,
- source_id TEXT,
- similarity FLOAT
-)
-LANGUAGE plpgsql
-AS $$
-BEGIN
- RETURN QUERY SELECT * FROM match_archon_code_examples_multi(query_embedding, 1536, match_count, filter, source_filter);
-END;
-$$;
-
-COMMIT;
-
--- ======================================================================
--- MIGRATION COMPLETE - SUPABASE-FRIENDLY STATUS REPORT
--- ======================================================================
--- This final SELECT statement consolidates all status information for
--- display in Supabase SQL Editor (users only see the last query result)
-
-SELECT
- '🎉 ARCHON MODEL TRACKING UPGRADE COMPLETED! 🎉' AS status,
- 'Successfully upgraded your Archon installation' AS message,
- ARRAY[
- '✅ Multi-dimensional embedding support (384, 768, 1024, 1536, 3072)',
- '✅ Model tracking fields (llm_chat_model, embedding_model, embedding_dimension)',
- '✅ Optimized indexes for improved search performance',
- '✅ Enhanced search functions with dimension-aware querying',
- '✅ Legacy compatibility maintained for existing code',
- '✅ Existing embedding data migrated (if any was found)',
- '✅ Support for 3072-dimensional vectors (using brute force search)'
- ] AS features_added,
- ARRAY[
- '• Multiple embedding providers (OpenAI, Ollama, Google, etc.)',
- '• Automatic model detection and tracking',
- '• Improved search accuracy with dimension-specific indexing',
- '• Full audit trail of which models processed your data'
- ] AS capabilities_enabled,
- ARRAY[
- '1. Restart your Archon services: docker compose restart',
- '2. New crawls will automatically use the enhanced features',
- '3. Check the Settings page to configure your preferred models',
- '4. Run validate_migration.sql to verify everything works'
- ] AS next_steps;
\ No newline at end of file
diff --git a/migration/validate_migration.sql b/migration/validate_migration.sql
deleted file mode 100644
index 3ff31924..00000000
--- a/migration/validate_migration.sql
+++ /dev/null
@@ -1,287 +0,0 @@
--- ======================================================================
--- ARCHON MIGRATION VALIDATION SCRIPT
--- ======================================================================
--- This script validates that the upgrade_to_model_tracking.sql migration
--- completed successfully and all features are working.
--- ======================================================================
-
-DO $$
-DECLARE
- crawled_pages_columns INTEGER := 0;
- code_examples_columns INTEGER := 0;
- crawled_pages_indexes INTEGER := 0;
- code_examples_indexes INTEGER := 0;
- functions_count INTEGER := 0;
- migration_success BOOLEAN := TRUE;
- error_messages TEXT := '';
-BEGIN
- RAISE NOTICE '====================================================================';
- RAISE NOTICE ' VALIDATING ARCHON MIGRATION RESULTS';
- RAISE NOTICE '====================================================================';
-
- -- Check if required columns exist in archon_crawled_pages
- SELECT COUNT(*) INTO crawled_pages_columns
- FROM information_schema.columns
- WHERE table_name = 'archon_crawled_pages'
- AND column_name IN (
- 'embedding_384', 'embedding_768', 'embedding_1024', 'embedding_1536', 'embedding_3072',
- 'llm_chat_model', 'embedding_model', 'embedding_dimension'
- );
-
- -- Check if required columns exist in archon_code_examples
- SELECT COUNT(*) INTO code_examples_columns
- FROM information_schema.columns
- WHERE table_name = 'archon_code_examples'
- AND column_name IN (
- 'embedding_384', 'embedding_768', 'embedding_1024', 'embedding_1536', 'embedding_3072',
- 'llm_chat_model', 'embedding_model', 'embedding_dimension'
- );
-
- -- Check if indexes were created for archon_crawled_pages
- SELECT COUNT(*) INTO crawled_pages_indexes
- FROM pg_indexes
- WHERE tablename = 'archon_crawled_pages'
- AND indexname IN (
- 'idx_archon_crawled_pages_embedding_384',
- 'idx_archon_crawled_pages_embedding_768',
- 'idx_archon_crawled_pages_embedding_1024',
- 'idx_archon_crawled_pages_embedding_1536',
- 'idx_archon_crawled_pages_embedding_model',
- 'idx_archon_crawled_pages_embedding_dimension',
- 'idx_archon_crawled_pages_llm_chat_model'
- );
-
- -- Check if indexes were created for archon_code_examples
- SELECT COUNT(*) INTO code_examples_indexes
- FROM pg_indexes
- WHERE tablename = 'archon_code_examples'
- AND indexname IN (
- 'idx_archon_code_examples_embedding_384',
- 'idx_archon_code_examples_embedding_768',
- 'idx_archon_code_examples_embedding_1024',
- 'idx_archon_code_examples_embedding_1536',
- 'idx_archon_code_examples_embedding_model',
- 'idx_archon_code_examples_embedding_dimension',
- 'idx_archon_code_examples_llm_chat_model'
- );
-
- -- Check if required functions exist
- SELECT COUNT(*) INTO functions_count
- FROM information_schema.routines
- WHERE routine_name IN (
- 'match_archon_crawled_pages_multi',
- 'match_archon_code_examples_multi',
- 'detect_embedding_dimension',
- 'get_embedding_column_name'
- );
-
- -- Validate results
- RAISE NOTICE 'COLUMN VALIDATION:';
- IF crawled_pages_columns = 8 THEN
- RAISE NOTICE '✅ archon_crawled_pages: All 8 required columns found';
- ELSE
- RAISE NOTICE '❌ archon_crawled_pages: Expected 8 columns, found %', crawled_pages_columns;
- migration_success := FALSE;
- error_messages := error_messages || '• Missing columns in archon_crawled_pages' || chr(10);
- END IF;
-
- IF code_examples_columns = 8 THEN
- RAISE NOTICE '✅ archon_code_examples: All 8 required columns found';
- ELSE
- RAISE NOTICE '❌ archon_code_examples: Expected 8 columns, found %', code_examples_columns;
- migration_success := FALSE;
- error_messages := error_messages || '• Missing columns in archon_code_examples' || chr(10);
- END IF;
-
- RAISE NOTICE '';
- RAISE NOTICE 'INDEX VALIDATION:';
- IF crawled_pages_indexes >= 6 THEN
- RAISE NOTICE '✅ archon_crawled_pages: % indexes created (expected 6+)', crawled_pages_indexes;
- ELSE
- RAISE NOTICE '⚠️ archon_crawled_pages: % indexes created (expected 6+)', crawled_pages_indexes;
- RAISE NOTICE ' Note: Some indexes may have failed due to resource constraints - this is OK';
- END IF;
-
- IF code_examples_indexes >= 6 THEN
- RAISE NOTICE '✅ archon_code_examples: % indexes created (expected 6+)', code_examples_indexes;
- ELSE
- RAISE NOTICE '⚠️ archon_code_examples: % indexes created (expected 6+)', code_examples_indexes;
- RAISE NOTICE ' Note: Some indexes may have failed due to resource constraints - this is OK';
- END IF;
-
- RAISE NOTICE '';
- RAISE NOTICE 'FUNCTION VALIDATION:';
- IF functions_count = 4 THEN
- RAISE NOTICE '✅ All 4 required functions created successfully';
- ELSE
- RAISE NOTICE '❌ Expected 4 functions, found %', functions_count;
- migration_success := FALSE;
- error_messages := error_messages || '• Missing database functions' || chr(10);
- END IF;
-
- -- Test function functionality
- BEGIN
- PERFORM detect_embedding_dimension(ARRAY[1,2,3]::vector);
- RAISE NOTICE '✅ detect_embedding_dimension function working';
- EXCEPTION WHEN OTHERS THEN
- RAISE NOTICE '❌ detect_embedding_dimension function failed: %', SQLERRM;
- migration_success := FALSE;
- error_messages := error_messages || '• detect_embedding_dimension function not working' || chr(10);
- END;
-
- BEGIN
- PERFORM get_embedding_column_name(1536);
- RAISE NOTICE '✅ get_embedding_column_name function working';
- EXCEPTION WHEN OTHERS THEN
- RAISE NOTICE '❌ get_embedding_column_name function failed: %', SQLERRM;
- migration_success := FALSE;
- error_messages := error_messages || '• get_embedding_column_name function not working' || chr(10);
- END;
-
- RAISE NOTICE '';
- RAISE NOTICE '====================================================================';
-
- IF migration_success THEN
- RAISE NOTICE '🎉 MIGRATION VALIDATION SUCCESSFUL!';
- RAISE NOTICE '';
- RAISE NOTICE 'Your Archon installation has been successfully upgraded with:';
- RAISE NOTICE '✅ Multi-dimensional embedding support';
- RAISE NOTICE '✅ Model tracking capabilities';
- RAISE NOTICE '✅ Enhanced search functions';
- RAISE NOTICE '✅ Optimized database indexes';
- RAISE NOTICE '';
- RAISE NOTICE 'Next steps:';
- RAISE NOTICE '1. Restart your Archon services: docker compose restart';
- RAISE NOTICE '2. Test with a small crawl to verify functionality';
- RAISE NOTICE '3. Configure your preferred models in Settings';
- ELSE
- RAISE NOTICE '❌ MIGRATION VALIDATION FAILED!';
- RAISE NOTICE '';
- RAISE NOTICE 'Issues found:';
- RAISE NOTICE '%', error_messages;
- RAISE NOTICE 'Please check the migration logs and re-run if necessary.';
- END IF;
-
- RAISE NOTICE '====================================================================';
-
- -- Show sample of existing data if any
- DECLARE
- sample_count INTEGER;
- r RECORD; -- Declare the loop variable as RECORD type
- BEGIN
- SELECT COUNT(*) INTO sample_count FROM archon_crawled_pages LIMIT 1;
- IF sample_count > 0 THEN
- RAISE NOTICE '';
- RAISE NOTICE 'SAMPLE DATA CHECK:';
-
- -- Show one record with the new columns
- FOR r IN
- SELECT url, embedding_model, embedding_dimension,
- CASE WHEN llm_chat_model IS NOT NULL THEN '✅' ELSE '⚪' END as llm_status,
- CASE WHEN embedding_384 IS NOT NULL THEN '✅ 384'
- WHEN embedding_768 IS NOT NULL THEN '✅ 768'
- WHEN embedding_1024 IS NOT NULL THEN '✅ 1024'
- WHEN embedding_1536 IS NOT NULL THEN '✅ 1536'
- WHEN embedding_3072 IS NOT NULL THEN '✅ 3072'
- ELSE '⚪ None' END as embedding_status
- FROM archon_crawled_pages
- LIMIT 3
- LOOP
- RAISE NOTICE 'Record: % | Model: % | Dimension: % | LLM: % | Embedding: %',
- substring(r.url from 1 for 40),
- COALESCE(r.embedding_model, 'None'),
- COALESCE(r.embedding_dimension::text, 'None'),
- r.llm_status,
- r.embedding_status;
- END LOOP;
- END IF;
- END;
-
-END $$;
-
--- ======================================================================
--- VALIDATION COMPLETE - SUPABASE-FRIENDLY STATUS REPORT
--- ======================================================================
--- This final SELECT statement consolidates validation results for
--- display in Supabase SQL Editor (users only see the last query result)
-
-WITH validation_results AS (
- -- Check if all required columns exist
- SELECT
- COUNT(*) FILTER (WHERE column_name IN ('embedding_384', 'embedding_768', 'embedding_1024', 'embedding_1536', 'embedding_3072')) as embedding_columns,
- COUNT(*) FILTER (WHERE column_name IN ('llm_chat_model', 'embedding_model', 'embedding_dimension')) as tracking_columns
- FROM information_schema.columns
- WHERE table_name = 'archon_crawled_pages'
-),
-function_check AS (
- -- Check if required functions exist
- SELECT
- COUNT(*) FILTER (WHERE routine_name IN ('match_archon_crawled_pages_multi', 'match_archon_code_examples_multi', 'detect_embedding_dimension', 'get_embedding_column_name')) as functions_count
- FROM information_schema.routines
- WHERE routine_type = 'FUNCTION'
-),
-index_check AS (
- -- Check if indexes exist
- SELECT
- COUNT(*) FILTER (WHERE indexname LIKE '%embedding_%') as embedding_indexes
- FROM pg_indexes
- WHERE tablename IN ('archon_crawled_pages', 'archon_code_examples')
-),
-data_sample AS (
- -- Get sample of data with new columns
- SELECT
- COUNT(*) as total_records,
- COUNT(*) FILTER (WHERE embedding_model IS NOT NULL) as records_with_model_tracking,
- COUNT(*) FILTER (WHERE embedding_384 IS NOT NULL OR embedding_768 IS NOT NULL OR embedding_1024 IS NOT NULL OR embedding_1536 IS NOT NULL OR embedding_3072 IS NOT NULL) as records_with_multi_dim_embeddings
- FROM archon_crawled_pages
-),
-overall_status AS (
- SELECT
- CASE
- WHEN v.embedding_columns = 5 AND v.tracking_columns = 3 AND f.functions_count >= 4 AND i.embedding_indexes > 0
- THEN '✅ MIGRATION VALIDATION SUCCESSFUL!'
- ELSE '❌ MIGRATION VALIDATION FAILED!'
- END as status,
- v.embedding_columns,
- v.tracking_columns,
- f.functions_count,
- i.embedding_indexes,
- d.total_records,
- d.records_with_model_tracking,
- d.records_with_multi_dim_embeddings
- FROM validation_results v, function_check f, index_check i, data_sample d
-)
-SELECT
- status,
- CASE
- WHEN embedding_columns = 5 AND tracking_columns = 3 AND functions_count >= 4 AND embedding_indexes > 0
- THEN 'All validation checks passed successfully'
- ELSE 'Some validation checks failed - please review the results'
- END as message,
- json_build_object(
- 'embedding_columns_added', embedding_columns || '/5',
- 'tracking_columns_added', tracking_columns || '/3',
- 'search_functions_created', functions_count || '+ functions',
- 'embedding_indexes_created', embedding_indexes || '+ indexes'
- ) as technical_validation,
- json_build_object(
- 'total_records', total_records,
- 'records_with_model_tracking', records_with_model_tracking,
- 'records_with_multi_dimensional_embeddings', records_with_multi_dim_embeddings
- ) as data_status,
- CASE
- WHEN embedding_columns = 5 AND tracking_columns = 3 AND functions_count >= 4 AND embedding_indexes > 0
- THEN ARRAY[
- '1. Restart Archon services: docker compose restart',
- '2. Test with a small crawl to verify functionality',
- '3. Configure your preferred models in Settings',
- '4. New crawls will automatically use model tracking'
- ]
- ELSE ARRAY[
- '1. Check migration logs for specific errors',
- '2. Re-run upgrade_database.sql if needed',
- '3. Ensure database has sufficient permissions',
- '4. Contact support if issues persist'
- ]
- END as next_steps
-FROM overall_status;
\ No newline at end of file
diff --git a/python/src/server/api_routes/migration_api.py b/python/src/server/api_routes/migration_api.py
new file mode 100644
index 00000000..fec04d24
--- /dev/null
+++ b/python/src/server/api_routes/migration_api.py
@@ -0,0 +1,170 @@
+"""
+API routes for database migration tracking and management.
+"""
+
+from datetime import datetime
+
+import logfire
+from fastapi import APIRouter, Header, HTTPException, Response
+from pydantic import BaseModel
+
+from ..config.version import ARCHON_VERSION
+from ..services.migration_service import migration_service
+from ..utils.etag_utils import check_etag, generate_etag
+
+
+# Response models
+class MigrationRecord(BaseModel):
+ """Represents an applied migration."""
+
+ version: str
+ migration_name: str
+ applied_at: datetime
+ checksum: str | None = None
+
+
+class PendingMigration(BaseModel):
+ """Represents a pending migration."""
+
+ version: str
+ name: str
+ sql_content: str
+ file_path: str
+ checksum: str | None = None
+
+
+class MigrationStatusResponse(BaseModel):
+ """Complete migration status response."""
+
+ pending_migrations: list[PendingMigration]
+ applied_migrations: list[MigrationRecord]
+ has_pending: bool
+ bootstrap_required: bool
+ current_version: str
+ pending_count: int
+ applied_count: int
+
+
+class MigrationHistoryResponse(BaseModel):
+ """Migration history response."""
+
+ migrations: list[MigrationRecord]
+ total_count: int
+ current_version: str
+
+
+# Create router
+router = APIRouter(prefix="/api/migrations", tags=["migrations"])
+
+
+@router.get("/status", response_model=MigrationStatusResponse)
+async def get_migration_status(
+ response: Response, if_none_match: str | None = Header(None)
+):
+ """
+ Get current migration status including pending and applied migrations.
+
+ Returns comprehensive migration status with:
+ - List of pending migrations with SQL content
+ - List of applied migrations
+ - Bootstrap flag if migrations table doesn't exist
+ - Current version information
+ """
+ try:
+ # Get migration status from service
+ status = await migration_service.get_migration_status()
+
+ # Generate ETag for response
+ etag = generate_etag(status)
+
+ # Check if client has current data
+        if check_etag(if_none_match, etag):
+            # Client has current data, return 304. The headers must go on the
+            # returned Response: header mutations on the injected `response`
+            # are not merged when a Response object is returned directly.
+            return Response(
+                status_code=304,
+                headers={"ETag": f'"{etag}"', "Cache-Control": "no-cache, must-revalidate"},
+            )
+ else:
+ # Client needs new data
+ response.headers["ETag"] = f'"{etag}"'
+ response.headers["Cache-Control"] = "no-cache, must-revalidate"
+ return MigrationStatusResponse(**status)
+
+ except Exception as e:
+ logfire.error(f"Error getting migration status: {e}")
+ raise HTTPException(status_code=500, detail=f"Failed to get migration status: {str(e)}") from e
+
+
+@router.get("/history", response_model=MigrationHistoryResponse)
+async def get_migration_history(response: Response, if_none_match: str | None = Header(None)):
+ """
+ Get history of applied migrations.
+
+ Returns list of all applied migrations sorted by date.
+ """
+ try:
+ # Get applied migrations from service
+ applied = await migration_service.get_applied_migrations()
+
+ # Format response
+ history = {
+ "migrations": [
+ MigrationRecord(
+ version=m.version,
+ migration_name=m.migration_name,
+ applied_at=m.applied_at,
+ checksum=m.checksum,
+ )
+ for m in applied
+ ],
+ "total_count": len(applied),
+ "current_version": ARCHON_VERSION,
+ }
+
+ # Generate ETag for response
+ etag = generate_etag(history)
+
+ # Check if client has current data
+        if check_etag(if_none_match, etag):
+            # Client has current data, return 304. The headers must go on the
+            # returned Response: header mutations on the injected `response`
+            # are not merged when a Response object is returned directly.
+            return Response(
+                status_code=304,
+                headers={"ETag": f'"{etag}"', "Cache-Control": "no-cache, must-revalidate"},
+            )
+ else:
+ # Client needs new data
+ response.headers["ETag"] = f'"{etag}"'
+ response.headers["Cache-Control"] = "no-cache, must-revalidate"
+ return MigrationHistoryResponse(**history)
+
+ except Exception as e:
+ logfire.error(f"Error getting migration history: {e}")
+ raise HTTPException(status_code=500, detail=f"Failed to get migration history: {str(e)}") from e
+
+
+@router.get("/pending", response_model=list[PendingMigration])
+async def get_pending_migrations():
+ """
+ Get list of pending migrations only.
+
+ Returns simplified list of migrations that need to be applied.
+ """
+ try:
+ # Get pending migrations from service
+ pending = await migration_service.get_pending_migrations()
+
+ # Format response
+ return [
+ PendingMigration(
+ version=m.version,
+ name=m.name,
+ sql_content=m.sql_content,
+ file_path=m.file_path,
+ checksum=m.checksum,
+ )
+ for m in pending
+ ]
+
+ except Exception as e:
+ logfire.error(f"Error getting pending migrations: {e}")
+ raise HTTPException(status_code=500, detail=f"Failed to get pending migrations: {str(e)}") from e
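+
+
+# Usage sketch (illustrative; host and port depend on your deployment):
+#   curl http://localhost:8181/api/migrations/status
+#   curl http://localhost:8181/api/migrations/history
+#   curl http://localhost:8181/api/migrations/pending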
diff --git a/python/src/server/api_routes/version_api.py b/python/src/server/api_routes/version_api.py
new file mode 100644
index 00000000..ebfd306f
--- /dev/null
+++ b/python/src/server/api_routes/version_api.py
@@ -0,0 +1,121 @@
+"""
+API routes for version checking and update management.
+"""
+
+from datetime import datetime
+from typing import Any
+
+import logfire
+from fastapi import APIRouter, Header, HTTPException, Response
+from pydantic import BaseModel
+
+from ..config.version import ARCHON_VERSION
+from ..services.version_service import version_service
+from ..utils.etag_utils import check_etag, generate_etag
+
+
+# Response models
+class ReleaseAsset(BaseModel):
+ """Represents a downloadable asset from a release."""
+
+ name: str
+ size: int
+ download_count: int
+ browser_download_url: str
+ content_type: str
+
+
+class VersionCheckResponse(BaseModel):
+ """Version check response with update information."""
+
+ current: str
+ latest: str | None
+ update_available: bool
+ release_url: str | None
+ release_notes: str | None
+ published_at: datetime | None
+ check_error: str | None = None
+ assets: list[dict[str, Any]] | None = None
+ author: str | None = None
+
+
+class CurrentVersionResponse(BaseModel):
+ """Simple current version response."""
+
+ version: str
+ timestamp: datetime
+
+
+# Create router
+router = APIRouter(prefix="/api/version", tags=["version"])
+
+
+@router.get("/check", response_model=VersionCheckResponse)
+async def check_for_updates(response: Response, if_none_match: str | None = Header(None)):
+ """
+ Check for available Archon updates.
+
+ Queries GitHub releases API to determine if a newer version is available.
+ Results are cached for 1 hour to avoid rate limiting.
+
+ Returns:
+ Version information including current, latest, and update availability
+ """
+ try:
+ # Get version check results from service
+ result = await version_service.check_for_updates()
+
+ # Generate ETag for response
+ etag = generate_etag(result)
+
+ # Check if client has current data
+        if check_etag(if_none_match, etag):
+            # Client has current data, return 304. The headers must go on the
+            # returned Response: header mutations on the injected `response`
+            # are not merged when a Response object is returned directly.
+            return Response(
+                status_code=304,
+                headers={"ETag": f'"{etag}"', "Cache-Control": "no-cache, must-revalidate"},
+            )
+ else:
+ # Client needs new data
+ response.headers["ETag"] = f'"{etag}"'
+ response.headers["Cache-Control"] = "no-cache, must-revalidate"
+ return VersionCheckResponse(**result)
+
+ except Exception as e:
+ logfire.error(f"Error checking for updates: {e}")
+ # Return safe response with error
+ return VersionCheckResponse(
+ current=ARCHON_VERSION,
+ latest=None,
+ update_available=False,
+ release_url=None,
+ release_notes=None,
+ published_at=None,
+ check_error=str(e),
+ )
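+
+# Usage sketch (illustrative): a conditional request sends back a previously
+# returned ETag; the endpoint replies 304 when the payload is unchanged.
+#   curl -H 'If-None-Match: "<etag>"' http://localhost:8181/api/version/check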
+
+
+@router.get("/current", response_model=CurrentVersionResponse)
+async def get_current_version():
+ """
+ Get the current Archon version.
+
+ Simple endpoint that returns the installed version without checking for updates.
+ """
+ return CurrentVersionResponse(version=ARCHON_VERSION, timestamp=datetime.now())
+
+
+@router.post("/clear-cache")
+async def clear_version_cache():
+ """
+ Clear the version check cache.
+
+ Forces the next version check to query GitHub API instead of using cached data.
+ Useful for testing or forcing an immediate update check.
+ """
+ try:
+ version_service.clear_cache()
+ return {"message": "Version cache cleared successfully", "success": True}
+ except Exception as e:
+ logfire.error(f"Error clearing version cache: {e}")
+ raise HTTPException(status_code=500, detail=f"Failed to clear cache: {str(e)}") from e
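+
+
+# Usage sketch (illustrative; host and port depend on your deployment):
+#   curl http://localhost:8181/api/version/current
+#   curl -X POST http://localhost:8181/api/version/clear-cache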
diff --git a/python/src/server/config/version.py b/python/src/server/config/version.py
new file mode 100644
index 00000000..97b74302
--- /dev/null
+++ b/python/src/server/config/version.py
@@ -0,0 +1,11 @@
+"""
+Version configuration for Archon.
+"""
+
+# Current version of Archon
+# Update this with each release
+ARCHON_VERSION = "0.1.0"
+
+# Repository information for GitHub API
+GITHUB_REPO_OWNER = "coleam00"
+GITHUB_REPO_NAME = "Archon"
diff --git a/python/src/server/main.py b/python/src/server/main.py
index ba0b19cb..19456e06 100644
--- a/python/src/server/main.py
+++ b/python/src/server/main.py
@@ -23,10 +23,12 @@ from .api_routes.bug_report_api import router as bug_report_router
from .api_routes.internal_api import router as internal_router
from .api_routes.knowledge_api import router as knowledge_router
from .api_routes.mcp_api import router as mcp_router
+from .api_routes.migration_api import router as migration_router
from .api_routes.ollama_api import router as ollama_router
from .api_routes.progress_api import router as progress_router
from .api_routes.projects_api import router as projects_router
from .api_routes.providers_api import router as providers_router
+from .api_routes.version_api import router as version_router
# Import modular API routers
from .api_routes.settings_api import router as settings_router
@@ -188,6 +190,8 @@ app.include_router(agent_chat_router)
app.include_router(internal_router)
app.include_router(bug_report_router)
app.include_router(providers_router)
+app.include_router(version_router)
+app.include_router(migration_router)
# Root endpoint
diff --git a/python/src/server/services/migration_service.py b/python/src/server/services/migration_service.py
new file mode 100644
index 00000000..f47a4d68
--- /dev/null
+++ b/python/src/server/services/migration_service.py
@@ -0,0 +1,233 @@
+"""
+Database migration tracking and management service.
+"""
+
+import hashlib
+from pathlib import Path
+from typing import Any
+
+import logfire
+from supabase import Client
+
+from .client_manager import get_supabase_client
+from ..config.version import ARCHON_VERSION
+
+
+class MigrationRecord:
+ """Represents a migration record from the database."""
+
+ def __init__(self, data: dict[str, Any]):
+ self.id = data.get("id")
+ self.version = data.get("version")
+ self.migration_name = data.get("migration_name")
+ self.applied_at = data.get("applied_at")
+ self.checksum = data.get("checksum")
+
+
+class PendingMigration:
+ """Represents a pending migration from the filesystem."""
+
+ def __init__(self, version: str, name: str, sql_content: str, file_path: str):
+ self.version = version
+ self.name = name
+ self.sql_content = sql_content
+ self.file_path = file_path
+ self.checksum = self._calculate_checksum(sql_content)
+
+ def _calculate_checksum(self, content: str) -> str:
+ """Calculate MD5 checksum of migration content."""
+ return hashlib.md5(content.encode()).hexdigest()
+
+
+class MigrationService:
+ """Service for managing database migrations."""
+
+ def __init__(self):
+ self._supabase: Client | None = None
+ # Handle both Docker (/app/migration) and local (./migration) environments
+ if Path("/app/migration").exists():
+ self._migrations_dir = Path("/app/migration")
+ else:
+ self._migrations_dir = Path("migration")
+
+ def _get_supabase_client(self) -> Client:
+ """Get or create Supabase client."""
+ if not self._supabase:
+ self._supabase = get_supabase_client()
+ return self._supabase
+
+ async def check_migrations_table_exists(self) -> bool:
+ """
+ Check if the archon_migrations table exists in the database.
+
+ Returns:
+ True if table exists, False otherwise
+ """
+ try:
+ supabase = self._get_supabase_client()
+
+ # Query to check if table exists
+ result = supabase.rpc(
+ "sql",
+ {
+ "query": """
+ SELECT EXISTS (
+ SELECT 1
+ FROM information_schema.tables
+ WHERE table_schema = 'public'
+ AND table_name = 'archon_migrations'
+ ) as exists
+ """
+ }
+ ).execute()
+
+ # Check if result indicates table exists
+ if result.data and len(result.data) > 0:
+ return result.data[0].get("exists", False)
+ return False
+ except Exception:
+ # If the SQL function doesn't exist or query fails, try direct query
+ try:
+ supabase = self._get_supabase_client()
+ # Try to select from the table with limit 0
+ supabase.table("archon_migrations").select("id").limit(0).execute()
+ return True
+ except Exception as e:
+ logfire.info(f"Migrations table does not exist: {e}")
+ return False
+
+ async def get_applied_migrations(self) -> list[MigrationRecord]:
+ """
+ Get list of applied migrations from the database.
+
+ Returns:
+ List of MigrationRecord objects
+ """
+ try:
+ # Check if table exists first
+ if not await self.check_migrations_table_exists():
+ logfire.info("Migrations table does not exist, returning empty list")
+ return []
+
+ supabase = self._get_supabase_client()
+ result = supabase.table("archon_migrations").select("*").order("applied_at", desc=True).execute()
+
+ return [MigrationRecord(row) for row in result.data]
+ except Exception as e:
+ logfire.error(f"Error fetching applied migrations: {e}")
+ # Return empty list if we can't fetch migrations
+ return []
+
+ async def scan_migration_directory(self) -> list[PendingMigration]:
+ """
+ Scan the migration directory for all SQL files.
+
+ Returns:
+ List of PendingMigration objects
+ """
+ migrations = []
+
+ if not self._migrations_dir.exists():
+ logfire.warning(f"Migration directory does not exist: {self._migrations_dir}")
+ return migrations
+
+ # Scan all version directories
+ for version_dir in sorted(self._migrations_dir.iterdir()):
+ if not version_dir.is_dir():
+ continue
+
+ version = version_dir.name
+
+ # Scan all SQL files in version directory
+ for sql_file in sorted(version_dir.glob("*.sql")):
+ try:
+ # Read SQL content
+ with open(sql_file, encoding="utf-8") as f:
+ sql_content = f.read()
+
+ # Extract migration name (filename without extension)
+ migration_name = sql_file.stem
+
+ # Create pending migration object
+ migration = PendingMigration(
+ version=version,
+ name=migration_name,
+ sql_content=sql_content,
+ file_path=str(sql_file.relative_to(Path.cwd())),
+ )
+ migrations.append(migration)
+ except Exception as e:
+ logfire.error(f"Error reading migration file {sql_file}: {e}")
+
+ return migrations
+
+ async def get_pending_migrations(self) -> list[PendingMigration]:
+ """
+ Get list of pending migrations by comparing filesystem with database.
+
+ Returns:
+ List of PendingMigration objects that haven't been applied
+ """
+ # Get all migrations from filesystem
+ all_migrations = await self.scan_migration_directory()
+
+ # Check if migrations table exists
+ if not await self.check_migrations_table_exists():
+ # Bootstrap case - all migrations are pending
+ logfire.info("Migrations table doesn't exist, all migrations are pending")
+ return all_migrations
+
+ # Get applied migrations from database
+ applied_migrations = await self.get_applied_migrations()
+
+ # Create set of applied migration identifiers
+ applied_set = {(m.version, m.migration_name) for m in applied_migrations}
+
+ # Filter out applied migrations
+ pending = [m for m in all_migrations if (m.version, m.name) not in applied_set]
+
+ return pending
+
+ async def get_migration_status(self) -> dict[str, Any]:
+ """
+ Get comprehensive migration status.
+
+ Returns:
+ Dictionary with pending and applied migrations info
+ """
+ pending = await self.get_pending_migrations()
+ applied = await self.get_applied_migrations()
+
+ # Check if bootstrap is required
+ bootstrap_required = not await self.check_migrations_table_exists()
+
+ return {
+ "pending_migrations": [
+ {
+ "version": m.version,
+ "name": m.name,
+ "sql_content": m.sql_content,
+ "file_path": m.file_path,
+ "checksum": m.checksum,
+ }
+ for m in pending
+ ],
+ "applied_migrations": [
+ {
+ "version": m.version,
+ "migration_name": m.migration_name,
+ "applied_at": m.applied_at,
+ "checksum": m.checksum,
+ }
+ for m in applied
+ ],
+ "has_pending": len(pending) > 0,
+ "bootstrap_required": bootstrap_required,
+ "current_version": ARCHON_VERSION,
+ "pending_count": len(pending),
+ "applied_count": len(applied),
+ }
+
+
+# Export singleton instance
+migration_service = MigrationService()
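+
+# Usage sketch (illustrative, from an async context):
+#   status = await migration_service.get_migration_status()
+#   if status["bootstrap_required"]:
+#       print("Migrations table missing - run the pending SQL to bootstrap")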
diff --git a/python/src/server/services/version_service.py b/python/src/server/services/version_service.py
new file mode 100644
index 00000000..b916c984
--- /dev/null
+++ b/python/src/server/services/version_service.py
@@ -0,0 +1,162 @@
+"""
+Version checking service with GitHub API integration.
+"""
+
+from datetime import datetime, timedelta
+from typing import Any
+
+import httpx
+import logfire
+
+from ..config.version import ARCHON_VERSION, GITHUB_REPO_NAME, GITHUB_REPO_OWNER
+from ..utils.semantic_version import is_newer_version
+
+
+class VersionService:
+ """Service for checking Archon version against GitHub releases."""
+
+ def __init__(self):
+ self._cache: dict[str, Any] | None = None
+ self._cache_time: datetime | None = None
+ self._cache_ttl = 3600 # 1 hour cache TTL
+
+ def _is_cache_valid(self) -> bool:
+ """Check if cached data is still valid."""
+ if not self._cache or not self._cache_time:
+ return False
+
+ age = datetime.now() - self._cache_time
+ return age < timedelta(seconds=self._cache_ttl)
+
+ async def get_latest_release(self) -> dict[str, Any] | None:
+ """
+ Fetch latest release information from GitHub API.
+
+ Returns:
+ Release data dictionary or None if no releases
+ """
+ # Check cache first
+ if self._is_cache_valid():
+ logfire.debug("Using cached version data")
+ return self._cache
+
+ # GitHub API endpoint
+ url = f"https://api.github.com/repos/{GITHUB_REPO_OWNER}/{GITHUB_REPO_NAME}/releases/latest"
+
+ try:
+ async with httpx.AsyncClient(timeout=10.0) as client:
+ response = await client.get(
+ url,
+ headers={
+ "Accept": "application/vnd.github.v3+json",
+ "User-Agent": f"Archon/{ARCHON_VERSION}",
+ },
+ )
+
+ # Handle 404 - no releases yet
+ if response.status_code == 404:
+ logfire.info("No releases found on GitHub")
+ return None
+
+ response.raise_for_status()
+ data = response.json()
+
+ # Cache the successful response
+ self._cache = data
+ self._cache_time = datetime.now()
+
+ return data
+
+ except httpx.TimeoutException:
+ logfire.warning("GitHub API request timed out")
+ # Return cached data if available
+ if self._cache:
+ return self._cache
+ return None
+ except httpx.HTTPError as e:
+ logfire.error(f"HTTP error fetching latest release: {e}")
+ # Return cached data if available
+ if self._cache:
+ return self._cache
+ return None
+ except Exception as e:
+ logfire.error(f"Unexpected error fetching latest release: {e}")
+ # Return cached data if available
+ if self._cache:
+ return self._cache
+ return None
+
+ async def check_for_updates(self) -> dict[str, Any]:
+ """
+ Check if a newer version of Archon is available.
+
+ Returns:
+ Dictionary with version check results
+ """
+ try:
+ # Get latest release from GitHub
+ release = await self.get_latest_release()
+
+ if not release:
+ # No releases found or error occurred
+ return {
+ "current": ARCHON_VERSION,
+ "latest": None,
+ "update_available": False,
+ "release_url": None,
+ "release_notes": None,
+ "published_at": None,
+ "check_error": None,
+ }
+
+ # Extract version from tag_name (e.g., "v1.0.0" -> "1.0.0")
+ latest_version = release.get("tag_name", "")
+ if latest_version.startswith("v"):
+ latest_version = latest_version[1:]
+
+ # Check if update is available
+ update_available = is_newer_version(ARCHON_VERSION, latest_version)
+
+ # Parse published date
+ published_at = None
+ if release.get("published_at"):
+ try:
+ published_at = datetime.fromisoformat(
+ release["published_at"].replace("Z", "+00:00")
+ )
+ except Exception:
+ pass
+
+ return {
+ "current": ARCHON_VERSION,
+ "latest": latest_version,
+ "update_available": update_available,
+ "release_url": release.get("html_url"),
+ "release_notes": release.get("body"),
+ "published_at": published_at,
+ "check_error": None,
+ "assets": release.get("assets", []),
+ "author": release.get("author", {}).get("login"),
+ }
+
+ except Exception as e:
+ logfire.error(f"Error checking for updates: {e}")
+ # Return safe default with error
+ return {
+ "current": ARCHON_VERSION,
+ "latest": None,
+ "update_available": False,
+ "release_url": None,
+ "release_notes": None,
+ "published_at": None,
+ "check_error": str(e),
+ }
+
+ def clear_cache(self):
+ """Clear the cached version data."""
+ self._cache = None
+ self._cache_time = None
+
+
+# Export singleton instance
+version_service = VersionService()
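+
+# Usage sketch (illustrative, from an async context):
+#   result = await version_service.check_for_updates()
+#   if result["update_available"]:
+#       print(f"Archon {result['latest']} is available (current {result['current']})")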
diff --git a/python/src/server/utils/semantic_version.py b/python/src/server/utils/semantic_version.py
new file mode 100644
index 00000000..d869f7a8
--- /dev/null
+++ b/python/src/server/utils/semantic_version.py
@@ -0,0 +1,107 @@
+"""
+Semantic version parsing and comparison utilities.
+"""
+
+import re
+
+
+def parse_version(version_string: str) -> tuple[int, int, int, str | None]:
+ """
+ Parse a semantic version string into major, minor, patch, and optional prerelease.
+
+ Supports formats like:
+ - "1.0.0"
+ - "v1.0.0"
+ - "1.0.0-beta"
+ - "v1.0.0-rc.1"
+
+ Args:
+ version_string: Version string to parse
+
+ Returns:
+ Tuple of (major, minor, patch, prerelease)
+ """
+ # Remove 'v' prefix if present
+ version = version_string.strip()
+ if version.lower().startswith('v'):
+ version = version[1:]
+
+ # Parse version with optional prerelease
+ pattern = r'^(\d+)\.(\d+)\.(\d+)(?:-(.+))?$'
+ match = re.match(pattern, version)
+
+ if not match:
+ # Try to handle incomplete versions like "1.0"
+ simple_pattern = r'^(\d+)(?:\.(\d+))?(?:\.(\d+))?$'
+ simple_match = re.match(simple_pattern, version)
+ if simple_match:
+ major = int(simple_match.group(1))
+ minor = int(simple_match.group(2) or 0)
+ patch = int(simple_match.group(3) or 0)
+ return (major, minor, patch, None)
+ raise ValueError(f"Invalid version string: {version_string}")
+
+ major = int(match.group(1))
+ minor = int(match.group(2))
+ patch = int(match.group(3))
+ prerelease = match.group(4)
+
+ return (major, minor, patch, prerelease)
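+
+# Illustrative results for the parser above:
+#   parse_version("v1.2.3")     -> (1, 2, 3, None)
+#   parse_version("1.0.0-rc.1") -> (1, 0, 0, "rc.1")
+#   parse_version("2.1")        -> (2, 1, 0, None)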
+
+
+def compare_versions(version1: str, version2: str) -> int:
+ """
+ Compare two semantic version strings.
+
+ Args:
+ version1: First version string
+ version2: Second version string
+
+ Returns:
+ -1 if version1 < version2
+ 0 if version1 == version2
+ 1 if version1 > version2
+ """
+ v1 = parse_version(version1)
+ v2 = parse_version(version2)
+
+ # Compare major, minor, patch
+ for i in range(3):
+ if v1[i] < v2[i]:
+ return -1
+ elif v1[i] > v2[i]:
+ return 1
+
+ # If main versions are equal, check prerelease
+ # No prerelease is considered newer than any prerelease
+ if v1[3] is None and v2[3] is None:
+ return 0
+ elif v1[3] is None:
+ return 1 # v1 is release, v2 is prerelease
+ elif v2[3] is None:
+ return -1 # v1 is prerelease, v2 is release
+ else:
+ # Both have prereleases, compare lexicographically
+ if v1[3] < v2[3]:
+ return -1
+ elif v1[3] > v2[3]:
+ return 1
+ return 0
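+
+# Illustrative: compare_versions("1.0.0-rc.1", "1.0.0") == -1, since a
+# release outranks any prerelease of the same major.minor.patch.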
+
+
+def is_newer_version(current: str, latest: str) -> bool:
+ """
+ Check if latest version is newer than current version.
+
+ Args:
+ current: Current version string
+ latest: Latest version string to compare
+
+ Returns:
+ True if latest > current, False otherwise
+ """
+ try:
+ return compare_versions(latest, current) > 0
+ except ValueError:
+ # If we can't parse versions, assume no update
+ return False
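+
+# Illustrative: is_newer_version("0.1.0", "0.2.0") is True, while
+# is_newer_version("0.1.0", "0.1.0-beta") is False (a prerelease is older).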
diff --git a/python/tests/server/api_routes/test_migration_api.py b/python/tests/server/api_routes/test_migration_api.py
new file mode 100644
index 00000000..57b9da2c
--- /dev/null
+++ b/python/tests/server/api_routes/test_migration_api.py
@@ -0,0 +1,206 @@
+"""
+Unit tests for migration_api.py
+"""
+
+from datetime import datetime
+from unittest.mock import AsyncMock, patch
+
+import pytest
+from fastapi.testclient import TestClient
+
+from src.server.config.version import ARCHON_VERSION
+from src.server.main import app
+from src.server.services.migration_service import MigrationRecord, PendingMigration
+
+
+@pytest.fixture
+def client():
+ """Create test client."""
+ return TestClient(app)
+
+
+@pytest.fixture
+def mock_applied_migrations():
+ """Mock applied migration data."""
+ return [
+ MigrationRecord({
+ "version": "0.1.0",
+ "migration_name": "001_initial",
+ "applied_at": datetime(2025, 1, 1, 0, 0, 0),
+ "checksum": "abc123",
+ }),
+ MigrationRecord({
+ "version": "0.1.0",
+ "migration_name": "002_add_column",
+ "applied_at": datetime(2025, 1, 2, 0, 0, 0),
+ "checksum": "def456",
+ }),
+ ]
+
+
+@pytest.fixture
+def mock_pending_migrations():
+ """Mock pending migration data."""
+ return [
+ PendingMigration(
+ version="0.1.0",
+ name="003_add_index",
+ sql_content="CREATE INDEX idx_test ON test_table(name);",
+ file_path="migration/0.1.0/003_add_index.sql"
+ ),
+ PendingMigration(
+ version="0.1.0",
+ name="004_add_table",
+ sql_content="CREATE TABLE new_table (id INT);",
+ file_path="migration/0.1.0/004_add_table.sql"
+ ),
+ ]
+
+
+@pytest.fixture
+def mock_migration_status(mock_applied_migrations, mock_pending_migrations):
+ """Mock complete migration status."""
+ return {
+ "pending_migrations": [
+ {"version": m.version, "name": m.name, "sql_content": m.sql_content, "file_path": m.file_path, "checksum": m.checksum}
+ for m in mock_pending_migrations
+ ],
+ "applied_migrations": [
+ {"version": m.version, "migration_name": m.migration_name, "applied_at": m.applied_at, "checksum": m.checksum}
+ for m in mock_applied_migrations
+ ],
+ "has_pending": True,
+ "bootstrap_required": False,
+ "current_version": ARCHON_VERSION,
+ "pending_count": 2,
+ "applied_count": 2,
+ }
+
+
+def test_get_migration_status_success(client, mock_migration_status):
+ """Test successful migration status retrieval."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_migration_status = AsyncMock(return_value=mock_migration_status)
+
+ response = client.get("/api/migrations/status")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["current_version"] == ARCHON_VERSION
+ assert data["has_pending"] is True
+ assert data["bootstrap_required"] is False
+ assert data["pending_count"] == 2
+ assert data["applied_count"] == 2
+ assert len(data["pending_migrations"]) == 2
+ assert len(data["applied_migrations"]) == 2
+
+
+def test_get_migration_status_bootstrap_required(client):
+ """Test migration status when bootstrap is required."""
+ mock_status = {
+ "pending_migrations": [],
+ "applied_migrations": [],
+ "has_pending": True,
+ "bootstrap_required": True,
+ "current_version": ARCHON_VERSION,
+ "pending_count": 5,
+ "applied_count": 0,
+ }
+
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_migration_status = AsyncMock(return_value=mock_status)
+
+ response = client.get("/api/migrations/status")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["bootstrap_required"] is True
+ assert data["applied_count"] == 0
+
+
+def test_get_migration_status_error(client):
+ """Test error handling in migration status."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_migration_status = AsyncMock(side_effect=Exception("Database error"))
+
+ response = client.get("/api/migrations/status")
+
+ assert response.status_code == 500
+ assert "Failed to get migration status" in response.json()["detail"]
+
+
+def test_get_migration_history_success(client, mock_applied_migrations):
+ """Test successful migration history retrieval."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_applied_migrations = AsyncMock(return_value=mock_applied_migrations)
+
+ response = client.get("/api/migrations/history")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["total_count"] == 2
+ assert data["current_version"] == ARCHON_VERSION
+ assert len(data["migrations"]) == 2
+ assert data["migrations"][0]["migration_name"] == "001_initial"
+
+
+def test_get_migration_history_empty(client):
+ """Test migration history when no migrations applied."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_applied_migrations = AsyncMock(return_value=[])
+
+ response = client.get("/api/migrations/history")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["total_count"] == 0
+ assert len(data["migrations"]) == 0
+
+
+def test_get_migration_history_error(client):
+ """Test error handling in migration history."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_applied_migrations = AsyncMock(side_effect=Exception("Database error"))
+
+ response = client.get("/api/migrations/history")
+
+ assert response.status_code == 500
+ assert "Failed to get migration history" in response.json()["detail"]
+
+
+def test_get_pending_migrations_success(client, mock_pending_migrations):
+ """Test successful pending migrations retrieval."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_pending_migrations = AsyncMock(return_value=mock_pending_migrations)
+
+ response = client.get("/api/migrations/pending")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert len(data) == 2
+ assert data[0]["name"] == "003_add_index"
+ assert data[0]["sql_content"] == "CREATE INDEX idx_test ON test_table(name);"
+ assert data[1]["name"] == "004_add_table"
+
+
+def test_get_pending_migrations_none(client):
+ """Test when no pending migrations exist."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_pending_migrations = AsyncMock(return_value=[])
+
+ response = client.get("/api/migrations/pending")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert len(data) == 0
+
+
+def test_get_pending_migrations_error(client):
+ """Test error handling in pending migrations."""
+ with patch("src.server.api_routes.migration_api.migration_service") as mock_service:
+ mock_service.get_pending_migrations = AsyncMock(side_effect=Exception("File error"))
+
+ response = client.get("/api/migrations/pending")
+
+ assert response.status_code == 500
+ assert "Failed to get pending migrations" in response.json()["detail"]
\ No newline at end of file
diff --git a/python/tests/server/api_routes/test_version_api.py b/python/tests/server/api_routes/test_version_api.py
new file mode 100644
index 00000000..d704c613
--- /dev/null
+++ b/python/tests/server/api_routes/test_version_api.py
@@ -0,0 +1,147 @@
+"""
+Unit tests for version_api.py
+"""
+
+from datetime import datetime
+from unittest.mock import AsyncMock, patch
+
+import pytest
+from fastapi.testclient import TestClient
+
+from src.server.config.version import ARCHON_VERSION
+from src.server.main import app
+
+
+@pytest.fixture
+def client():
+ """Create test client."""
+ return TestClient(app)
+
+
+@pytest.fixture
+def mock_version_data():
+ """Mock version check data."""
+ return {
+ "current": ARCHON_VERSION,
+ "latest": "0.2.0",
+ "update_available": True,
+ "release_url": "https://github.com/coleam00/Archon/releases/tag/v0.2.0",
+ "release_notes": "New features and bug fixes",
+ "published_at": datetime(2025, 1, 1, 0, 0, 0),
+ "check_error": None,
+ "author": "coleam00",
+ "assets": [{"name": "archon.zip", "size": 1024000}],
+ }
+
+
+def test_check_for_updates_success(client, mock_version_data):
+ """Test successful version check."""
+ with patch("src.server.api_routes.version_api.version_service") as mock_service:
+ mock_service.check_for_updates = AsyncMock(return_value=mock_version_data)
+
+ response = client.get("/api/version/check")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["current"] == ARCHON_VERSION
+ assert data["latest"] == "0.2.0"
+ assert data["update_available"] is True
+ assert data["release_url"] == mock_version_data["release_url"]
+
+
+def test_check_for_updates_no_update(client):
+ """Test when no update is available."""
+ mock_data = {
+ "current": ARCHON_VERSION,
+ "latest": ARCHON_VERSION,
+ "update_available": False,
+ "release_url": None,
+ "release_notes": None,
+ "published_at": None,
+ "check_error": None,
+ }
+
+ with patch("src.server.api_routes.version_api.version_service") as mock_service:
+ mock_service.check_for_updates = AsyncMock(return_value=mock_data)
+
+ response = client.get("/api/version/check")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["current"] == ARCHON_VERSION
+ assert data["latest"] == ARCHON_VERSION
+ assert data["update_available"] is False
+
+
+def test_check_for_updates_with_etag_modified(client, mock_version_data):
+ """Test ETag handling when data has changed."""
+ with patch("src.server.api_routes.version_api.version_service") as mock_service:
+ mock_service.check_for_updates = AsyncMock(return_value=mock_version_data)
+
+ # First request
+ response1 = client.get("/api/version/check")
+ assert response1.status_code == 200
+ old_etag = response1.headers.get("etag")
+
+ # Modify data
+ modified_data = mock_version_data.copy()
+ modified_data["latest"] = "0.3.0"
+ mock_service.check_for_updates = AsyncMock(return_value=modified_data)
+
+ # Second request with old ETag
+ response2 = client.get("/api/version/check", headers={"If-None-Match": old_etag})
+ assert response2.status_code == 200 # Data changed, return new data
+ data = response2.json()
+ assert data["latest"] == "0.3.0"
+
+
+def test_check_for_updates_error_handling(client):
+ """Test error handling in version check."""
+ with patch("src.server.api_routes.version_api.version_service") as mock_service:
+ mock_service.check_for_updates = AsyncMock(side_effect=Exception("API error"))
+
+ response = client.get("/api/version/check")
+
+ assert response.status_code == 200 # Should still return 200
+ data = response.json()
+ assert data["current"] == ARCHON_VERSION
+ assert data["latest"] is None
+ assert data["update_available"] is False
+ assert data["check_error"] == "API error"
+
+
+def test_get_current_version(client):
+ """Test getting current version."""
+ response = client.get("/api/version/current")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["version"] == ARCHON_VERSION
+ assert "timestamp" in data
+
+
+def test_clear_version_cache_success(client):
+ """Test clearing version cache."""
+ with patch("src.server.api_routes.version_api.version_service") as mock_service:
+ mock_service.clear_cache.return_value = None
+
+ response = client.post("/api/version/clear-cache")
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["success"] is True
+ assert data["message"] == "Version cache cleared successfully"
+ mock_service.clear_cache.assert_called_once()
+
+
+def test_clear_version_cache_error(client):
+ """Test error handling when clearing cache fails."""
+ with patch("src.server.api_routes.version_api.version_service") as mock_service:
+ mock_service.clear_cache.side_effect = Exception("Cache error")
+
+ response = client.post("/api/version/clear-cache")
+
+ assert response.status_code == 500
+ assert "Failed to clear cache" in response.json()["detail"]
\ No newline at end of file
diff --git a/python/tests/server/services/test_migration_service.py b/python/tests/server/services/test_migration_service.py
new file mode 100644
index 00000000..83e46c9b
--- /dev/null
+++ b/python/tests/server/services/test_migration_service.py
@@ -0,0 +1,271 @@
+"""
+Unit tests for migration_service.py
+"""
+
+import hashlib
+from pathlib import Path
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+
+from src.server.config.version import ARCHON_VERSION
+from src.server.services.migration_service import (
+ MigrationRecord,
+ MigrationService,
+ PendingMigration,
+)
+
+
+@pytest.fixture
+def migration_service():
+ """Create a migration service instance."""
+ with patch("src.server.services.migration_service.Path.exists") as mock_exists:
+ # Mock that migration directory exists locally
+ mock_exists.return_value = False # Docker path doesn't exist
+ service = MigrationService()
+ return service
+
+
+@pytest.fixture
+def mock_supabase_client():
+ """Mock Supabase client."""
+ client = MagicMock()
+ return client
+
+
+def test_pending_migration_init():
+ """Test PendingMigration initialization and checksum calculation."""
+ migration = PendingMigration(
+ version="0.1.0",
+ name="001_initial",
+ sql_content="CREATE TABLE test (id INT);",
+ file_path="migration/0.1.0/001_initial.sql"
+ )
+
+ assert migration.version == "0.1.0"
+ assert migration.name == "001_initial"
+ assert migration.sql_content == "CREATE TABLE test (id INT);"
+ assert migration.file_path == "migration/0.1.0/001_initial.sql"
+ assert migration.checksum == hashlib.md5("CREATE TABLE test (id INT);".encode()).hexdigest()
+
+
+def test_migration_record_init():
+ """Test MigrationRecord initialization from database data."""
+ data = {
+ "id": "123-456",
+ "version": "0.1.0",
+ "migration_name": "001_initial",
+ "applied_at": "2025-01-01T00:00:00Z",
+ "checksum": "abc123"
+ }
+
+ record = MigrationRecord(data)
+
+ assert record.id == "123-456"
+ assert record.version == "0.1.0"
+ assert record.migration_name == "001_initial"
+ assert record.applied_at == "2025-01-01T00:00:00Z"
+ assert record.checksum == "abc123"
+
+
+def test_migration_service_init_local():
+ """Test MigrationService initialization with local path."""
+ with patch("src.server.services.migration_service.Path.exists") as mock_exists:
+ # Mock that Docker path doesn't exist
+ mock_exists.return_value = False
+
+ service = MigrationService()
+ assert service._migrations_dir == Path("migration")
+
+
+def test_migration_service_init_docker():
+ """Test MigrationService initialization with Docker path."""
+ with patch("src.server.services.migration_service.Path.exists") as mock_exists:
+ # Mock that Docker path exists
+ mock_exists.return_value = True
+
+ service = MigrationService()
+ assert service._migrations_dir == Path("/app/migration")
+
+
+@pytest.mark.asyncio
+async def test_get_applied_migrations_success(migration_service, mock_supabase_client):
+ """Test successful retrieval of applied migrations."""
+ mock_response = MagicMock()
+ mock_response.data = [
+ {
+ "id": "123",
+ "version": "0.1.0",
+ "migration_name": "001_initial",
+ "applied_at": "2025-01-01T00:00:00Z",
+ "checksum": "abc123",
+ },
+ ]
+
+ mock_supabase_client.table.return_value.select.return_value.order.return_value.execute.return_value = mock_response
+
+ with patch.object(migration_service, '_get_supabase_client', return_value=mock_supabase_client):
+ with patch.object(migration_service, 'check_migrations_table_exists', return_value=True):
+ result = await migration_service.get_applied_migrations()
+
+ assert len(result) == 1
+ assert isinstance(result[0], MigrationRecord)
+ assert result[0].version == "0.1.0"
+ assert result[0].migration_name == "001_initial"
+
+
+@pytest.mark.asyncio
+async def test_get_applied_migrations_table_not_exists(migration_service, mock_supabase_client):
+ """Test handling when migrations table doesn't exist."""
+ with patch.object(migration_service, '_get_supabase_client', return_value=mock_supabase_client):
+ with patch.object(migration_service, 'check_migrations_table_exists', return_value=False):
+ result = await migration_service.get_applied_migrations()
+ assert result == []
+
+
+@pytest.mark.asyncio
+async def test_get_pending_migrations_with_files(migration_service, mock_supabase_client):
+ """Test getting pending migrations from filesystem."""
+ # Mock scan_migration_directory to return test migrations
+ mock_migrations = [
+ PendingMigration(
+ version="0.1.0",
+ name="001_initial",
+ sql_content="CREATE TABLE test;",
+ file_path="migration/0.1.0/001_initial.sql"
+ ),
+ PendingMigration(
+ version="0.1.0",
+ name="002_update",
+ sql_content="ALTER TABLE test ADD col TEXT;",
+ file_path="migration/0.1.0/002_update.sql"
+ )
+ ]
+
+ # Mock no applied migrations
+ with patch.object(migration_service, 'scan_migration_directory', return_value=mock_migrations):
+ with patch.object(migration_service, 'get_applied_migrations', return_value=[]):
+ result = await migration_service.get_pending_migrations()
+
+ assert len(result) == 2
+ assert all(isinstance(m, PendingMigration) for m in result)
+ assert result[0].name == "001_initial"
+ assert result[1].name == "002_update"
+
+
+@pytest.mark.asyncio
+async def test_get_pending_migrations_some_applied(migration_service, mock_supabase_client):
+ """Test getting pending migrations when some are already applied."""
+ # Mock all migrations
+ mock_all_migrations = [
+ PendingMigration(
+ version="0.1.0",
+ name="001_initial",
+ sql_content="CREATE TABLE test;",
+ file_path="migration/0.1.0/001_initial.sql"
+ ),
+ PendingMigration(
+ version="0.1.0",
+ name="002_update",
+ sql_content="ALTER TABLE test ADD col TEXT;",
+ file_path="migration/0.1.0/002_update.sql"
+ )
+ ]
+
+ # Mock first migration as applied
+ mock_applied = [
+ MigrationRecord({
+ "version": "0.1.0",
+ "migration_name": "001_initial",
+ "applied_at": "2025-01-01T00:00:00Z",
+ "checksum": None
+ })
+ ]
+
+ with patch.object(migration_service, 'scan_migration_directory', return_value=mock_all_migrations):
+ with patch.object(migration_service, 'get_applied_migrations', return_value=mock_applied):
+ with patch.object(migration_service, 'check_migrations_table_exists', return_value=True):
+ result = await migration_service.get_pending_migrations()
+
+ assert len(result) == 1
+ assert result[0].name == "002_update"
+
+
+@pytest.mark.asyncio
+async def test_get_migration_status_all_applied(migration_service, mock_supabase_client):
+ """Test migration status when all migrations are applied."""
+ # Mock one migration file
+ mock_all_migrations = [
+ PendingMigration(
+ version="0.1.0",
+ name="001_initial",
+ sql_content="CREATE TABLE test;",
+ file_path="migration/0.1.0/001_initial.sql"
+ )
+ ]
+
+ # Mock migration as applied
+ mock_applied = [
+ MigrationRecord({
+ "version": "0.1.0",
+ "migration_name": "001_initial",
+ "applied_at": "2025-01-01T00:00:00Z",
+ "checksum": None
+ })
+ ]
+
+ with patch.object(migration_service, 'scan_migration_directory', return_value=mock_all_migrations):
+ with patch.object(migration_service, 'get_applied_migrations', return_value=mock_applied):
+ with patch.object(migration_service, 'check_migrations_table_exists', return_value=True):
+ result = await migration_service.get_migration_status()
+
+ assert result["current_version"] == ARCHON_VERSION
+ assert result["has_pending"] is False
+ assert result["bootstrap_required"] is False
+ assert result["pending_count"] == 0
+ assert result["applied_count"] == 1
+
+
+@pytest.mark.asyncio
+async def test_get_migration_status_bootstrap_required(migration_service, mock_supabase_client):
+ """Test migration status when bootstrap is required (table doesn't exist)."""
+ # Mock migration files
+ mock_all_migrations = [
+ PendingMigration(
+ version="0.1.0",
+ name="001_initial",
+ sql_content="CREATE TABLE test;",
+ file_path="migration/0.1.0/001_initial.sql"
+ ),
+ PendingMigration(
+ version="0.1.0",
+ name="002_update",
+ sql_content="ALTER TABLE test ADD col TEXT;",
+ file_path="migration/0.1.0/002_update.sql"
+ )
+ ]
+
+ with patch.object(migration_service, 'scan_migration_directory', return_value=mock_all_migrations):
+ with patch.object(migration_service, 'get_applied_migrations', return_value=[]):
+ with patch.object(migration_service, 'check_migrations_table_exists', return_value=False):
+ result = await migration_service.get_migration_status()
+
+ assert result["bootstrap_required"] is True
+ assert result["has_pending"] is True
+ assert result["pending_count"] == 2
+ assert result["applied_count"] == 0
+ assert len(result["pending_migrations"]) == 2
+
+
+@pytest.mark.asyncio
+async def test_get_migration_status_no_files(migration_service, mock_supabase_client):
+ """Test migration status when no migration files exist."""
+ with patch.object(migration_service, 'scan_migration_directory', return_value=[]):
+ with patch.object(migration_service, 'get_applied_migrations', return_value=[]):
+ with patch.object(migration_service, 'check_migrations_table_exists', return_value=True):
+ result = await migration_service.get_migration_status()
+
+ assert result["has_pending"] is False
+ assert result["pending_count"] == 0
+ assert len(result["pending_migrations"]) == 0
\ No newline at end of file
diff --git a/python/tests/server/services/test_version_service.py b/python/tests/server/services/test_version_service.py
new file mode 100644
index 00000000..0f76394d
--- /dev/null
+++ b/python/tests/server/services/test_version_service.py
@@ -0,0 +1,234 @@
+"""
+Unit tests for version_service.py
+"""
+
+import json
+from datetime import datetime, timedelta
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import httpx
+import pytest
+
+from src.server.config.version import ARCHON_VERSION
+from src.server.services.version_service import VersionService
+
+
+@pytest.fixture
+def version_service():
+ """Create a fresh version service instance for each test."""
+ service = VersionService()
+ # Clear any cache from previous tests
+ service._cache = None
+ service._cache_time = None
+ return service
+
+
+@pytest.fixture
+def mock_release_data():
+ """Mock GitHub release data."""
+ return {
+ "tag_name": "v0.2.0",
+ "name": "Archon v0.2.0",
+ "html_url": "https://github.com/coleam00/Archon/releases/tag/v0.2.0",
+ "body": "## Release Notes\n\nNew features and bug fixes",
+ "published_at": "2025-01-01T00:00:00Z",
+ "author": {"login": "coleam00"},
+ "assets": [
+ {
+ "name": "archon-v0.2.0.zip",
+ "size": 1024000,
+ "download_count": 100,
+ "browser_download_url": "https://github.com/coleam00/Archon/releases/download/v0.2.0/archon-v0.2.0.zip",
+ "content_type": "application/zip",
+ }
+ ],
+ }
+
+
+@pytest.mark.asyncio
+async def test_get_latest_release_success(version_service, mock_release_data):
+ """Test successful fetching of latest release from GitHub."""
+ with patch("httpx.AsyncClient") as mock_client_class:
+ mock_client = AsyncMock()
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = mock_release_data
+ mock_client.get.return_value = mock_response
+ mock_client_class.return_value.__aenter__.return_value = mock_client
+
+ result = await version_service.get_latest_release()
+
+ assert result == mock_release_data
+ assert version_service._cache == mock_release_data
+ assert version_service._cache_time is not None
+
+
+@pytest.mark.asyncio
+async def test_get_latest_release_uses_cache(version_service, mock_release_data):
+ """Test that cache is used when available and not expired."""
+ # Set up cache
+ version_service._cache = mock_release_data
+ version_service._cache_time = datetime.now()
+
+ with patch("httpx.AsyncClient") as mock_client_class:
+ result = await version_service.get_latest_release()
+
+ # Should not make HTTP request
+ mock_client_class.assert_not_called()
+ assert result == mock_release_data
+
+
+@pytest.mark.asyncio
+async def test_get_latest_release_cache_expired(version_service, mock_release_data):
+ """Test that cache is refreshed when expired."""
+ # Set up expired cache
+ old_data = {"tag_name": "v0.1.0"}
+ version_service._cache = old_data
+ version_service._cache_time = datetime.now() - timedelta(hours=2)
+
+ with patch("httpx.AsyncClient") as mock_client_class:
+ mock_client = AsyncMock()
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = mock_release_data
+ mock_client.get.return_value = mock_response
+ mock_client_class.return_value.__aenter__.return_value = mock_client
+
+ result = await version_service.get_latest_release()
+
+ # Should make new HTTP request
+ mock_client.get.assert_called_once()
+ assert result == mock_release_data
+ assert version_service._cache == mock_release_data
+
+
+@pytest.mark.asyncio
+async def test_get_latest_release_404(version_service):
+ """Test handling of 404 (no releases)."""
+ with patch("httpx.AsyncClient") as mock_client_class:
+ mock_client = AsyncMock()
+ mock_response = MagicMock()
+ mock_response.status_code = 404
+ mock_client.get.return_value = mock_response
+ mock_client_class.return_value.__aenter__.return_value = mock_client
+
+ result = await version_service.get_latest_release()
+
+ assert result is None
+
+
+@pytest.mark.asyncio
+async def test_get_latest_release_timeout(version_service, mock_release_data):
+ """Test handling of timeout with cache fallback."""
+ # Set up cache
+ version_service._cache = mock_release_data
+ version_service._cache_time = datetime.now() - timedelta(hours=2) # Expired
+
+ with patch("httpx.AsyncClient") as mock_client_class:
+ mock_client = AsyncMock()
+ mock_client.get.side_effect = httpx.TimeoutException("Timeout")
+ mock_client_class.return_value.__aenter__.return_value = mock_client
+
+ result = await version_service.get_latest_release()
+
+ # Should return cached data
+ assert result == mock_release_data
+
+
+@pytest.mark.asyncio
+async def test_check_for_updates_new_version_available(version_service, mock_release_data):
+ """Test when a new version is available."""
+ with patch.object(version_service, "get_latest_release", return_value=mock_release_data):
+ result = await version_service.check_for_updates()
+
+ assert result["current"] == ARCHON_VERSION
+ assert result["latest"] == "0.2.0"
+ assert result["update_available"] is True
+ assert result["release_url"] == mock_release_data["html_url"]
+ assert result["release_notes"] == mock_release_data["body"]
+ assert result["published_at"] == datetime.fromisoformat("2025-01-01T00:00:00+00:00")
+ assert result["author"] == "coleam00"
+ assert len(result["assets"]) == 1
+
+
+@pytest.mark.asyncio
+async def test_check_for_updates_same_version(version_service):
+ """Test when current version is up to date."""
+ mock_data = {"tag_name": f"v{ARCHON_VERSION}", "html_url": "test_url", "body": "notes"}
+
+ with patch.object(version_service, "get_latest_release", return_value=mock_data):
+ result = await version_service.check_for_updates()
+
+ assert result["current"] == ARCHON_VERSION
+ assert result["latest"] == ARCHON_VERSION
+ assert result["update_available"] is False
+
+
+@pytest.mark.asyncio
+async def test_check_for_updates_no_release(version_service):
+ """Test when no releases are found."""
+ with patch.object(version_service, "get_latest_release", return_value=None):
+ result = await version_service.check_for_updates()
+
+ assert result["current"] == ARCHON_VERSION
+ assert result["latest"] is None
+ assert result["update_available"] is False
+ assert result["release_url"] is None
+
+
+@pytest.mark.asyncio
+async def test_check_for_updates_parse_version(version_service, mock_release_data):
+ """Test version parsing with and without 'v' prefix."""
+ # Test with 'v' prefix
+ mock_release_data["tag_name"] = "v1.2.3"
+ with patch.object(version_service, "get_latest_release", return_value=mock_release_data):
+ result = await version_service.check_for_updates()
+ assert result["latest"] == "1.2.3"
+
+ # Test without 'v' prefix
+ mock_release_data["tag_name"] = "2.0.0"
+ with patch.object(version_service, "get_latest_release", return_value=mock_release_data):
+ result = await version_service.check_for_updates()
+ assert result["latest"] == "2.0.0"
+
+
+@pytest.mark.asyncio
+async def test_check_for_updates_missing_fields(version_service):
+ """Test handling of incomplete release data."""
+ mock_data = {"tag_name": "v0.2.0"} # Minimal data
+
+ with patch.object(version_service, "get_latest_release", return_value=mock_data):
+ result = await version_service.check_for_updates()
+
+ assert result["latest"] == "0.2.0"
+ assert result["release_url"] is None
+ assert result["release_notes"] is None
+ assert result["published_at"] is None
+ assert result["author"] is None
+ assert result["assets"] == [] # Empty list, not None
+
+
+def test_clear_cache(version_service, mock_release_data):
+ """Test cache clearing."""
+ # Set up cache
+ version_service._cache = mock_release_data
+ version_service._cache_time = datetime.now()
+
+ # Clear cache
+ version_service.clear_cache()
+
+ assert version_service._cache is None
+ assert version_service._cache_time is None
+
+
+def test_is_newer_version():
+ """Test version comparison logic using the utility function."""
+ from src.server.utils.semantic_version import is_newer_version
+
+ # Test various version comparisons
+ assert is_newer_version("1.0.0", "2.0.0") is True
+ assert is_newer_version("2.0.0", "1.0.0") is False
+ assert is_newer_version("1.0.0", "1.0.0") is False
+ assert is_newer_version("1.0.0", "1.1.0") is True
+ assert is_newer_version("1.0.0", "1.0.1") is True
+ assert is_newer_version("1.2.3", "1.2.3") is False
\ No newline at end of file
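The semantic_version utility itself is not part of this patch. For intuition, a comparison with the semantics these assertions require might look like the following TypeScript sketch (hypothetical; the project's actual implementation is Python and may differ):

```typescript
// Hypothetical rendering of the semantics asserted above:
// is_newer_version(current, latest) is true only when latest > current.
function isNewerVersion(current: string, latest: string): boolean {
  const parse = (v: string) => v.split(".").map((part) => Number.parseInt(part, 10));
  const a = parse(current);
  const b = parse(latest);
  for (let i = 0; i < Math.max(a.length, b.length); i++) {
    const x = a[i] ?? 0; // treat missing components as 0
    const y = b[i] ?? 0;
    if (x !== y) return y > x;
  }
  return false; // equal versions are not "newer" (1.2.3 vs 1.2.3 is false)
}
```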
From d3a5c3311a96da9c1bbeb9af397ca2fdd0484021 Mon Sep 17 00:00:00 2001
From: Wirasm <152263317+Wirasm@users.noreply.github.com>
Date: Mon, 22 Sep 2025 12:54:55 +0300
Subject: [PATCH 4/7] refactor: move shared hooks from ui/hooks to shared/hooks
(#729)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Reorganize hook structure to follow vertical slice architecture:
- Move useSmartPolling, useThemeAware, useToast to features/shared/hooks
- Update 38+ import statements across the codebase
- Update test file mocks to reference new locations
- Remove old ui/hooks directory
This change aligns shared utilities with the architectural pattern
where truly shared code resides in the shared directory.
🤖 Generated with [Claude Code](https://claude.ai/code)
Co-authored-by: Claude
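For consumers the change is purely a path move; a minimal sketch of the before/after convention (import paths and hook return shapes are taken from the diffs and test mocks below; the consuming hook itself is hypothetical):

```typescript
// Before this patch, shared hooks were imported from the UI slice:
//   import { useSmartPolling } from "../../ui/hooks";
//   import { useToast } from "../../ui/hooks/useToast";
// After this patch, they live in the shared slice:
import { useSmartPolling } from "@/features/shared/hooks";
import { useToast } from "@/features/shared/hooks/useToast";

// Hypothetical consumer combining both hooks; return shapes match the
// mocks in the updated test files (refetchInterval, showToast).
export function useExamplePolling(intervalMs = 5000) {
  const { showToast } = useToast();
  const { refetchInterval } = useSmartPolling(intervalMs);
  return { refetchInterval, notify: showToast };
}
```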
---
archon-ui-main/src/components/bug-report/BugReportModal.tsx | 2 +-
archon-ui-main/src/components/layout/MainLayout.tsx | 2 +-
archon-ui-main/src/components/onboarding/ProviderStep.tsx | 2 +-
archon-ui-main/src/components/settings/APIKeysSection.tsx | 2 +-
.../src/components/settings/CodeExtractionSettings.tsx | 2 +-
archon-ui-main/src/components/settings/FeaturesSection.tsx | 2 +-
archon-ui-main/src/components/settings/IDEGlobalRules.tsx | 2 +-
.../src/components/settings/OllamaConfigurationPanel.tsx | 2 +-
.../src/components/settings/OllamaInstanceHealthIndicator.tsx | 2 +-
.../src/components/settings/OllamaModelDiscoveryModal.tsx | 2 +-
.../src/components/settings/OllamaModelSelectionModal.tsx | 2 +-
archon-ui-main/src/components/settings/RAGSettings.tsx | 2 +-
.../src/features/knowledge/components/AddKnowledgeDialog.tsx | 2 +-
.../src/features/knowledge/components/KnowledgeTable.tsx | 2 +-
.../knowledge/hooks/tests/useKnowledgeQueries.test.ts | 4 ++--
.../src/features/knowledge/hooks/useKnowledgeQueries.ts | 4 ++--
archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx | 2 +-
.../src/features/mcp/components/McpConfigSection.tsx | 2 +-
archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts | 2 +-
.../src/features/progress/hooks/useProgressQueries.ts | 2 +-
.../src/features/projects/components/ProjectCardActions.tsx | 2 +-
.../features/projects/hooks/tests/useProjectQueries.test.ts | 4 ++--
.../src/features/projects/hooks/useProjectQueries.ts | 4 ++--
.../features/projects/tasks/components/TaskCardActions.tsx | 2 +-
.../projects/tasks/hooks/tests/useTaskQueries.test.ts | 4 ++--
.../src/features/projects/tasks/hooks/useTaskEditor.ts | 2 +-
.../src/features/projects/tasks/hooks/useTaskQueries.ts | 4 ++--
.../settings/migrations/components/PendingMigrationsModal.tsx | 2 +-
.../features/settings/migrations/hooks/useMigrationQueries.ts | 2 +-
.../src/features/settings/version/hooks/useVersionQueries.ts | 2 +-
archon-ui-main/src/features/{ui => shared}/hooks/index.ts | 2 +-
.../{ui => shared}/hooks/tests/useSmartPolling.test.ts | 0
.../src/features/{ui => shared}/hooks/useSmartPolling.ts | 0
.../src/features/{ui => shared}/hooks/useThemeAware.ts | 0
archon-ui-main/src/features/{ui => shared}/hooks/useToast.ts | 2 +-
archon-ui-main/src/features/ui/components/ToastProvider.tsx | 2 +-
archon-ui-main/src/pages/SettingsPage.tsx | 2 +-
37 files changed, 40 insertions(+), 40 deletions(-)
rename archon-ui-main/src/features/{ui => shared}/hooks/index.ts (70%)
rename archon-ui-main/src/features/{ui => shared}/hooks/tests/useSmartPolling.test.ts (100%)
rename archon-ui-main/src/features/{ui => shared}/hooks/useSmartPolling.ts (100%)
rename archon-ui-main/src/features/{ui => shared}/hooks/useThemeAware.ts (100%)
rename archon-ui-main/src/features/{ui => shared}/hooks/useToast.ts (97%)
diff --git a/archon-ui-main/src/components/bug-report/BugReportModal.tsx b/archon-ui-main/src/components/bug-report/BugReportModal.tsx
index 69b40262..2bfcb007 100644
--- a/archon-ui-main/src/components/bug-report/BugReportModal.tsx
+++ b/archon-ui-main/src/components/bug-report/BugReportModal.tsx
@@ -5,7 +5,7 @@ import { Button } from "../ui/Button";
import { Input } from "../ui/Input";
import { Card } from "../ui/Card";
import { Select } from "../ui/Select";
-import { useToast } from "../../features/ui/hooks/useToast";
+import { useToast } from "../../features/shared/hooks/useToast";
import {
bugReportService,
BugContext,
diff --git a/archon-ui-main/src/components/layout/MainLayout.tsx b/archon-ui-main/src/components/layout/MainLayout.tsx
index da0b2696..73fcc1de 100644
--- a/archon-ui-main/src/components/layout/MainLayout.tsx
+++ b/archon-ui-main/src/components/layout/MainLayout.tsx
@@ -2,7 +2,7 @@ import { AlertCircle, WifiOff } from "lucide-react";
import type React from "react";
import { useEffect } from "react";
import { useLocation, useNavigate } from "react-router-dom";
-import { useToast } from "../../features/ui/hooks/useToast";
+import { useToast } from "../../features/shared/hooks/useToast";
import { cn } from "../../lib/utils";
import { credentialsService } from "../../services/credentialsService";
import { isLmConfigured } from "../../utils/onboarding";
diff --git a/archon-ui-main/src/components/onboarding/ProviderStep.tsx b/archon-ui-main/src/components/onboarding/ProviderStep.tsx
index 546be5f7..1beae073 100644
--- a/archon-ui-main/src/components/onboarding/ProviderStep.tsx
+++ b/archon-ui-main/src/components/onboarding/ProviderStep.tsx
@@ -3,7 +3,7 @@ import { Key, ExternalLink, Save, Loader } from "lucide-react";
import { Input } from "../ui/Input";
import { Button } from "../ui/Button";
import { Select } from "../ui/Select";
-import { useToast } from "../../features/ui/hooks/useToast";
+import { useToast } from "../../features/shared/hooks/useToast";
import { credentialsService } from "../../services/credentialsService";
interface ProviderStepProps {
diff --git a/archon-ui-main/src/components/settings/APIKeysSection.tsx b/archon-ui-main/src/components/settings/APIKeysSection.tsx
index 231e1125..0d926014 100644
--- a/archon-ui-main/src/components/settings/APIKeysSection.tsx
+++ b/archon-ui-main/src/components/settings/APIKeysSection.tsx
@@ -4,7 +4,7 @@ import { Input } from '../ui/Input';
import { Button } from '../ui/Button';
import { Card } from '../ui/Card';
import { credentialsService, Credential } from '../../services/credentialsService';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
interface CustomCredential {
key: string;
diff --git a/archon-ui-main/src/components/settings/CodeExtractionSettings.tsx b/archon-ui-main/src/components/settings/CodeExtractionSettings.tsx
index 2e7d40fb..2dd322df 100644
--- a/archon-ui-main/src/components/settings/CodeExtractionSettings.tsx
+++ b/archon-ui-main/src/components/settings/CodeExtractionSettings.tsx
@@ -3,7 +3,7 @@ import { Code, Check, Save, Loader } from 'lucide-react';
import { Card } from '../ui/Card';
import { Input } from '../ui/Input';
import { Button } from '../ui/Button';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
import { credentialsService } from '../../services/credentialsService';
interface CodeExtractionSettingsProps {
diff --git a/archon-ui-main/src/components/settings/FeaturesSection.tsx b/archon-ui-main/src/components/settings/FeaturesSection.tsx
index 5fc57fb4..0a61cf5c 100644
--- a/archon-ui-main/src/components/settings/FeaturesSection.tsx
+++ b/archon-ui-main/src/components/settings/FeaturesSection.tsx
@@ -4,7 +4,7 @@ import { Toggle } from '../ui/Toggle';
import { Card } from '../ui/Card';
import { useTheme } from '../../contexts/ThemeContext';
import { credentialsService } from '../../services/credentialsService';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
import { serverHealthService } from '../../services/serverHealthService';
export const FeaturesSection = () => {
diff --git a/archon-ui-main/src/components/settings/IDEGlobalRules.tsx b/archon-ui-main/src/components/settings/IDEGlobalRules.tsx
index 7f65ce4b..b4e29ef9 100644
--- a/archon-ui-main/src/components/settings/IDEGlobalRules.tsx
+++ b/archon-ui-main/src/components/settings/IDEGlobalRules.tsx
@@ -2,7 +2,7 @@ import { useState } from 'react';
import { FileCode, Copy, Check } from 'lucide-react';
import { Card } from '../ui/Card';
import { Button } from '../ui/Button';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
import { copyToClipboard } from '../../features/shared/utils/clipboard';
type RuleType = 'claude' | 'universal';
diff --git a/archon-ui-main/src/components/settings/OllamaConfigurationPanel.tsx b/archon-ui-main/src/components/settings/OllamaConfigurationPanel.tsx
index c4a9e267..4da6f9a0 100644
--- a/archon-ui-main/src/components/settings/OllamaConfigurationPanel.tsx
+++ b/archon-ui-main/src/components/settings/OllamaConfigurationPanel.tsx
@@ -3,7 +3,7 @@ import { Card } from '../ui/Card';
import { Button } from '../ui/Button';
import { Input } from '../ui/Input';
import { Badge } from '../ui/Badge';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
import { cn } from '../../lib/utils';
import { credentialsService, OllamaInstance } from '../../services/credentialsService';
import { OllamaModelDiscoveryModal } from './OllamaModelDiscoveryModal';
diff --git a/archon-ui-main/src/components/settings/OllamaInstanceHealthIndicator.tsx b/archon-ui-main/src/components/settings/OllamaInstanceHealthIndicator.tsx
index c65b2159..4c646dfa 100644
--- a/archon-ui-main/src/components/settings/OllamaInstanceHealthIndicator.tsx
+++ b/archon-ui-main/src/components/settings/OllamaInstanceHealthIndicator.tsx
@@ -3,7 +3,7 @@ import { Badge } from '../ui/Badge';
import { Button } from '../ui/Button';
import { Card } from '../ui/Card';
import { cn } from '../../lib/utils';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
import { ollamaService } from '../../services/ollamaService';
import type { HealthIndicatorProps } from './types/OllamaTypes';
diff --git a/archon-ui-main/src/components/settings/OllamaModelDiscoveryModal.tsx b/archon-ui-main/src/components/settings/OllamaModelDiscoveryModal.tsx
index 7525f1bd..53a698b5 100644
--- a/archon-ui-main/src/components/settings/OllamaModelDiscoveryModal.tsx
+++ b/archon-ui-main/src/components/settings/OllamaModelDiscoveryModal.tsx
@@ -13,7 +13,7 @@ import { Button } from '../ui/Button';
import { Input } from '../ui/Input';
import { Badge } from '../ui/Badge';
import { Card } from '../ui/Card';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
import { ollamaService, type OllamaModel, type ModelDiscoveryResponse } from '../../services/ollamaService';
import type { OllamaInstance, ModelSelectionState } from './types/OllamaTypes';
diff --git a/archon-ui-main/src/components/settings/OllamaModelSelectionModal.tsx b/archon-ui-main/src/components/settings/OllamaModelSelectionModal.tsx
index 9933526a..3c539f9c 100644
--- a/archon-ui-main/src/components/settings/OllamaModelSelectionModal.tsx
+++ b/archon-ui-main/src/components/settings/OllamaModelSelectionModal.tsx
@@ -3,7 +3,7 @@ import ReactDOM from 'react-dom';
import { X, Search, RotateCcw, Zap, Server, Eye, Settings, Download, Box } from 'lucide-react';
import { Button } from '../ui/Button';
import { Input } from '../ui/Input';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
interface ContextInfo {
current?: number;
diff --git a/archon-ui-main/src/components/settings/RAGSettings.tsx b/archon-ui-main/src/components/settings/RAGSettings.tsx
index a60925bb..ccba61ce 100644
--- a/archon-ui-main/src/components/settings/RAGSettings.tsx
+++ b/archon-ui-main/src/components/settings/RAGSettings.tsx
@@ -4,7 +4,7 @@ import { Card } from '../ui/Card';
import { Input } from '../ui/Input';
import { Select } from '../ui/Select';
import { Button } from '../ui/Button';
-import { useToast } from '../../features/ui/hooks/useToast';
+import { useToast } from '../../features/shared/hooks/useToast';
import { credentialsService } from '../../services/credentialsService';
import OllamaModelDiscoveryModal from './OllamaModelDiscoveryModal';
import OllamaModelSelectionModal from './OllamaModelSelectionModal';
diff --git a/archon-ui-main/src/features/knowledge/components/AddKnowledgeDialog.tsx b/archon-ui-main/src/features/knowledge/components/AddKnowledgeDialog.tsx
index f6c7bc2a..3788affd 100644
--- a/archon-ui-main/src/features/knowledge/components/AddKnowledgeDialog.tsx
+++ b/archon-ui-main/src/features/knowledge/components/AddKnowledgeDialog.tsx
@@ -5,7 +5,7 @@
import { Globe, Loader2, Upload } from "lucide-react";
import { useId, useState } from "react";
-import { useToast } from "../../ui/hooks/useToast";
+import { useToast } from "@/features/shared/hooks/useToast";
import { Button, Input, Label } from "../../ui/primitives";
import { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle } from "../../ui/primitives/dialog";
import { cn } from "../../ui/primitives/styles";
diff --git a/archon-ui-main/src/features/knowledge/components/KnowledgeTable.tsx b/archon-ui-main/src/features/knowledge/components/KnowledgeTable.tsx
index 18985523..63844333 100644
--- a/archon-ui-main/src/features/knowledge/components/KnowledgeTable.tsx
+++ b/archon-ui-main/src/features/knowledge/components/KnowledgeTable.tsx
@@ -6,7 +6,7 @@
import { formatDistanceToNowStrict } from "date-fns";
import { Code, ExternalLink, Eye, FileText, MoreHorizontal, Trash2 } from "lucide-react";
import { useState } from "react";
-import { useToast } from "../../ui/hooks/useToast";
+import { useToast } from "@/features/shared/hooks/useToast";
import { Button } from "../../ui/primitives";
import {
DropdownMenu,
diff --git a/archon-ui-main/src/features/knowledge/hooks/tests/useKnowledgeQueries.test.ts b/archon-ui-main/src/features/knowledge/hooks/tests/useKnowledgeQueries.test.ts
index 630f213a..c2251e03 100644
--- a/archon-ui-main/src/features/knowledge/hooks/tests/useKnowledgeQueries.test.ts
+++ b/archon-ui-main/src/features/knowledge/hooks/tests/useKnowledgeQueries.test.ts
@@ -23,14 +23,14 @@ vi.mock("../../services", () => ({
}));
// Mock the toast hook
-vi.mock("../../../ui/hooks/useToast", () => ({
+vi.mock("@/features/shared/hooks/useToast", () => ({
useToast: () => ({
showToast: vi.fn(),
}),
}));
// Mock smart polling
-vi.mock("../../../ui/hooks", () => ({
+vi.mock("@/features/shared/hooks", () => ({
useSmartPolling: () => ({
refetchInterval: 30000,
isPaused: false,
diff --git a/archon-ui-main/src/features/knowledge/hooks/useKnowledgeQueries.ts b/archon-ui-main/src/features/knowledge/hooks/useKnowledgeQueries.ts
index 874499e2..5a45561d 100644
--- a/archon-ui-main/src/features/knowledge/hooks/useKnowledgeQueries.ts
+++ b/archon-ui-main/src/features/knowledge/hooks/useKnowledgeQueries.ts
@@ -10,8 +10,8 @@ import { useActiveOperations } from "../../progress/hooks";
import { progressKeys } from "../../progress/hooks/useProgressQueries";
import type { ActiveOperation, ActiveOperationsResponse } from "../../progress/types";
import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../shared/queryPatterns";
-import { useSmartPolling } from "../../ui/hooks";
-import { useToast } from "../../ui/hooks/useToast";
+import { useSmartPolling } from "@/features/shared/hooks";
+import { useToast } from "@/features/shared/hooks/useToast";
import { knowledgeService } from "../services";
import type {
CrawlRequest,
diff --git a/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx b/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx
index 20d43650..6f6a66df 100644
--- a/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx
+++ b/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx
@@ -6,7 +6,7 @@
import { useEffect, useMemo, useRef, useState } from "react";
import { CrawlingProgress } from "../../progress/components/CrawlingProgress";
import type { ActiveOperation } from "../../progress/types";
-import { useToast } from "../../ui/hooks/useToast";
+import { useToast } from "@/features/shared/hooks/useToast";
import { AddKnowledgeDialog } from "../components/AddKnowledgeDialog";
import { KnowledgeHeader } from "../components/KnowledgeHeader";
import { KnowledgeList } from "../components/KnowledgeList";
diff --git a/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx b/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx
index 3f011f9d..c36b2f01 100644
--- a/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx
+++ b/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx
@@ -1,7 +1,7 @@
import { Copy, ExternalLink } from "lucide-react";
import type React from "react";
import { useState } from "react";
-import { useToast } from "../../ui/hooks";
+import { useToast } from "@/features/shared/hooks";
import { Button, cn, glassmorphism, Tabs, TabsContent, TabsList, TabsTrigger } from "../../ui/primitives";
import type { McpServerConfig, McpServerStatus, SupportedIDE } from "../types";
import { copyToClipboard } from "../../shared/utils/clipboard";
diff --git a/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts b/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts
index 409694f5..aef5ec68 100644
--- a/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts
+++ b/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts
@@ -1,6 +1,6 @@
import { useQuery } from "@tanstack/react-query";
import { STALE_TIMES } from "../../shared/queryPatterns";
-import { useSmartPolling } from "../../ui/hooks";
+import { useSmartPolling } from "@/features/shared/hooks";
import { mcpApi } from "../services";
// Query keys factory
diff --git a/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts b/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts
index 19c8e401..ae82ba17 100644
--- a/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts
+++ b/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts
@@ -7,7 +7,7 @@ import { type UseQueryResult, useQueries, useQuery, useQueryClient } from "@tans
import { useEffect, useMemo, useRef } from "react";
import { APIServiceError } from "../../shared/errors";
import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../shared/queryPatterns";
-import { useSmartPolling } from "../../ui/hooks";
+import { useSmartPolling } from "../../shared/hooks";
import { progressService } from "../services";
import type { ActiveOperationsResponse, ProgressResponse, ProgressStatus } from "../types";
diff --git a/archon-ui-main/src/features/projects/components/ProjectCardActions.tsx b/archon-ui-main/src/features/projects/components/ProjectCardActions.tsx
index 06a9f57d..fa10e71d 100644
--- a/archon-ui-main/src/features/projects/components/ProjectCardActions.tsx
+++ b/archon-ui-main/src/features/projects/components/ProjectCardActions.tsx
@@ -1,6 +1,6 @@
import { Clipboard, Pin, Trash2 } from "lucide-react";
import type React from "react";
-import { useToast } from "../../ui/hooks/useToast";
+import { useToast } from "@/features/shared/hooks/useToast";
import { cn, glassmorphism } from "../../ui/primitives/styles";
import { SimpleTooltip } from "../../ui/primitives/tooltip";
diff --git a/archon-ui-main/src/features/projects/hooks/tests/useProjectQueries.test.ts b/archon-ui-main/src/features/projects/hooks/tests/useProjectQueries.test.ts
index 1ad07cf4..19601382 100644
--- a/archon-ui-main/src/features/projects/hooks/tests/useProjectQueries.test.ts
+++ b/archon-ui-main/src/features/projects/hooks/tests/useProjectQueries.test.ts
@@ -20,14 +20,14 @@ vi.mock("../../services", () => ({
}));
// Mock the toast hook
-vi.mock("../../../ui/hooks/useToast", () => ({
+vi.mock("@/features/shared/hooks/useToast", () => ({
useToast: () => ({
showToast: vi.fn(),
}),
}));
// Mock smart polling
-vi.mock("../../../ui/hooks", () => ({
+vi.mock("@/features/shared/hooks", () => ({
useSmartPolling: () => ({
refetchInterval: 5000,
isPaused: false,
diff --git a/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts b/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts
index eaa85e66..ae216e66 100644
--- a/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts
+++ b/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts
@@ -6,8 +6,8 @@ import {
replaceOptimisticEntity,
} from "@/features/shared/optimistic";
import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../shared/queryPatterns";
-import { useSmartPolling } from "../../ui/hooks";
-import { useToast } from "../../ui/hooks/useToast";
+import { useSmartPolling } from "@/features/shared/hooks";
+import { useToast } from "@/features/shared/hooks/useToast";
import { projectService } from "../services";
import type { CreateProjectRequest, Project, UpdateProjectRequest } from "../types";
diff --git a/archon-ui-main/src/features/projects/tasks/components/TaskCardActions.tsx b/archon-ui-main/src/features/projects/tasks/components/TaskCardActions.tsx
index 3070d521..7bf60a31 100644
--- a/archon-ui-main/src/features/projects/tasks/components/TaskCardActions.tsx
+++ b/archon-ui-main/src/features/projects/tasks/components/TaskCardActions.tsx
@@ -1,6 +1,6 @@
import { Clipboard, Edit, Trash2 } from "lucide-react";
import type React from "react";
-import { useToast } from "../../../ui/hooks/useToast";
+import { useToast } from "@/features/shared/hooks/useToast";
import { cn, glassmorphism } from "../../../ui/primitives/styles";
import { SimpleTooltip } from "../../../ui/primitives/tooltip";
diff --git a/archon-ui-main/src/features/projects/tasks/hooks/tests/useTaskQueries.test.ts b/archon-ui-main/src/features/projects/tasks/hooks/tests/useTaskQueries.test.ts
index ed1c6089..b2612637 100644
--- a/archon-ui-main/src/features/projects/tasks/hooks/tests/useTaskQueries.test.ts
+++ b/archon-ui-main/src/features/projects/tasks/hooks/tests/useTaskQueries.test.ts
@@ -20,14 +20,14 @@ vi.mock("../../services", () => ({
const showToastMock = vi.fn();
// Mock the toast hook
-vi.mock("../../../../ui/hooks/useToast", () => ({
+vi.mock("../../../../shared/hooks/useToast", () => ({
useToast: () => ({
showToast: showToastMock,
}),
}));
// Mock smart polling
-vi.mock("../../../../ui/hooks", () => ({
+vi.mock("../../../../shared/hooks", () => ({
useSmartPolling: () => ({
refetchInterval: 5000,
isPaused: false,
diff --git a/archon-ui-main/src/features/projects/tasks/hooks/useTaskEditor.ts b/archon-ui-main/src/features/projects/tasks/hooks/useTaskEditor.ts
index efb37ab6..fff35286 100644
--- a/archon-ui-main/src/features/projects/tasks/hooks/useTaskEditor.ts
+++ b/archon-ui-main/src/features/projects/tasks/hooks/useTaskEditor.ts
@@ -1,5 +1,5 @@
import { useCallback } from "react";
-import { useToast } from "../../../ui/hooks/useToast";
+import { useToast } from "@/features/shared/hooks/useToast";
import { useProjectFeatures } from "../../hooks/useProjectQueries";
import type { Assignee, CreateTaskRequest, Task, UpdateTaskRequest, UseTaskEditorReturn } from "../types";
import { useCreateTask, useUpdateTask } from "./useTaskQueries";
diff --git a/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts b/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts
index b39cbb18..55b4bbd0 100644
--- a/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts
+++ b/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts
@@ -6,8 +6,8 @@ import {
type OptimisticEntity,
} from "@/features/shared/optimistic";
import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../../shared/queryPatterns";
-import { useSmartPolling } from "../../../ui/hooks";
-import { useToast } from "../../../ui/hooks/useToast";
+import { useSmartPolling } from "../../../shared/hooks";
+import { useToast } from "../../../shared/hooks/useToast";
import { taskService } from "../services";
import type { CreateTaskRequest, Task, UpdateTaskRequest } from "../types";
diff --git a/archon-ui-main/src/features/settings/migrations/components/PendingMigrationsModal.tsx b/archon-ui-main/src/features/settings/migrations/components/PendingMigrationsModal.tsx
index f4bd23c0..ff5ec746 100644
--- a/archon-ui-main/src/features/settings/migrations/components/PendingMigrationsModal.tsx
+++ b/archon-ui-main/src/features/settings/migrations/components/PendingMigrationsModal.tsx
@@ -6,7 +6,7 @@ import { AnimatePresence, motion } from "framer-motion";
import { CheckCircle, Copy, Database, ExternalLink, X } from "lucide-react";
import React from "react";
import { copyToClipboard } from "@/features/shared/utils/clipboard";
-import { useToast } from "@/features/ui/hooks/useToast";
+import { useToast } from "@/features/shared/hooks/useToast";
import type { PendingMigration } from "../types";
interface PendingMigrationsModalProps {
diff --git a/archon-ui-main/src/features/settings/migrations/hooks/useMigrationQueries.ts b/archon-ui-main/src/features/settings/migrations/hooks/useMigrationQueries.ts
index 1c2a6d7e..7a44ff8d 100644
--- a/archon-ui-main/src/features/settings/migrations/hooks/useMigrationQueries.ts
+++ b/archon-ui-main/src/features/settings/migrations/hooks/useMigrationQueries.ts
@@ -4,7 +4,7 @@
import { useQuery } from "@tanstack/react-query";
import { STALE_TIMES } from "@/features/shared/queryPatterns";
-import { useSmartPolling } from "@/features/ui/hooks/useSmartPolling";
+import { useSmartPolling } from "@/features/shared/hooks/useSmartPolling";
import { migrationService } from "../services/migrationService";
import type { MigrationHistoryResponse, MigrationStatusResponse, PendingMigration } from "../types";
diff --git a/archon-ui-main/src/features/settings/version/hooks/useVersionQueries.ts b/archon-ui-main/src/features/settings/version/hooks/useVersionQueries.ts
index e1aefbd8..f9ea9165 100644
--- a/archon-ui-main/src/features/settings/version/hooks/useVersionQueries.ts
+++ b/archon-ui-main/src/features/settings/version/hooks/useVersionQueries.ts
@@ -4,7 +4,7 @@
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { STALE_TIMES } from "@/features/shared/queryPatterns";
-import { useSmartPolling } from "@/features/ui/hooks/useSmartPolling";
+import { useSmartPolling } from "@/features/shared/hooks/useSmartPolling";
import { versionService } from "../services/versionService";
import type { VersionCheckResponse } from "../types";
diff --git a/archon-ui-main/src/features/ui/hooks/index.ts b/archon-ui-main/src/features/shared/hooks/index.ts
similarity index 70%
rename from archon-ui-main/src/features/ui/hooks/index.ts
rename to archon-ui-main/src/features/shared/hooks/index.ts
index b23209b4..db280d76 100644
--- a/archon-ui-main/src/features/ui/hooks/index.ts
+++ b/archon-ui-main/src/features/shared/hooks/index.ts
@@ -1,3 +1,3 @@
export * from "./useSmartPolling";
export * from "./useThemeAware";
-export * from "./useToast";
+export * from "./useToast";
\ No newline at end of file
diff --git a/archon-ui-main/src/features/ui/hooks/tests/useSmartPolling.test.ts b/archon-ui-main/src/features/shared/hooks/tests/useSmartPolling.test.ts
similarity index 100%
rename from archon-ui-main/src/features/ui/hooks/tests/useSmartPolling.test.ts
rename to archon-ui-main/src/features/shared/hooks/tests/useSmartPolling.test.ts
diff --git a/archon-ui-main/src/features/ui/hooks/useSmartPolling.ts b/archon-ui-main/src/features/shared/hooks/useSmartPolling.ts
similarity index 100%
rename from archon-ui-main/src/features/ui/hooks/useSmartPolling.ts
rename to archon-ui-main/src/features/shared/hooks/useSmartPolling.ts
diff --git a/archon-ui-main/src/features/ui/hooks/useThemeAware.ts b/archon-ui-main/src/features/shared/hooks/useThemeAware.ts
similarity index 100%
rename from archon-ui-main/src/features/ui/hooks/useThemeAware.ts
rename to archon-ui-main/src/features/shared/hooks/useThemeAware.ts
diff --git a/archon-ui-main/src/features/ui/hooks/useToast.ts b/archon-ui-main/src/features/shared/hooks/useToast.ts
similarity index 97%
rename from archon-ui-main/src/features/ui/hooks/useToast.ts
rename to archon-ui-main/src/features/shared/hooks/useToast.ts
index 6e71297e..49b40139 100644
--- a/archon-ui-main/src/features/ui/hooks/useToast.ts
+++ b/archon-ui-main/src/features/shared/hooks/useToast.ts
@@ -1,6 +1,6 @@
import { AlertCircle, CheckCircle, Info, XCircle } from "lucide-react";
import { createContext, useCallback, useContext, useEffect, useRef, useState } from "react";
-import { createOptimisticId } from "../../shared/optimistic";
+import { createOptimisticId } from "../optimistic";
// Toast types
interface Toast {
diff --git a/archon-ui-main/src/features/ui/components/ToastProvider.tsx b/archon-ui-main/src/features/ui/components/ToastProvider.tsx
index 1657ac80..8a3c476b 100644
--- a/archon-ui-main/src/features/ui/components/ToastProvider.tsx
+++ b/archon-ui-main/src/features/ui/components/ToastProvider.tsx
@@ -1,5 +1,5 @@
import type React from "react";
-import { createToastContext, getToastIcon, ToastContext } from "../hooks/useToast";
+import { createToastContext, getToastIcon, ToastContext } from "../../shared/hooks/useToast";
import {
ToastProvider as RadixToastProvider,
Toast,
diff --git a/archon-ui-main/src/pages/SettingsPage.tsx b/archon-ui-main/src/pages/SettingsPage.tsx
index 20c3c412..35136616 100644
--- a/archon-ui-main/src/pages/SettingsPage.tsx
+++ b/archon-ui-main/src/pages/SettingsPage.tsx
@@ -14,7 +14,7 @@ import {
Database,
} from "lucide-react";
import { motion, AnimatePresence } from "framer-motion";
-import { useToast } from "../features/ui/hooks/useToast";
+import { useToast } from "../features/shared/hooks/useToast";
import { useSettings } from "../contexts/SettingsContext";
import { useStaggeredEntrance } from "../hooks/useStaggeredEntrance";
import { FeaturesSection } from "../components/settings/FeaturesSection";
From 63a92cf7d75bd4fbc236816196d5c506f5c620c3 Mon Sep 17 00:00:00 2001
From: Wirasm <152263317+Wirasm@users.noreply.github.com>
Date: Mon, 22 Sep 2025 14:59:33 +0300
Subject: [PATCH 5/7] refactor: reorganize features/shared directory for better
maintainability (#730)
* refactor: reorganize features/shared directory structure
- Created organized subdirectories for better code organization:
- api/ - API clients and HTTP utilities (renamed apiWithEtag.ts to apiClient.ts)
- config/ - Configuration files (queryClient, queryPatterns)
- types/ - Shared type definitions (errors)
- utils/ - Pure utility functions (optimistic, clipboard)
- hooks/ - Shared React hooks (already existed)
- Updated all import paths across the codebase (40+ files)
- Updated all AI documentation in PRPs/ai_docs/ to reflect new structure
- All tests passing, build successful, no functional changes
This improves maintainability and follows vertical slice architecture patterns.
Co-Authored-By: Claude
* fix: address PR review comments and code improvements
- Update imports to use @/features alias path for optimistic utils
- Fix optimistic upload item replacement by matching on source_id instead of id
- Clean up test suite naming and remove meta-terms from comments
- Only set the Content-Type header on requests with a body
- Add explicit TypeScript typing to useProjectFeatures hook
- Complete Phase 4 improvements with proper query typing
* fix: address additional PR review feedback
- Clear feature queries when deleting project to prevent cache memory leaks
- Update KnowledgeCard comments to follow documentation guidelines
- Add explanatory comment for accessibility pattern in KnowledgeCard
---------
Co-authored-by: Claude
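A rough sketch of the import surface after this reorganization, assembled from the renames below (every path appears verbatim in this patch; the grouping comments mirror the commit message):

```typescript
// api/ - API clients and HTTP utilities (apiWithEtag.ts renamed to apiClient.ts)
import { callAPIWithETag } from "@/features/shared/api/apiClient";
// config/ - query client and shared query constants
import { queryClient } from "@/features/shared/config/queryClient";
import { DISABLED_QUERY_KEY, STALE_TIMES } from "@/features/shared/config/queryPatterns";
// types/ - shared type definitions
import { APIServiceError } from "@/features/shared/types/errors";
// utils/ - pure utility functions
import { createOptimisticEntity, isOptimistic } from "@/features/shared/utils/optimistic";
import { copyToClipboard } from "@/features/shared/utils/clipboard";
// hooks/ - shared React hooks (moved here in the previous patch)
import { useSmartPolling, useToast } from "@/features/shared/hooks";
```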
---
PRPs/ai_docs/API_NAMING_CONVENTIONS.md | 2 +-
PRPs/ai_docs/ARCHITECTURE.md | 6 +-
PRPs/ai_docs/DATA_FETCHING_ARCHITECTURE.md | 10 +-
PRPs/ai_docs/ETAG_IMPLEMENTATION.md | 10 +-
PRPs/ai_docs/QUERY_PATTERNS.md | 8 +-
PRPs/ai_docs/optimistic_updates.md | 6 +-
archon-ui-main/src/App.tsx | 2 +-
.../layout/hooks/useBackendHealth.ts | 4 +-
.../knowledge/components/KnowledgeCard.tsx | 7 +-
.../knowledge/hooks/useKnowledgeQueries.ts | 17 +-
.../components/KnowledgeInspector.tsx | 2 +-
.../inspector/hooks/useInspectorPagination.ts | 2 +-
.../knowledge/services/knowledgeService.ts | 4 +-
.../utils/tests/providerErrorHandler.test.ts | 206 +++++++++---------
.../knowledge/views/KnowledgeView.tsx | 2 +-
.../mcp/components/McpConfigSection.tsx | 5 +-
.../src/features/mcp/hooks/useMcpQueries.ts | 2 +-
.../src/features/mcp/services/mcpApi.ts | 2 +-
.../hooks/tests/useProgressQueries.test.ts | 2 +-
.../progress/hooks/useProgressQueries.ts | 4 +-
.../progress/services/progressService.ts | 2 +-
.../projects/components/ProjectCard.tsx | 2 +-
.../documents/components/DocumentCard.tsx | 2 +-
.../documents/hooks/useDocumentQueries.ts | 2 +-
.../projects/hooks/useProjectQueries.ts | 14 +-
.../projects/services/projectService.ts | 4 +-
.../projects/tasks/components/TaskCard.tsx | 2 +-
.../projects/tasks/hooks/useTaskQueries.ts | 8 +-
.../projects/tasks/services/taskService.ts | 4 +-
.../tasks/services/tests/taskService.test.ts | 4 +-
.../components/MigrationStatusCard.tsx | 6 +-
.../components/PendingMigrationsModal.tsx | 17 +-
.../migrations/hooks/useMigrationQueries.ts | 2 +-
.../migrations/services/migrationService.ts | 2 +-
.../version/components/UpdateBanner.tsx | 3 +-
.../version/components/VersionStatusCard.tsx | 3 +-
.../version/hooks/useVersionQueries.ts | 2 +-
.../version/services/versionService.ts | 2 +-
.../{apiWithEtag.ts => api/apiClient.ts} | 23 +-
.../tests/apiClient.test.ts} | 18 +-
.../shared/{ => config}/queryClient.ts | 0
.../shared/{ => config}/queryPatterns.ts | 0
.../src/features/shared/hooks/index.ts | 2 +-
.../src/features/shared/hooks/useToast.ts | 2 +-
.../src/features/shared/{ => types}/errors.ts | 0
.../features/shared/{ => utils}/optimistic.ts | 0
.../{ => utils/tests}/optimistic.test.ts | 14 +-
.../src/features/testing/test-utils.tsx | 2 +-
48 files changed, 230 insertions(+), 215 deletions(-)
rename archon-ui-main/src/features/shared/{apiWithEtag.ts => api/apiClient.ts} (83%)
rename archon-ui-main/src/features/shared/{apiWithEtag.test.ts => api/tests/apiClient.test.ts} (96%)
rename archon-ui-main/src/features/shared/{ => config}/queryClient.ts (100%)
rename archon-ui-main/src/features/shared/{ => config}/queryPatterns.ts (100%)
rename archon-ui-main/src/features/shared/{ => types}/errors.ts (100%)
rename archon-ui-main/src/features/shared/{ => utils}/optimistic.ts (100%)
rename archon-ui-main/src/features/shared/{ => utils/tests}/optimistic.test.ts (97%)
diff --git a/PRPs/ai_docs/API_NAMING_CONVENTIONS.md b/PRPs/ai_docs/API_NAMING_CONVENTIONS.md
index 5688912b..2135bc8d 100644
--- a/PRPs/ai_docs/API_NAMING_CONVENTIONS.md
+++ b/PRPs/ai_docs/API_NAMING_CONVENTIONS.md
@@ -198,7 +198,7 @@ Database values used directly - no mapping layers:
- Operation statuses: `"pending"`, `"processing"`, `"completed"`, `"failed"`
### Time Constants
-**Location**: `archon-ui-main/src/features/shared/queryPatterns.ts`
+**Location**: `archon-ui-main/src/features/shared/config/queryPatterns.ts`
- `STALE_TIMES.instant` - 0ms
- `STALE_TIMES.realtime` - 3 seconds
- `STALE_TIMES.frequent` - 5 seconds
diff --git a/PRPs/ai_docs/ARCHITECTURE.md b/PRPs/ai_docs/ARCHITECTURE.md
index a5c0ae7a..eb3a7f81 100644
--- a/PRPs/ai_docs/ARCHITECTURE.md
+++ b/PRPs/ai_docs/ARCHITECTURE.md
@@ -88,8 +88,8 @@ Pattern: `{METHOD} /api/{resource}/{id?}/{sub-resource?}`
### Data Fetching
**Core**: TanStack Query v5
-**Configuration**: `archon-ui-main/src/features/shared/queryClient.ts`
-**Patterns**: `archon-ui-main/src/features/shared/queryPatterns.ts`
+**Configuration**: `archon-ui-main/src/features/shared/config/queryClient.ts`
+**Patterns**: `archon-ui-main/src/features/shared/config/queryPatterns.ts`
### State Management
- **Server State**: TanStack Query
@@ -139,7 +139,7 @@ TanStack Query is the single source of truth. No separate state management neede
No translation layers. Database values (e.g., `"todo"`, `"doing"`) used directly in UI.
### Browser-Native Caching
-ETags handled by browser, not JavaScript. See `archon-ui-main/src/features/shared/apiWithEtag.ts`.
+ETags handled by browser, not JavaScript. See `archon-ui-main/src/features/shared/api/apiClient.ts`.
## Deployment
diff --git a/PRPs/ai_docs/DATA_FETCHING_ARCHITECTURE.md b/PRPs/ai_docs/DATA_FETCHING_ARCHITECTURE.md
index d8a9822b..8d1bbb62 100644
--- a/PRPs/ai_docs/DATA_FETCHING_ARCHITECTURE.md
+++ b/PRPs/ai_docs/DATA_FETCHING_ARCHITECTURE.md
@@ -8,7 +8,7 @@ Archon uses **TanStack Query v5** for all data fetching, caching, and synchroniz
### 1. Query Client Configuration
-**Location**: `archon-ui-main/src/features/shared/queryClient.ts`
+**Location**: `archon-ui-main/src/features/shared/config/queryClient.ts`
Centralized QueryClient with:
@@ -30,7 +30,7 @@ Visibility-aware polling that:
### 3. Query Patterns
-**Location**: `archon-ui-main/src/features/shared/queryPatterns.ts`
+**Location**: `archon-ui-main/src/features/shared/config/queryPatterns.ts`
Shared constants:
@@ -64,7 +64,7 @@ Standard pattern across all features:
### ETag Support
-**Location**: `archon-ui-main/src/features/shared/apiWithEtag.ts`
+**Location**: `archon-ui-main/src/features/shared/api/apiClient.ts`
ETag implementation:
@@ -83,7 +83,7 @@ Backend endpoints follow RESTful patterns:
## Optimistic Updates
-**Utilities**: `archon-ui-main/src/features/shared/optimistic.ts`
+**Utilities**: `archon-ui-main/src/features/shared/utils/optimistic.ts`
All mutations use nanoid-based optimistic updates:
@@ -105,7 +105,7 @@ Polling intervals are defined in each feature's query hooks. See actual implemen
- **Progress**: `archon-ui-main/src/features/progress/hooks/useProgressQueries.ts`
- **MCP**: `archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts`
-Standard intervals from `archon-ui-main/src/features/shared/queryPatterns.ts`:
+Standard intervals from `archon-ui-main/src/features/shared/config/queryPatterns.ts`:
- `STALE_TIMES.instant`: 0ms (always fresh)
- `STALE_TIMES.frequent`: 5 seconds (frequently changing data)
- `STALE_TIMES.normal`: 30 seconds (standard cache)
diff --git a/PRPs/ai_docs/ETAG_IMPLEMENTATION.md b/PRPs/ai_docs/ETAG_IMPLEMENTATION.md
index 70e4ce63..8560dbb5 100644
--- a/PRPs/ai_docs/ETAG_IMPLEMENTATION.md
+++ b/PRPs/ai_docs/ETAG_IMPLEMENTATION.md
@@ -17,7 +17,7 @@ The backend generates ETags for API responses:
- Returns `304 Not Modified` when ETags match
### Frontend Handling
-**Location**: `archon-ui-main/src/features/shared/apiWithEtag.ts`
+**Location**: `archon-ui-main/src/features/shared/api/apiClient.ts`
The frontend relies on browser-native HTTP caching:
- Browser automatically sends `If-None-Match` headers with cached ETags
@@ -28,7 +28,7 @@ The frontend relies on browser-native HTTP caching:
#### Browser vs Non-Browser Behavior
- **Standard Browsers**: Per the Fetch spec, a 304 response freshens the HTTP cache and returns the cached body to JavaScript
- **Non-Browser Runtimes** (React Native, custom fetch): May surface 304 with empty body to JavaScript
-- **Client Fallback**: The `apiWithEtag.ts` implementation handles both scenarios, ensuring consistent behavior across environments
+- **Client Fallback**: The `apiClient.ts` implementation handles both scenarios, ensuring consistent behavior across environments
## Implementation Details
@@ -81,8 +81,8 @@ Unlike previous implementations, the current approach:
### Configuration
Cache behavior is controlled through TanStack Query's `staleTime`:
-- See `archon-ui-main/src/features/shared/queryPatterns.ts` for standard times
-- See `archon-ui-main/src/features/shared/queryClient.ts` for global configuration
+- See `archon-ui-main/src/features/shared/config/queryPatterns.ts` for standard times
+- See `archon-ui-main/src/features/shared/config/queryClient.ts` for global configuration
## Performance Benefits
@@ -100,7 +100,7 @@ Cache behavior is controlled through TanStack Query's `staleTime`:
### Core Implementation
- **Backend Utilities**: `python/src/server/utils/etag_utils.py`
-- **Frontend Client**: `archon-ui-main/src/features/shared/apiWithEtag.ts`
+- **Frontend Client**: `archon-ui-main/src/features/shared/api/apiClient.ts`
- **Tests**: `python/tests/server/utils/test_etag_utils.py`
### Usage Examples
diff --git a/PRPs/ai_docs/QUERY_PATTERNS.md b/PRPs/ai_docs/QUERY_PATTERNS.md
index 3c3204db..499daa36 100644
--- a/PRPs/ai_docs/QUERY_PATTERNS.md
+++ b/PRPs/ai_docs/QUERY_PATTERNS.md
@@ -5,7 +5,7 @@ This guide documents the standardized patterns for using TanStack Query v5 in th
## Core Principles
1. **Feature Ownership**: Each feature owns its query keys in `{feature}/hooks/use{Feature}Queries.ts`
-2. **Consistent Patterns**: Always use shared patterns from `shared/queryPatterns.ts`
+2. **Consistent Patterns**: Always use shared patterns from `shared/config/queryPatterns.ts`
3. **No Hardcoded Values**: Never hardcode stale times or disabled keys
4. **Mirror Backend API**: Query keys should exactly match backend API structure
@@ -49,7 +49,7 @@ export const taskKeys = {
### Import Required Patterns
```typescript
-import { DISABLED_QUERY_KEY, STALE_TIMES } from "@/features/shared/queryPatterns";
+import { DISABLED_QUERY_KEY, STALE_TIMES } from "@/features/shared/config/queryPatterns";
```
### Disabled Queries
@@ -106,7 +106,7 @@ export function useFeatureDetail(id: string | undefined) {
## Mutations with Optimistic Updates
```typescript
-import { createOptimisticEntity, replaceOptimisticEntity } from "@/features/shared/optimistic";
+import { createOptimisticEntity, replaceOptimisticEntity } from "@/features/shared/utils/optimistic";
export function useCreateFeature() {
const queryClient = useQueryClient();
@@ -161,7 +161,7 @@ vi.mock("../../services", () => ({
}));
// Mock shared patterns with ALL values
-vi.mock("../../../shared/queryPatterns", () => ({
+vi.mock("../../../shared/config/queryPatterns", () => ({
DISABLED_QUERY_KEY: ["disabled"] as const,
STALE_TIMES: {
instant: 0,
diff --git a/PRPs/ai_docs/optimistic_updates.md b/PRPs/ai_docs/optimistic_updates.md
index 7be11ea6..219b7866 100644
--- a/PRPs/ai_docs/optimistic_updates.md
+++ b/PRPs/ai_docs/optimistic_updates.md
@@ -3,7 +3,7 @@
## Core Architecture
### Shared Utilities Module
-**Location**: `src/features/shared/optimistic.ts`
+**Location**: `src/features/shared/utils/optimistic.ts`
Provides type-safe utilities for managing optimistic state across all features:
- `createOptimisticId()` - Generates stable UUIDs using nanoid
@@ -73,13 +73,13 @@ Reusable component showing:
- Uses `createOptimisticId()` directly for progress tracking
### Toasts
-- **Location**: `src/features/ui/hooks/useToast.ts:43`
+- **Location**: `src/features/shared/hooks/useToast.ts:43`
- Uses `createOptimisticId()` for unique toast IDs
## Testing
### Unit Tests
-**Location**: `src/features/shared/optimistic.test.ts`
+**Location**: `src/features/shared/utils/tests/optimistic.test.ts`
Covers all utility functions with 8 test cases:
- ID uniqueness and format validation
diff --git a/archon-ui-main/src/App.tsx b/archon-ui-main/src/App.tsx
index 1d4e22d3..ea2539cc 100644
--- a/archon-ui-main/src/App.tsx
+++ b/archon-ui-main/src/App.tsx
@@ -2,7 +2,7 @@ import { useState, useEffect } from 'react';
import { BrowserRouter as Router, Routes, Route, Navigate } from 'react-router-dom';
import { QueryClientProvider } from '@tanstack/react-query';
import { ReactQueryDevtools } from '@tanstack/react-query-devtools';
-import { queryClient } from './features/shared/queryClient';
+import { queryClient } from './features/shared/config/queryClient';
import { KnowledgeBasePage } from './pages/KnowledgeBasePage';
import { SettingsPage } from './pages/SettingsPage';
import { MCPPage } from './pages/MCPPage';
diff --git a/archon-ui-main/src/components/layout/hooks/useBackendHealth.ts b/archon-ui-main/src/components/layout/hooks/useBackendHealth.ts
index 626d23b6..59e9ccfa 100644
--- a/archon-ui-main/src/components/layout/hooks/useBackendHealth.ts
+++ b/archon-ui-main/src/components/layout/hooks/useBackendHealth.ts
@@ -1,6 +1,6 @@
import { useQuery } from "@tanstack/react-query";
-import { callAPIWithETag } from "../../../features/shared/apiWithEtag";
-import { createRetryLogic, STALE_TIMES } from "../../../features/shared/queryPatterns";
+import { callAPIWithETag } from "../../../features/shared/api/apiClient";
+import { createRetryLogic, STALE_TIMES } from "../../../features/shared/config/queryPatterns";
import type { HealthResponse } from "../types";
/**
diff --git a/archon-ui-main/src/features/knowledge/components/KnowledgeCard.tsx b/archon-ui-main/src/features/knowledge/components/KnowledgeCard.tsx
index bb49edd9..05c882de 100644
--- a/archon-ui-main/src/features/knowledge/components/KnowledgeCard.tsx
+++ b/archon-ui-main/src/features/knowledge/components/KnowledgeCard.tsx
@@ -1,6 +1,6 @@
/**
- * Enhanced Knowledge Card Component
- * Individual knowledge item card with excellent UX and inline progress
+ * Knowledge Card component
+ * Displays a knowledge item with inline progress and status UI
* Following the pattern from ProjectCard
*/
@@ -10,7 +10,7 @@ import { Clock, Code, ExternalLink, File, FileText, Globe } from "lucide-react";
import { useState } from "react";
import { KnowledgeCardProgress } from "../../progress/components/KnowledgeCardProgress";
import type { ActiveOperation } from "../../progress/types";
-import { isOptimistic } from "../../shared/optimistic";
+import { isOptimistic } from "@/features/shared/utils/optimistic";
import { StatPill } from "../../ui/primitives";
import { OptimisticIndicator } from "../../ui/primitives/OptimisticIndicator";
import { cn } from "../../ui/primitives/styles";
@@ -144,6 +144,7 @@ export const KnowledgeCard: React.FC = ({
};
return (
+ // biome-ignore lint/a11y/useSemanticElements: Card contains nested interactive elements (buttons, links) - using div to avoid invalid HTML nesting
diff --git a/archon-ui-main/src/features/knowledge/hooks/useKnowledgeQueries.ts b/archon-ui-main/src/features/knowledge/hooks/useKnowledgeQueries.ts
);
- const tempItemId = optimisticItem.id;
// Update all summaries caches with optimistic data, respecting each cache's filter
const entries = queryClient.getQueriesData({
@@ -229,7 +228,7 @@ export function useCrawlUrl() {
});
// Return context for rollback and replacement
- return { previousSummaries, previousOperations, tempProgressId, tempItemId };
+ return { previousSummaries, previousOperations, tempProgressId };
},
onSuccess: (response, _variables, context) => {
// Replace temporary IDs with real ones from the server
@@ -313,7 +312,6 @@ export function useUploadDocument() {
previousSummaries?: Array<[readonly unknown[], KnowledgeItemsResponse | undefined]>;
previousOperations?: ActiveOperationsResponse;
tempProgressId: string;
- tempItemId: string;
}
>({
mutationFn: ({ file, metadata }: { file: File; metadata: UploadMetadata }) =>
@@ -352,7 +350,6 @@ export function useUploadDocument() {
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
} as Omit);
- const tempItemId = optimisticItem.id;
// Respect each cache's filter (knowledge_type, tags, etc.)
const entries = queryClient.getQueriesData({
@@ -410,7 +407,7 @@ export function useUploadDocument() {
};
});
- return { previousSummaries, previousOperations, tempProgressId, tempItemId };
+ return { previousSummaries, previousOperations, tempProgressId };
},
onSuccess: (response, _variables, context) => {
// Replace temporary IDs with real ones from the server
@@ -421,7 +418,7 @@ export function useUploadDocument() {
return {
...old,
items: old.items.map((item) => {
- if (item.id === context.tempItemId) {
+ if (item.source_id === context.tempProgressId) {
return {
...item,
source_id: response.progressId,
diff --git a/archon-ui-main/src/features/knowledge/inspector/components/KnowledgeInspector.tsx b/archon-ui-main/src/features/knowledge/inspector/components/KnowledgeInspector.tsx
index 69e8f050..334d4567 100644
--- a/archon-ui-main/src/features/knowledge/inspector/components/KnowledgeInspector.tsx
+++ b/archon-ui-main/src/features/knowledge/inspector/components/KnowledgeInspector.tsx
@@ -4,13 +4,13 @@
*/
import { useCallback, useEffect, useState } from "react";
+import { copyToClipboard } from "../../../shared/utils/clipboard";
import { InspectorDialog, InspectorDialogContent, InspectorDialogTitle } from "../../../ui/primitives";
import type { CodeExample, DocumentChunk, InspectorSelectedItem, KnowledgeItem } from "../../types";
import { useInspectorPagination } from "../hooks/useInspectorPagination";
import { ContentViewer } from "./ContentViewer";
import { InspectorHeader } from "./InspectorHeader";
import { InspectorSidebar } from "./InspectorSidebar";
-import { copyToClipboard } from "../../../shared/utils/clipboard";
interface KnowledgeInspectorProps {
item: KnowledgeItem;
diff --git a/archon-ui-main/src/features/knowledge/inspector/hooks/useInspectorPagination.ts b/archon-ui-main/src/features/knowledge/inspector/hooks/useInspectorPagination.ts
index 613aa19d..a1f286d5 100644
--- a/archon-ui-main/src/features/knowledge/inspector/hooks/useInspectorPagination.ts
+++ b/archon-ui-main/src/features/knowledge/inspector/hooks/useInspectorPagination.ts
@@ -5,7 +5,7 @@
import { useInfiniteQuery } from "@tanstack/react-query";
import { useMemo } from "react";
-import { STALE_TIMES } from "@/features/shared/queryPatterns";
+import { STALE_TIMES } from "@/features/shared/config/queryPatterns";
import { knowledgeKeys } from "../../hooks/useKnowledgeQueries";
import { knowledgeService } from "../../services";
import type { ChunksResponse, CodeExample, CodeExamplesResponse, DocumentChunk } from "../../types";
diff --git a/archon-ui-main/src/features/knowledge/services/knowledgeService.ts b/archon-ui-main/src/features/knowledge/services/knowledgeService.ts
index b9d6af06..cfab3f7f 100644
--- a/archon-ui-main/src/features/knowledge/services/knowledgeService.ts
+++ b/archon-ui-main/src/features/knowledge/services/knowledgeService.ts
@@ -3,8 +3,8 @@
* Handles all knowledge-related API operations using TanStack Query patterns
*/
-import { callAPIWithETag } from "../../shared/apiWithEtag";
-import { APIServiceError } from "../../shared/errors";
+import { callAPIWithETag } from "../../shared/api/apiClient";
+import { APIServiceError } from "../../shared/types/errors";
import type {
ChunksResponse,
CodeExamplesResponse,
diff --git a/archon-ui-main/src/features/knowledge/utils/tests/providerErrorHandler.test.ts b/archon-ui-main/src/features/knowledge/utils/tests/providerErrorHandler.test.ts
index 193e2444..9ddf380a 100644
--- a/archon-ui-main/src/features/knowledge/utils/tests/providerErrorHandler.test.ts
+++ b/archon-ui-main/src/features/knowledge/utils/tests/providerErrorHandler.test.ts
@@ -1,70 +1,70 @@
-import { describe, it, expect } from 'vitest';
-import { parseProviderError, getProviderErrorMessage, type ProviderError } from '../providerErrorHandler';
+import { describe, expect, it } from "vitest";
+import { getProviderErrorMessage, type ProviderError, parseProviderError } from "../providerErrorHandler";
-describe('providerErrorHandler', () => {
- describe('parseProviderError', () => {
- it('should handle basic Error objects', () => {
- const error = new Error('Basic error message');
+describe("providerErrorHandler", () => {
+ describe("parseProviderError", () => {
+ it("should handle basic Error objects", () => {
+ const error = new Error("Basic error message");
const result = parseProviderError(error);
- expect(result.message).toBe('Basic error message');
+ expect(result.message).toBe("Basic error message");
expect(result.isProviderError).toBeUndefined();
});
- it('should handle errors with statusCode property', () => {
- const error = { statusCode: 401, message: 'Unauthorized' };
+ it("should handle errors with statusCode property", () => {
+ const error = { statusCode: 401, message: "Unauthorized" };
const result = parseProviderError(error);
expect(result.statusCode).toBe(401);
- expect(result.message).toBe('Unauthorized');
+ expect(result.message).toBe("Unauthorized");
});
- it('should handle errors with status property', () => {
- const error = { status: 429, message: 'Rate limited' };
+ it("should handle errors with status property", () => {
+ const error = { status: 429, message: "Rate limited" };
const result = parseProviderError(error);
expect(result.statusCode).toBe(429);
- expect(result.message).toBe('Rate limited');
+ expect(result.message).toBe("Rate limited");
});
- it('should prioritize statusCode over status when both are present', () => {
- const error = { statusCode: 401, status: 429, message: 'Auth error' };
+ it("should prioritize statusCode over status when both are present", () => {
+ const error = { statusCode: 401, status: 429, message: "Auth error" };
const result = parseProviderError(error);
expect(result.statusCode).toBe(401);
});
- it('should parse structured provider errors from backend', () => {
+ it("should parse structured provider errors from backend", () => {
const error = {
message: JSON.stringify({
detail: {
- error_type: 'authentication_failed',
- provider: 'OpenAI',
- message: 'Invalid API key'
- }
- })
+ error_type: "authentication_failed",
+ provider: "OpenAI",
+ message: "Invalid API key",
+ },
+ }),
};
const result = parseProviderError(error);
expect(result.isProviderError).toBe(true);
- expect(result.provider).toBe('OpenAI');
- expect(result.errorType).toBe('authentication_failed');
- expect(result.message).toBe('Invalid API key');
+ expect(result.provider).toBe("OpenAI");
+ expect(result.errorType).toBe("authentication_failed");
+ expect(result.message).toBe("Invalid API key");
});
- it('should handle malformed JSON in message gracefully', () => {
+ it("should handle malformed JSON in message gracefully", () => {
const error = {
- message: 'invalid json { detail'
+ message: "invalid json { detail",
};
const result = parseProviderError(error);
expect(result.isProviderError).toBeUndefined();
- expect(result.message).toBe('invalid json { detail');
+ expect(result.message).toBe("invalid json { detail");
});
- it('should handle null and undefined inputs safely', () => {
+ it("should handle null and undefined inputs safely", () => {
expect(() => parseProviderError(null)).not.toThrow();
expect(() => parseProviderError(undefined)).not.toThrow();
@@ -75,7 +75,7 @@ describe('providerErrorHandler', () => {
expect(undefinedResult).toBeDefined();
});
- it('should handle empty objects', () => {
+ it("should handle empty objects", () => {
const result = parseProviderError({});
expect(result).toBeDefined();
@@ -83,171 +83,171 @@ describe('providerErrorHandler', () => {
expect(result.isProviderError).toBeUndefined();
});
- it('should handle primitive values', () => {
- expect(() => parseProviderError('string error')).not.toThrow();
+ it("should handle primitive values", () => {
+ expect(() => parseProviderError("string error")).not.toThrow();
expect(() => parseProviderError(42)).not.toThrow();
expect(() => parseProviderError(true)).not.toThrow();
});
- it('should handle structured errors without provider field', () => {
+ it("should handle structured errors without provider field", () => {
const error = {
message: JSON.stringify({
detail: {
- error_type: 'quota_exhausted',
- message: 'Usage limit exceeded'
- }
- })
+ error_type: "quota_exhausted",
+ message: "Usage limit exceeded",
+ },
+ }),
};
const result = parseProviderError(error);
expect(result.isProviderError).toBe(true);
- expect(result.provider).toBe('LLM'); // Default fallback
- expect(result.errorType).toBe('quota_exhausted');
- expect(result.message).toBe('Usage limit exceeded');
+ expect(result.provider).toBe("LLM"); // Default fallback
+ expect(result.errorType).toBe("quota_exhausted");
+ expect(result.message).toBe("Usage limit exceeded");
});
- it('should handle partial structured errors', () => {
+ it("should handle partial structured errors", () => {
const error = {
message: JSON.stringify({
detail: {
- error_type: 'rate_limit'
+ error_type: "rate_limit",
// Missing message field
- }
- })
+ },
+ }),
};
const result = parseProviderError(error);
expect(result.isProviderError).toBe(true);
- expect(result.errorType).toBe('rate_limit');
+ expect(result.errorType).toBe("rate_limit");
expect(result.message).toBe(error.message); // Falls back to original message
});
});
- describe('getProviderErrorMessage', () => {
- it('should return user-friendly message for authentication_failed', () => {
+ describe("getProviderErrorMessage", () => {
+ it("should return user-friendly message for authentication_failed", () => {
const error: ProviderError = {
- name: 'Error',
- message: 'Auth failed',
+ name: "Error",
+ message: "Auth failed",
isProviderError: true,
- provider: 'OpenAI',
- errorType: 'authentication_failed'
+ provider: "OpenAI",
+ errorType: "authentication_failed",
};
const result = getProviderErrorMessage(error);
- expect(result).toBe('Please verify your OpenAI API key in Settings.');
+ expect(result).toBe("Please verify your OpenAI API key in Settings.");
});
- it('should return user-friendly message for quota_exhausted', () => {
+ it("should return user-friendly message for quota_exhausted", () => {
const error: ProviderError = {
- name: 'Error',
- message: 'Quota exceeded',
+ name: "Error",
+ message: "Quota exceeded",
isProviderError: true,
- provider: 'Google AI',
- errorType: 'quota_exhausted'
+ provider: "Google AI",
+ errorType: "quota_exhausted",
};
const result = getProviderErrorMessage(error);
- expect(result).toBe('Google AI quota exhausted. Please check your billing settings.');
+ expect(result).toBe("Google AI quota exhausted. Please check your billing settings.");
});
- it('should return user-friendly message for rate_limit', () => {
+ it("should return user-friendly message for rate_limit", () => {
const error: ProviderError = {
- name: 'Error',
- message: 'Rate limited',
+ name: "Error",
+ message: "Rate limited",
isProviderError: true,
- provider: 'Anthropic',
- errorType: 'rate_limit'
+ provider: "Anthropic",
+ errorType: "rate_limit",
};
const result = getProviderErrorMessage(error);
- expect(result).toBe('Anthropic rate limit exceeded. Please wait and try again.');
+ expect(result).toBe("Anthropic rate limit exceeded. Please wait and try again.");
});
- it('should return generic provider message for unknown error types', () => {
+ it("should return generic provider message for unknown error types", () => {
const error: ProviderError = {
- name: 'Error',
- message: 'Unknown error',
+ name: "Error",
+ message: "Unknown error",
isProviderError: true,
- provider: 'OpenAI',
- errorType: 'unknown_error'
+ provider: "OpenAI",
+ errorType: "unknown_error",
};
const result = getProviderErrorMessage(error);
- expect(result).toBe('OpenAI API error. Please check your configuration.');
+ expect(result).toBe("OpenAI API error. Please check your configuration.");
});
- it('should use default provider when provider is missing', () => {
+ it("should use default provider when provider is missing", () => {
const error: ProviderError = {
- name: 'Error',
- message: 'Auth failed',
+ name: "Error",
+ message: "Auth failed",
isProviderError: true,
- errorType: 'authentication_failed'
+ errorType: "authentication_failed",
};
const result = getProviderErrorMessage(error);
- expect(result).toBe('Please verify your LLM API key in Settings.');
+ expect(result).toBe("Please verify your LLM API key in Settings.");
});
- it('should handle 401 status code for non-provider errors', () => {
- const error = { statusCode: 401, message: 'Unauthorized' };
+ it("should handle 401 status code for non-provider errors", () => {
+ const error = { statusCode: 401, message: "Unauthorized" };
const result = getProviderErrorMessage(error);
- expect(result).toBe('Please verify your API key in Settings.');
+ expect(result).toBe("Please verify your API key in Settings.");
});
- it('should return original message for non-provider errors', () => {
- const error = new Error('Network connection failed');
+ it("should return original message for non-provider errors", () => {
+ const error = new Error("Network connection failed");
const result = getProviderErrorMessage(error);
- expect(result).toBe('Network connection failed');
+ expect(result).toBe("Network connection failed");
});
- it('should return default message when no message is available', () => {
+ it("should return default message when no message is available", () => {
const error = {};
const result = getProviderErrorMessage(error);
- expect(result).toBe('An error occurred.');
+ expect(result).toBe("An error occurred.");
});
- it('should handle complex error objects with structured backend response', () => {
+ it("should handle complex error objects with structured backend response", () => {
const backendError = {
statusCode: 400,
message: JSON.stringify({
detail: {
- error_type: 'authentication_failed',
- provider: 'OpenAI',
- message: 'API key invalid or expired'
- }
- })
+ error_type: "authentication_failed",
+ provider: "OpenAI",
+ message: "API key invalid or expired",
+ },
+ }),
};
const result = getProviderErrorMessage(backendError);
- expect(result).toBe('Please verify your OpenAI API key in Settings.');
+ expect(result).toBe("Please verify your OpenAI API key in Settings.");
});
it('should handle edge case: message contains "detail" but is not JSON', () => {
const error = {
- message: 'Error detail: something went wrong'
+ message: "Error detail: something went wrong",
};
const result = getProviderErrorMessage(error);
- expect(result).toBe('Error detail: something went wrong');
+ expect(result).toBe("Error detail: something went wrong");
});
- it('should handle null and undefined gracefully', () => {
- expect(getProviderErrorMessage(null)).toBe('An error occurred.');
- expect(getProviderErrorMessage(undefined)).toBe('An error occurred.');
+ it("should handle null and undefined gracefully", () => {
+ expect(getProviderErrorMessage(null)).toBe("An error occurred.");
+ expect(getProviderErrorMessage(undefined)).toBe("An error occurred.");
});
});
- describe('TypeScript strict mode compliance', () => {
- it('should handle type-safe property access', () => {
+ describe("TypeScript strict mode compliance", () => {
+ it("should handle type-safe property access", () => {
// Test that our type guards work properly
const errorWithStatus = { statusCode: 500 };
- const errorWithMessage = { message: 'test' };
- const errorWithBoth = { statusCode: 401, message: 'unauthorized' };
+ const errorWithMessage = { message: "test" };
+ const errorWithBoth = { statusCode: 401, message: "unauthorized" };
// These should not throw TypeScript errors and should work correctly
expect(() => parseProviderError(errorWithStatus)).not.toThrow();
@@ -259,13 +259,13 @@ describe('providerErrorHandler', () => {
const result3 = parseProviderError(errorWithBoth);
expect(result1.statusCode).toBe(500);
- expect(result2.message).toBe('test');
+ expect(result2.message).toBe("test");
expect(result3.statusCode).toBe(401);
- expect(result3.message).toBe('unauthorized');
+ expect(result3.message).toBe("unauthorized");
});
- it('should handle objects without expected properties safely', () => {
- const objectWithoutStatus = { someOtherProperty: 'value' };
+ it("should handle objects without expected properties safely", () => {
+ const objectWithoutStatus = { someOtherProperty: "value" };
const objectWithoutMessage = { anotherProperty: 42 };
expect(() => parseProviderError(objectWithoutStatus)).not.toThrow();
@@ -278,4 +278,4 @@ describe('providerErrorHandler', () => {
expect(result2.message).toBeUndefined();
});
});
-});
\ No newline at end of file
+});
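
The implementation under test is not included in this diff; a minimal sketch consistent with the behavior these tests pin down (statusCode preferred over status, structured errors parsed out of a JSON message, "LLM" as the fallback provider) might look like:

interface ParsedProviderError {
  message?: string;
  statusCode?: number;
  isProviderError?: boolean;
  provider?: string;
  errorType?: string;
}

function parseProviderErrorSketch(error: unknown): ParsedProviderError {
  const err = (error ?? {}) as Record<string, unknown>;
  const parsed: ParsedProviderError = {};
  if (typeof err.message === "string") parsed.message = err.message;
  // statusCode wins over status when both are present
  if (typeof err.statusCode === "number") parsed.statusCode = err.statusCode;
  else if (typeof err.status === "number") parsed.statusCode = err.status;
  // Structured backend errors arrive as a JSON payload in the message field
  if (parsed.message?.includes("detail")) {
    try {
      const detail = JSON.parse(parsed.message)?.detail;
      if (detail?.error_type) {
        parsed.isProviderError = true;
        parsed.errorType = detail.error_type;
        parsed.provider = detail.provider ?? "LLM"; // default fallback asserted in the tests
        if (detail.message) parsed.message = detail.message;
      }
    } catch {
      // Malformed JSON: keep the original message and set no provider flags
    }
  }
  return parsed;
}

function getProviderErrorMessageSketch(error: unknown): string {
  const e = parseProviderErrorSketch(error);
  const provider = e.provider ?? "LLM";
  if (e.isProviderError) {
    switch (e.errorType) {
      case "authentication_failed":
        return `Please verify your ${provider} API key in Settings.`;
      case "quota_exhausted":
        return `${provider} quota exhausted. Please check your billing settings.`;
      case "rate_limit":
        return `${provider} rate limit exceeded. Please wait and try again.`;
      default:
        return `${provider} API error. Please check your configuration.`;
    }
  }
  if (e.statusCode === 401) return "Please verify your API key in Settings.";
  return e.message ?? "An error occurred.";
}
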
diff --git a/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx b/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx
index 6f6a66df..0bedc7b2 100644
--- a/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx
+++ b/archon-ui-main/src/features/knowledge/views/KnowledgeView.tsx
@@ -4,9 +4,9 @@
*/
import { useEffect, useMemo, useRef, useState } from "react";
+import { useToast } from "@/features/shared/hooks/useToast";
import { CrawlingProgress } from "../../progress/components/CrawlingProgress";
import type { ActiveOperation } from "../../progress/types";
-import { useToast } from "@/features/shared/hooks/useToast";
import { AddKnowledgeDialog } from "../components/AddKnowledgeDialog";
import { KnowledgeHeader } from "../components/KnowledgeHeader";
import { KnowledgeList } from "../components/KnowledgeList";
diff --git a/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx b/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx
index c36b2f01..b5344bda 100644
--- a/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx
+++ b/archon-ui-main/src/features/mcp/components/McpConfigSection.tsx
@@ -2,9 +2,9 @@ import { Copy, ExternalLink } from "lucide-react";
import type React from "react";
import { useState } from "react";
import { useToast } from "@/features/shared/hooks";
+import { copyToClipboard } from "../../shared/utils/clipboard";
import { Button, cn, glassmorphism, Tabs, TabsContent, TabsList, TabsTrigger } from "../../ui/primitives";
import type { McpServerConfig, McpServerStatus, SupportedIDE } from "../types";
-import { copyToClipboard } from "../../shared/utils/clipboard";
interface McpConfigSectionProps {
config?: McpServerConfig;
@@ -324,7 +324,8 @@ export const McpConfigSection: React.FC<McpConfigSectionProps> = ({ config, status
Platform Note: The configuration below shows{" "}
{navigator.platform.toLowerCase().includes("win") ? "Windows" : "Linux/macOS"} format. Adjust paths
- according to your system. This setup is complex right now because Codex has some bugs with MCP currently.
+ according to your system. This setup is complex right now because Codex has some bugs with MCP
+ currently.
)}
diff --git a/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts b/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts
index aef5ec68..eaf8f404 100644
--- a/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts
+++ b/archon-ui-main/src/features/mcp/hooks/useMcpQueries.ts
@@ -1,6 +1,6 @@
import { useQuery } from "@tanstack/react-query";
-import { STALE_TIMES } from "../../shared/queryPatterns";
import { useSmartPolling } from "@/features/shared/hooks";
+import { STALE_TIMES } from "../../shared/config/queryPatterns";
import { mcpApi } from "../services";
// Query keys factory
diff --git a/archon-ui-main/src/features/mcp/services/mcpApi.ts b/archon-ui-main/src/features/mcp/services/mcpApi.ts
index 008c800c..d4b02ed4 100644
--- a/archon-ui-main/src/features/mcp/services/mcpApi.ts
+++ b/archon-ui-main/src/features/mcp/services/mcpApi.ts
@@ -1,4 +1,4 @@
-import { callAPIWithETag } from "../../shared/apiWithEtag";
+import { callAPIWithETag } from "../../shared/api/apiClient";
import type { McpClient, McpServerConfig, McpServerStatus, McpSessionInfo } from "../types";
export const mcpApi = {
diff --git a/archon-ui-main/src/features/progress/hooks/tests/useProgressQueries.test.ts b/archon-ui-main/src/features/progress/hooks/tests/useProgressQueries.test.ts
index 565919aa..d305a146 100644
--- a/archon-ui-main/src/features/progress/hooks/tests/useProgressQueries.test.ts
+++ b/archon-ui-main/src/features/progress/hooks/tests/useProgressQueries.test.ts
@@ -19,7 +19,7 @@ vi.mock("../../services", () => ({
}));
// Mock shared query patterns
-vi.mock("../../../shared/queryPatterns", () => ({
+vi.mock("../../../shared/config/queryPatterns", () => ({
DISABLED_QUERY_KEY: ["disabled"] as const,
STALE_TIMES: {
instant: 0,
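
Worth noting for this and the other touched tests: vi.mock matches on the literal module specifier, so relocating a shared module means every mock path must be updated in lockstep with the import path — the module graph, not the filesystem, is what gets intercepted. Sketch (STALE_TIMES values beyond `instant` are assumed):

import { vi } from "vitest";

// The specifier must match what the code under test imports, character for character.
vi.mock("../../../shared/config/queryPatterns", () => ({
  DISABLED_QUERY_KEY: ["disabled"] as const,
  STALE_TIMES: {
    instant: 0,
    rare: 300_000, // assumed for illustration
    static: Number.POSITIVE_INFINITY, // assumed for illustration
  },
}));
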
diff --git a/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts b/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts
index ae82ba17..1ebec2a9 100644
--- a/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts
+++ b/archon-ui-main/src/features/progress/hooks/useProgressQueries.ts
@@ -5,9 +5,9 @@
import { type UseQueryResult, useQueries, useQuery, useQueryClient } from "@tanstack/react-query";
import { useEffect, useMemo, useRef } from "react";
-import { APIServiceError } from "../../shared/errors";
-import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../shared/queryPatterns";
+import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../shared/config/queryPatterns";
import { useSmartPolling } from "../../shared/hooks";
+import { APIServiceError } from "../../shared/types/errors";
import { progressService } from "../services";
import type { ActiveOperationsResponse, ProgressResponse, ProgressStatus } from "../types";
diff --git a/archon-ui-main/src/features/progress/services/progressService.ts b/archon-ui-main/src/features/progress/services/progressService.ts
index d3f6e61e..ba0e68ba 100644
--- a/archon-ui-main/src/features/progress/services/progressService.ts
+++ b/archon-ui-main/src/features/progress/services/progressService.ts
@@ -3,7 +3,7 @@
* Uses ETag support for efficient polling
*/
-import { callAPIWithETag } from "../../shared/apiWithEtag";
+import { callAPIWithETag } from "../../shared/api/apiClient";
import type { ActiveOperationsResponse, ProgressResponse } from "../types";
export const progressService = {
diff --git a/archon-ui-main/src/features/projects/components/ProjectCard.tsx b/archon-ui-main/src/features/projects/components/ProjectCard.tsx
index df990710..a6b62349 100644
--- a/archon-ui-main/src/features/projects/components/ProjectCard.tsx
+++ b/archon-ui-main/src/features/projects/components/ProjectCard.tsx
@@ -1,7 +1,7 @@
import { motion } from "framer-motion";
import { Activity, CheckCircle2, ListTodo } from "lucide-react";
import type React from "react";
-import { isOptimistic } from "../../shared/optimistic";
+import { isOptimistic } from "@/features/shared/utils/optimistic";
import { OptimisticIndicator } from "../../ui/primitives/OptimisticIndicator";
import { cn } from "../../ui/primitives/styles";
import type { Project } from "../types";
diff --git a/archon-ui-main/src/features/projects/documents/components/DocumentCard.tsx b/archon-ui-main/src/features/projects/documents/components/DocumentCard.tsx
index 25b12365..06241a46 100644
--- a/archon-ui-main/src/features/projects/documents/components/DocumentCard.tsx
+++ b/archon-ui-main/src/features/projects/documents/components/DocumentCard.tsx
@@ -13,9 +13,9 @@ import {
} from "lucide-react";
import type React from "react";
import { memo, useCallback, useState } from "react";
+import { copyToClipboard } from "../../../shared/utils/clipboard";
import { Button } from "../../../ui/primitives";
import type { DocumentCardProps, DocumentType } from "../types";
-import { copyToClipboard } from "../../../shared/utils/clipboard";
const getDocumentIcon = (type?: DocumentType) => {
switch (type) {
diff --git a/archon-ui-main/src/features/projects/documents/hooks/useDocumentQueries.ts b/archon-ui-main/src/features/projects/documents/hooks/useDocumentQueries.ts
index 0a7d23ee..00c6eea6 100644
--- a/archon-ui-main/src/features/projects/documents/hooks/useDocumentQueries.ts
+++ b/archon-ui-main/src/features/projects/documents/hooks/useDocumentQueries.ts
@@ -1,5 +1,5 @@
import { useQuery } from "@tanstack/react-query";
-import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../../shared/queryPatterns";
+import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../../shared/config/queryPatterns";
import { projectService } from "../../services";
import type { ProjectDocument } from "../types";
diff --git a/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts b/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts
index ae216e66..946647ab 100644
--- a/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts
+++ b/archon-ui-main/src/features/projects/hooks/useProjectQueries.ts
@@ -1,13 +1,13 @@
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
+import { useSmartPolling } from "@/features/shared/hooks";
+import { useToast } from "@/features/shared/hooks/useToast";
import {
createOptimisticEntity,
type OptimisticEntity,
removeDuplicateEntities,
replaceOptimisticEntity,
-} from "@/features/shared/optimistic";
-import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../shared/queryPatterns";
-import { useSmartPolling } from "@/features/shared/hooks";
-import { useToast } from "@/features/shared/hooks/useToast";
+} from "@/features/shared/utils/optimistic";
+import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../shared/config/queryPatterns";
import { projectService } from "../services";
import type { CreateProjectRequest, Project, UpdateProjectRequest } from "../types";
@@ -36,9 +36,7 @@ export function useProjects() {
// Fetch project features
export function useProjectFeatures(projectId: string | undefined) {
- // TODO: Phase 4 - Add explicit typing: useQuery<Awaited<ReturnType<typeof projectService.getProjectFeatures>>>
- // See PRPs/local/frontend-state-management-refactor.md Phase 4: Configure Request Deduplication
- return useQuery({
+ return useQuery<Awaited<ReturnType<typeof projectService.getProjectFeatures>>>({
queryKey: projectId ? projectKeys.features(projectId) : DISABLED_QUERY_KEY,
queryFn: () => (projectId ? projectService.getProjectFeatures(projectId) : Promise.reject("No project ID")),
enabled: !!projectId,
@@ -208,6 +206,8 @@ export function useDeleteProject() {
// Don't refetch on success - trust optimistic update
// Only remove the specific project's detail data (including nested keys)
queryClient.removeQueries({ queryKey: projectKeys.detail(projectId), exact: false });
+ // Also remove the project's feature queries
+ queryClient.removeQueries({ queryKey: projectKeys.features(projectId), exact: false });
showToast("Project deleted successfully", "success");
},
});
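
Two things happen in the useProjectQueries hunks above: the features query gains an explicit data type derived from the service (the generic's angle-bracket contents are mangled in this rendering, so the exact type parameter is reconstructed), and deleting a project now also evicts that project's feature queries. A sketch of the typing pattern, with the service shape and query keys assumed:

import { useQuery } from "@tanstack/react-query";

// Assumed shapes for illustration — the real projectService and key factory
// live in files this diff only partially shows.
interface ProjectFeatures {
  features: Array<{ id: string; label: string }>;
}
declare const projectService: {
  getProjectFeatures(projectId: string): Promise<ProjectFeatures>;
};
const DISABLED_QUERY_KEY = ["disabled"] as const;

function useProjectFeaturesSketch(projectId: string | undefined) {
  // Awaited<ReturnType<...>> keeps the query's data type pinned to whatever
  // the service actually returns, with no hand-maintained duplicate type.
  return useQuery<Awaited<ReturnType<typeof projectService.getProjectFeatures>>>({
    queryKey: projectId ? ["projects", projectId, "features"] : DISABLED_QUERY_KEY,
    queryFn: () => (projectId ? projectService.getProjectFeatures(projectId) : Promise.reject("No project ID")),
    enabled: !!projectId,
  });
}
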
diff --git a/archon-ui-main/src/features/projects/services/projectService.ts b/archon-ui-main/src/features/projects/services/projectService.ts
index f74675ca..58b1f3e6 100644
--- a/archon-ui-main/src/features/projects/services/projectService.ts
+++ b/archon-ui-main/src/features/projects/services/projectService.ts
@@ -3,8 +3,8 @@
* Focused service for project CRUD operations only
*/
-import { callAPIWithETag } from "../../shared/apiWithEtag";
-import { formatZodErrors, ValidationError } from "../../shared/errors";
+import { callAPIWithETag } from "../../shared/api/apiClient";
+import { formatZodErrors, ValidationError } from "../../shared/types/errors";
import { validateCreateProject, validateUpdateProject } from "../schemas";
import { formatRelativeTime } from "../shared/api";
import type { CreateProjectRequest, Project, ProjectFeatures, UpdateProjectRequest } from "../types";
diff --git a/archon-ui-main/src/features/projects/tasks/components/TaskCard.tsx b/archon-ui-main/src/features/projects/tasks/components/TaskCard.tsx
index 913964c6..c8e09464 100644
--- a/archon-ui-main/src/features/projects/tasks/components/TaskCard.tsx
+++ b/archon-ui-main/src/features/projects/tasks/components/TaskCard.tsx
@@ -2,7 +2,7 @@ import { Tag } from "lucide-react";
import type React from "react";
import { useCallback } from "react";
import { useDrag, useDrop } from "react-dnd";
-import { isOptimistic } from "../../../shared/optimistic";
+import { isOptimistic } from "@/features/shared/utils/optimistic";
import { OptimisticIndicator } from "../../../ui/primitives/OptimisticIndicator";
import { useTaskActions } from "../hooks";
import type { Assignee, Task, TaskPriority } from "../types";
diff --git a/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts b/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts
index 55b4bbd0..2020a96d 100644
--- a/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts
+++ b/archon-ui-main/src/features/projects/tasks/hooks/useTaskQueries.ts
@@ -1,11 +1,11 @@
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import {
createOptimisticEntity,
- replaceOptimisticEntity,
- removeDuplicateEntities,
type OptimisticEntity,
-} from "@/features/shared/optimistic";
-import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../../shared/queryPatterns";
+ removeDuplicateEntities,
+ replaceOptimisticEntity,
+} from "@/features/shared/utils/optimistic";
+import { DISABLED_QUERY_KEY, STALE_TIMES } from "../../../shared/config/queryPatterns";
import { useSmartPolling } from "../../../shared/hooks";
import { useToast } from "../../../shared/hooks/useToast";
import { taskService } from "../services";
diff --git a/archon-ui-main/src/features/projects/tasks/services/taskService.ts b/archon-ui-main/src/features/projects/tasks/services/taskService.ts
index 223bdb73..dc2db1ed 100644
--- a/archon-ui-main/src/features/projects/tasks/services/taskService.ts
+++ b/archon-ui-main/src/features/projects/tasks/services/taskService.ts
@@ -3,8 +3,8 @@
* Focused service for task CRUD operations only
*/
-import { callAPIWithETag } from "../../../shared/apiWithEtag";
-import { formatZodErrors, ValidationError } from "../../../shared/errors";
+import { callAPIWithETag } from "../../../shared/api/apiClient";
+import { formatZodErrors, ValidationError } from "../../../shared/types/errors";
import { validateCreateTask, validateUpdateTask, validateUpdateTaskStatus } from "../schemas";
import type { CreateTaskRequest, DatabaseTaskStatus, Task, TaskCounts, UpdateTaskRequest } from "../types";
diff --git a/archon-ui-main/src/features/projects/tasks/services/tests/taskService.test.ts b/archon-ui-main/src/features/projects/tasks/services/tests/taskService.test.ts
index d86cc94d..d4215814 100644
--- a/archon-ui-main/src/features/projects/tasks/services/tests/taskService.test.ts
+++ b/archon-ui-main/src/features/projects/tasks/services/tests/taskService.test.ts
@@ -1,10 +1,10 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
-import { callAPIWithETag } from "../../../../shared/apiWithEtag";
+import { callAPIWithETag } from "../../../../shared/api/apiClient";
import type { CreateTaskRequest, DatabaseTaskStatus, Task, UpdateTaskRequest } from "../../types";
import { taskService } from "../taskService";
// Mock the API call
-vi.mock("../../../../shared/apiWithEtag", () => ({
+vi.mock("../../../../shared/api/apiClient", () => ({
callAPIWithETag: vi.fn(),
}));
diff --git a/archon-ui-main/src/features/settings/migrations/components/MigrationStatusCard.tsx b/archon-ui-main/src/features/settings/migrations/components/MigrationStatusCard.tsx
index 2b29531c..be4317a5 100644
--- a/archon-ui-main/src/features/settings/migrations/components/MigrationStatusCard.tsx
+++ b/archon-ui-main/src/features/settings/migrations/components/MigrationStatusCard.tsx
@@ -29,7 +29,8 @@ export function MigrationStatusCard() {