mirror of
https://github.com/coleam00/Archon.git
synced 2026-01-10 08:38:27 -05:00
Merge branch 'ui-changes-backup' into merge-ui-socket-fixes
This commit is contained in:
@@ -265,9 +265,10 @@ When connected to Cursor/Windsurf:
|
|||||||
|
|
||||||
- `archon:perform_rag_query` - Search knowledge base
|
- `archon:perform_rag_query` - Search knowledge base
|
||||||
- `archon:search_code_examples` - Find code snippets
|
- `archon:search_code_examples` - Find code snippets
|
||||||
- `archon:manage_project` - Project operations
|
- `archon:create_project`, `archon:list_projects`, `archon:get_project`, `archon:update_project`, `archon:delete_project` - Project operations
|
||||||
- `archon:manage_task` - Task management
|
- `archon:create_task`, `archon:list_tasks`, `archon:get_task`, `archon:update_task`, `archon:delete_task` - Task management
|
||||||
- `archon:get_available_sources` - List knowledge sources
|
- `archon:get_available_sources` - List knowledge sources
|
||||||
|
- `archon:get_project_features` - Get project features
|
||||||
|
|
||||||
## Important Notes
|
## Important Notes
|
||||||
|
|
||||||
|
|||||||
21
archon-ui-main/package-lock.json
generated
21
archon-ui-main/package-lock.json
generated
@@ -12,6 +12,7 @@
|
|||||||
"@milkdown/kit": "^7.5.0",
|
"@milkdown/kit": "^7.5.0",
|
||||||
"@milkdown/plugin-history": "^7.5.0",
|
"@milkdown/plugin-history": "^7.5.0",
|
||||||
"@milkdown/preset-commonmark": "^7.5.0",
|
"@milkdown/preset-commonmark": "^7.5.0",
|
||||||
|
"@types/uuid": "^10.0.0",
|
||||||
"@xyflow/react": "^12.3.0",
|
"@xyflow/react": "^12.3.0",
|
||||||
"clsx": "latest",
|
"clsx": "latest",
|
||||||
"date-fns": "^4.1.0",
|
"date-fns": "^4.1.0",
|
||||||
@@ -26,6 +27,7 @@
|
|||||||
"react-router-dom": "^6.26.2",
|
"react-router-dom": "^6.26.2",
|
||||||
"socket.io-client": "^4.8.1",
|
"socket.io-client": "^4.8.1",
|
||||||
"tailwind-merge": "latest",
|
"tailwind-merge": "latest",
|
||||||
|
"uuid": "^11.1.0",
|
||||||
"zod": "^3.25.46"
|
"zod": "^3.25.46"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@@ -2977,6 +2979,12 @@
|
|||||||
"integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
|
"integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/uuid": {
|
||||||
|
"version": "10.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-10.0.0.tgz",
|
||||||
|
"integrity": "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||||
"version": "5.62.0",
|
"version": "5.62.0",
|
||||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz",
|
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz",
|
||||||
@@ -10025,6 +10033,19 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/uuid": {
|
||||||
|
"version": "11.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz",
|
||||||
|
"integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==",
|
||||||
|
"funding": [
|
||||||
|
"https://github.com/sponsors/broofa",
|
||||||
|
"https://github.com/sponsors/ctavan"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"bin": {
|
||||||
|
"uuid": "dist/esm/bin/uuid"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/v8-compile-cache-lib": {
|
"node_modules/v8-compile-cache-lib": {
|
||||||
"version": "3.0.1",
|
"version": "3.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
|
||||||
|
|||||||
@@ -22,6 +22,7 @@
|
|||||||
"@milkdown/kit": "^7.5.0",
|
"@milkdown/kit": "^7.5.0",
|
||||||
"@milkdown/plugin-history": "^7.5.0",
|
"@milkdown/plugin-history": "^7.5.0",
|
||||||
"@milkdown/preset-commonmark": "^7.5.0",
|
"@milkdown/preset-commonmark": "^7.5.0",
|
||||||
|
"@types/uuid": "^10.0.0",
|
||||||
"@xyflow/react": "^12.3.0",
|
"@xyflow/react": "^12.3.0",
|
||||||
"clsx": "latest",
|
"clsx": "latest",
|
||||||
"date-fns": "^4.1.0",
|
"date-fns": "^4.1.0",
|
||||||
@@ -36,6 +37,7 @@
|
|||||||
"react-router-dom": "^6.26.2",
|
"react-router-dom": "^6.26.2",
|
||||||
"socket.io-client": "^4.8.1",
|
"socket.io-client": "^4.8.1",
|
||||||
"tailwind-merge": "latest",
|
"tailwind-merge": "latest",
|
||||||
|
"uuid": "^11.1.0",
|
||||||
"zod": "^3.25.46"
|
"zod": "^3.25.46"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|||||||
319
archon-ui-main/src/components/ErrorBoundary.tsx
Normal file
319
archon-ui-main/src/components/ErrorBoundary.tsx
Normal file
@@ -0,0 +1,319 @@
|
|||||||
|
/**
|
||||||
|
* Error Boundary Component with React 18 Features
|
||||||
|
* Provides fallback UI and error recovery options
|
||||||
|
*/
|
||||||
|
|
||||||
|
import React, { Component, ErrorInfo, ReactNode, Suspense } from 'react';
|
||||||
|
import { AlertTriangle, RefreshCw, Home } from 'lucide-react';
|
||||||
|
|
||||||
|
interface ErrorBoundaryState {
|
||||||
|
hasError: boolean;
|
||||||
|
error: Error | null;
|
||||||
|
errorInfo: ErrorInfo | null;
|
||||||
|
errorCount: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ErrorBoundaryProps {
|
||||||
|
children: ReactNode;
|
||||||
|
fallback?: (error: Error, errorInfo: ErrorInfo, reset: () => void) => ReactNode;
|
||||||
|
onError?: (error: Error, errorInfo: ErrorInfo) => void;
|
||||||
|
resetKeys?: Array<string | number>;
|
||||||
|
resetOnPropsChange?: boolean;
|
||||||
|
isolate?: boolean;
|
||||||
|
level?: 'page' | 'section' | 'component';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced Error Boundary with recovery options
|
||||||
|
*/
|
||||||
|
export class ErrorBoundary extends Component<ErrorBoundaryProps, ErrorBoundaryState> {
|
||||||
|
private resetTimeoutId: NodeJS.Timeout | null = null;
|
||||||
|
private previousResetKeys: Array<string | number> = [];
|
||||||
|
|
||||||
|
constructor(props: ErrorBoundaryProps) {
|
||||||
|
super(props);
|
||||||
|
this.state = {
|
||||||
|
hasError: false,
|
||||||
|
error: null,
|
||||||
|
errorInfo: null,
|
||||||
|
errorCount: 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
static getDerivedStateFromError(error: Error): Partial<ErrorBoundaryState> {
|
||||||
|
return {
|
||||||
|
hasError: true,
|
||||||
|
error
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidCatch(error: Error, errorInfo: ErrorInfo): void {
|
||||||
|
const { onError } = this.props;
|
||||||
|
|
||||||
|
// Log error details
|
||||||
|
console.error('Error caught by boundary:', error);
|
||||||
|
console.error('Error info:', errorInfo);
|
||||||
|
|
||||||
|
// Update state with error details
|
||||||
|
this.setState(prevState => ({
|
||||||
|
errorInfo,
|
||||||
|
errorCount: prevState.errorCount + 1
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Call error handler if provided
|
||||||
|
if (onError) {
|
||||||
|
onError(error, errorInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
// In alpha, we want to fail fast and require explicit user action
|
||||||
|
// Log detailed error information for debugging
|
||||||
|
console.error('[ErrorBoundary] Component error caught:', {
|
||||||
|
error: error.toString(),
|
||||||
|
stack: error.stack,
|
||||||
|
componentStack: errorInfo.componentStack,
|
||||||
|
errorCount: this.state.errorCount + 1,
|
||||||
|
isolate: this.props.isolate
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidUpdate(prevProps: ErrorBoundaryProps): void {
|
||||||
|
const { resetKeys, resetOnPropsChange } = this.props;
|
||||||
|
const { hasError } = this.state;
|
||||||
|
|
||||||
|
// Reset on prop changes if enabled
|
||||||
|
if (hasError && prevProps.children !== this.props.children && resetOnPropsChange) {
|
||||||
|
this.reset();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset on resetKeys change
|
||||||
|
if (hasError && resetKeys && this.previousResetKeys !== resetKeys) {
|
||||||
|
const hasResetKeyChanged = resetKeys.some(
|
||||||
|
(key, index) => key !== this.previousResetKeys[index]
|
||||||
|
);
|
||||||
|
|
||||||
|
if (hasResetKeyChanged) {
|
||||||
|
this.reset();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.previousResetKeys = resetKeys || [];
|
||||||
|
}
|
||||||
|
|
||||||
|
componentWillUnmount(): void {
|
||||||
|
if (this.resetTimeoutId) {
|
||||||
|
clearTimeout(this.resetTimeoutId);
|
||||||
|
this.resetTimeoutId = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
reset = (): void => {
|
||||||
|
if (this.resetTimeoutId) {
|
||||||
|
clearTimeout(this.resetTimeoutId);
|
||||||
|
this.resetTimeoutId = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.setState({
|
||||||
|
hasError: false,
|
||||||
|
error: null,
|
||||||
|
errorInfo: null
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
render(): ReactNode {
|
||||||
|
const { hasError, error, errorInfo, errorCount } = this.state;
|
||||||
|
const { children, fallback, level = 'component' } = this.props;
|
||||||
|
|
||||||
|
if (hasError && error && errorInfo) {
|
||||||
|
// Use custom fallback if provided
|
||||||
|
if (fallback) {
|
||||||
|
return fallback(error, errorInfo, this.reset);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default fallback UI based on level
|
||||||
|
return <DefaultErrorFallback
|
||||||
|
error={error}
|
||||||
|
errorInfo={errorInfo}
|
||||||
|
reset={this.reset}
|
||||||
|
level={level}
|
||||||
|
errorCount={errorCount}
|
||||||
|
/>;
|
||||||
|
}
|
||||||
|
|
||||||
|
return children;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default error fallback component
|
||||||
|
*/
|
||||||
|
interface DefaultErrorFallbackProps {
|
||||||
|
error: Error;
|
||||||
|
errorInfo: ErrorInfo;
|
||||||
|
reset: () => void;
|
||||||
|
level: 'page' | 'section' | 'component';
|
||||||
|
errorCount: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DefaultErrorFallback: React.FC<DefaultErrorFallbackProps> = ({
|
||||||
|
error,
|
||||||
|
errorInfo,
|
||||||
|
reset,
|
||||||
|
level,
|
||||||
|
errorCount
|
||||||
|
}) => {
|
||||||
|
const isPageLevel = level === 'page';
|
||||||
|
const isSectionLevel = level === 'section';
|
||||||
|
|
||||||
|
if (level === 'component') {
|
||||||
|
// Minimal component-level error
|
||||||
|
return (
|
||||||
|
<div className="p-4 bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<AlertTriangle className="w-4 h-4 text-red-500" />
|
||||||
|
<span className="text-sm text-red-700 dark:text-red-300">
|
||||||
|
Component error occurred
|
||||||
|
</span>
|
||||||
|
<button
|
||||||
|
onClick={reset}
|
||||||
|
className="ml-auto text-xs text-red-600 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300"
|
||||||
|
>
|
||||||
|
Retry
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={`
|
||||||
|
${isPageLevel ? 'min-h-screen' : isSectionLevel ? 'min-h-[400px]' : 'min-h-[200px]'}
|
||||||
|
flex items-center justify-center p-8
|
||||||
|
bg-gradient-to-br from-red-50 to-orange-50
|
||||||
|
dark:from-gray-900 dark:to-gray-800
|
||||||
|
`}>
|
||||||
|
<div className="max-w-2xl w-full">
|
||||||
|
<div className="bg-white dark:bg-gray-800 rounded-2xl shadow-xl p-8">
|
||||||
|
{/* Error Icon */}
|
||||||
|
<div className="flex justify-center mb-6">
|
||||||
|
<div className="p-4 bg-red-100 dark:bg-red-900/30 rounded-full">
|
||||||
|
<AlertTriangle className="w-12 h-12 text-red-500" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Error Title */}
|
||||||
|
<h1 className="text-2xl font-bold text-center text-gray-900 dark:text-white mb-2">
|
||||||
|
{isPageLevel ? 'Something went wrong' : 'An error occurred'}
|
||||||
|
</h1>
|
||||||
|
|
||||||
|
{/* Error Message */}
|
||||||
|
<p className="text-center text-gray-600 dark:text-gray-400 mb-6">
|
||||||
|
{error.message || 'An unexpected error occurred while rendering this component.'}
|
||||||
|
</p>
|
||||||
|
|
||||||
|
{/* Retry Count */}
|
||||||
|
{errorCount > 1 && (
|
||||||
|
<p className="text-center text-sm text-gray-500 dark:text-gray-500 mb-4">
|
||||||
|
This error has occurred {errorCount} times
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Action Buttons */}
|
||||||
|
<div className="flex flex-col sm:flex-row gap-3 justify-center">
|
||||||
|
<button
|
||||||
|
onClick={reset}
|
||||||
|
className="
|
||||||
|
flex items-center justify-center gap-2 px-6 py-3
|
||||||
|
bg-blue-500 hover:bg-blue-600
|
||||||
|
text-white font-medium rounded-lg
|
||||||
|
transition-colors duration-150
|
||||||
|
"
|
||||||
|
>
|
||||||
|
<RefreshCw className="w-4 h-4" />
|
||||||
|
Try Again
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{isPageLevel && (
|
||||||
|
<button
|
||||||
|
onClick={() => window.location.href = '/'}
|
||||||
|
className="
|
||||||
|
flex items-center justify-center gap-2 px-6 py-3
|
||||||
|
bg-gray-200 hover:bg-gray-300
|
||||||
|
dark:bg-gray-700 dark:hover:bg-gray-600
|
||||||
|
text-gray-700 dark:text-gray-200 font-medium rounded-lg
|
||||||
|
transition-colors duration-150
|
||||||
|
"
|
||||||
|
>
|
||||||
|
<Home className="w-4 h-4" />
|
||||||
|
Go Home
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Error Details (Development Only) */}
|
||||||
|
{process.env.NODE_ENV === 'development' && (
|
||||||
|
<details className="mt-8 p-4 bg-gray-100 dark:bg-gray-900 rounded-lg">
|
||||||
|
<summary className="cursor-pointer text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
Error Details (Development Only)
|
||||||
|
</summary>
|
||||||
|
<div className="mt-4 space-y-2">
|
||||||
|
<div>
|
||||||
|
<p className="text-xs font-mono text-gray-600 dark:text-gray-400">
|
||||||
|
{error.stack}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<p className="text-xs text-gray-500 dark:text-gray-500">
|
||||||
|
Component Stack:
|
||||||
|
</p>
|
||||||
|
<p className="text-xs font-mono text-gray-600 dark:text-gray-400">
|
||||||
|
{errorInfo.componentStack}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</details>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Suspense Error Boundary - combines Suspense with Error Boundary
|
||||||
|
*/
|
||||||
|
interface SuspenseErrorBoundaryProps {
|
||||||
|
children: ReactNode;
|
||||||
|
fallback?: ReactNode;
|
||||||
|
errorFallback?: (error: Error, errorInfo: ErrorInfo, reset: () => void) => ReactNode;
|
||||||
|
level?: 'page' | 'section' | 'component';
|
||||||
|
}
|
||||||
|
|
||||||
|
export const SuspenseErrorBoundary: React.FC<SuspenseErrorBoundaryProps> = ({
|
||||||
|
children,
|
||||||
|
fallback,
|
||||||
|
errorFallback,
|
||||||
|
level = 'component'
|
||||||
|
}) => {
|
||||||
|
const defaultFallback = (
|
||||||
|
<div className="flex items-center justify-center p-8">
|
||||||
|
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-500"></div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ErrorBoundary fallback={errorFallback} level={level}>
|
||||||
|
<Suspense fallback={fallback || defaultFallback}>
|
||||||
|
{children}
|
||||||
|
</Suspense>
|
||||||
|
</ErrorBoundary>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Hook to reset error boundaries
|
||||||
|
*/
|
||||||
|
export function useErrorHandler(): (error: Error) => void {
|
||||||
|
return (error: Error) => {
|
||||||
|
throw error;
|
||||||
|
};
|
||||||
|
}
|
||||||
365
archon-ui-main/src/components/SearchableList.tsx
Normal file
365
archon-ui-main/src/components/SearchableList.tsx
Normal file
@@ -0,0 +1,365 @@
|
|||||||
|
/**
|
||||||
|
* SearchableList Component with React 18 Concurrent Features
|
||||||
|
* Uses useTransition for non-blocking search updates
|
||||||
|
*/
|
||||||
|
|
||||||
|
import React, { useState, useTransition, useMemo, useCallback } from 'react';
|
||||||
|
import { Search, X, Loader2 } from 'lucide-react';
|
||||||
|
|
||||||
|
export interface SearchableListItem {
|
||||||
|
id: string;
|
||||||
|
title: string;
|
||||||
|
description?: string;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SearchableListProps<T extends SearchableListItem> {
|
||||||
|
items: T[];
|
||||||
|
onItemClick?: (item: T) => void;
|
||||||
|
onItemSelect?: (item: T) => void;
|
||||||
|
renderItem?: (item: T, isHighlighted: boolean) => React.ReactNode;
|
||||||
|
searchFields?: (keyof T)[];
|
||||||
|
placeholder?: string;
|
||||||
|
emptyMessage?: string;
|
||||||
|
className?: string;
|
||||||
|
itemClassName?: string;
|
||||||
|
enableMultiSelect?: boolean;
|
||||||
|
selectedItems?: T[];
|
||||||
|
virtualize?: boolean;
|
||||||
|
virtualizeThreshold?: number;
|
||||||
|
// Virtualization configuration
|
||||||
|
itemHeight?: number; // Height of each item in pixels (default: 80)
|
||||||
|
containerHeight?: number; // Height of scrollable container in pixels (default: 600)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SearchableList with React 18 concurrent features
|
||||||
|
*/
|
||||||
|
export function SearchableList<T extends SearchableListItem>({
|
||||||
|
items,
|
||||||
|
onItemClick,
|
||||||
|
onItemSelect,
|
||||||
|
renderItem,
|
||||||
|
searchFields = ['title', 'description'] as (keyof T)[],
|
||||||
|
placeholder = 'Search...',
|
||||||
|
emptyMessage = 'No items found',
|
||||||
|
className = '',
|
||||||
|
itemClassName = '',
|
||||||
|
enableMultiSelect = false,
|
||||||
|
selectedItems = [],
|
||||||
|
virtualize = true,
|
||||||
|
virtualizeThreshold = 100,
|
||||||
|
itemHeight = 80,
|
||||||
|
containerHeight = 600
|
||||||
|
}: SearchableListProps<T>) {
|
||||||
|
const [searchQuery, setSearchQuery] = useState('');
|
||||||
|
const [highlightedId, setHighlightedId] = useState<string | null>(null);
|
||||||
|
const [selectedIds, setSelectedIds] = useState<Set<string>>(
|
||||||
|
new Set(selectedItems.map(item => item.id))
|
||||||
|
);
|
||||||
|
|
||||||
|
// Use transition for non-blocking search updates
|
||||||
|
const [isPending, startTransition] = useTransition();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter items based on search query with transition
|
||||||
|
*/
|
||||||
|
const filteredItems = useMemo(() => {
|
||||||
|
if (!searchQuery.trim()) {
|
||||||
|
return items;
|
||||||
|
}
|
||||||
|
|
||||||
|
const query = searchQuery.toLowerCase();
|
||||||
|
return items.filter(item => {
|
||||||
|
return searchFields.some(field => {
|
||||||
|
const value = item[field];
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
return value.toLowerCase().includes(query);
|
||||||
|
}
|
||||||
|
if (value && typeof value === 'object') {
|
||||||
|
return JSON.stringify(value).toLowerCase().includes(query);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}, [items, searchQuery, searchFields]);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle search input with transition
|
||||||
|
*/
|
||||||
|
const handleSearchChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
|
||||||
|
const value = e.target.value;
|
||||||
|
|
||||||
|
// Use transition for non-urgent update
|
||||||
|
startTransition(() => {
|
||||||
|
setSearchQuery(value);
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear search
|
||||||
|
*/
|
||||||
|
const handleClearSearch = useCallback(() => {
|
||||||
|
startTransition(() => {
|
||||||
|
setSearchQuery('');
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle item selection
|
||||||
|
*/
|
||||||
|
const handleItemSelect = useCallback((item: T) => {
|
||||||
|
if (enableMultiSelect) {
|
||||||
|
setSelectedIds(prev => {
|
||||||
|
const next = new Set(prev);
|
||||||
|
if (next.has(item.id)) {
|
||||||
|
next.delete(item.id);
|
||||||
|
} else {
|
||||||
|
next.add(item.id);
|
||||||
|
}
|
||||||
|
return next;
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
setSelectedIds(new Set([item.id]));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (onItemSelect) {
|
||||||
|
onItemSelect(item);
|
||||||
|
}
|
||||||
|
}, [enableMultiSelect, onItemSelect]);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle item click
|
||||||
|
*/
|
||||||
|
const handleItemClick = useCallback((item: T) => {
|
||||||
|
if (onItemClick) {
|
||||||
|
onItemClick(item);
|
||||||
|
} else {
|
||||||
|
handleItemSelect(item);
|
||||||
|
}
|
||||||
|
}, [onItemClick, handleItemSelect]);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default item renderer
|
||||||
|
*/
|
||||||
|
const defaultRenderItem = useCallback((item: T, isHighlighted: boolean) => {
|
||||||
|
const isSelected = selectedIds.has(item.id);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
className={`
|
||||||
|
p-3 cursor-pointer transition-all duration-150
|
||||||
|
${isHighlighted ? 'bg-blue-50 dark:bg-blue-900/20' : ''}
|
||||||
|
${isSelected ? 'bg-blue-100 dark:bg-blue-900/30 border-l-4 border-blue-500' : ''}
|
||||||
|
hover:bg-gray-50 dark:hover:bg-gray-800
|
||||||
|
${itemClassName}
|
||||||
|
`}
|
||||||
|
onMouseEnter={() => setHighlightedId(item.id)}
|
||||||
|
onMouseLeave={() => setHighlightedId(null)}
|
||||||
|
onClick={() => handleItemClick(item)}
|
||||||
|
>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex-1 min-w-0">
|
||||||
|
<h4 className="text-sm font-medium text-gray-900 dark:text-gray-100 truncate">
|
||||||
|
{item.title}
|
||||||
|
</h4>
|
||||||
|
{item.description && (
|
||||||
|
<p className="mt-1 text-xs text-gray-500 dark:text-gray-400 line-clamp-2">
|
||||||
|
{item.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{enableMultiSelect && (
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={isSelected}
|
||||||
|
onChange={() => handleItemSelect(item)}
|
||||||
|
onClick={(e) => e.stopPropagation()}
|
||||||
|
className="ml-3 mt-1 h-4 w-4 text-blue-600 rounded focus:ring-blue-500"
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}, [selectedIds, itemClassName, handleItemClick, handleItemSelect, enableMultiSelect]);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Virtualized list renderer for large lists
|
||||||
|
*/
|
||||||
|
const [scrollTop, setScrollTop] = useState(0);
|
||||||
|
|
||||||
|
const renderVirtualizedList = useCallback(() => {
|
||||||
|
// Simple virtualization with configurable dimensions
|
||||||
|
const visibleCount = Math.ceil(containerHeight / itemHeight);
|
||||||
|
|
||||||
|
const startIndex = Math.floor(scrollTop / itemHeight);
|
||||||
|
const endIndex = Math.min(startIndex + visibleCount + 1, filteredItems.length);
|
||||||
|
const visibleItems = filteredItems.slice(startIndex, endIndex);
|
||||||
|
const totalHeight = filteredItems.length * itemHeight;
|
||||||
|
const offsetY = startIndex * itemHeight;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
className="relative overflow-auto"
|
||||||
|
style={{ height: containerHeight }}
|
||||||
|
onScroll={(e) => setScrollTop(e.currentTarget.scrollTop)}
|
||||||
|
>
|
||||||
|
<div style={{ height: totalHeight }}>
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
transform: `translateY(${offsetY}px)`
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{visibleItems.map(item => (
|
||||||
|
<div key={item.id} style={{ height: itemHeight }}>
|
||||||
|
{renderItem ? renderItem(item, highlightedId === item.id) : defaultRenderItem(item, highlightedId === item.id)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}, [filteredItems, highlightedId, renderItem, defaultRenderItem, containerHeight, itemHeight, scrollTop]);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Regular list renderer
|
||||||
|
*/
|
||||||
|
const renderRegularList = useCallback(() => {
|
||||||
|
return (
|
||||||
|
<div className="divide-y divide-gray-200 dark:divide-gray-700">
|
||||||
|
{filteredItems.map(item => (
|
||||||
|
<div key={item.id}>
|
||||||
|
{renderItem ? renderItem(item, highlightedId === item.id) : defaultRenderItem(item, highlightedId === item.id)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}, [filteredItems, highlightedId, renderItem, defaultRenderItem]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={`flex flex-col ${className}`}>
|
||||||
|
{/* Search Bar */}
|
||||||
|
<div className="relative mb-4">
|
||||||
|
<div className="relative">
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value={searchQuery}
|
||||||
|
onChange={handleSearchChange}
|
||||||
|
placeholder={placeholder}
|
||||||
|
className={`
|
||||||
|
w-full pl-10 pr-10 py-2
|
||||||
|
border border-gray-300 dark:border-gray-600
|
||||||
|
rounded-lg
|
||||||
|
bg-white dark:bg-gray-800
|
||||||
|
text-gray-900 dark:text-gray-100
|
||||||
|
placeholder-gray-500 dark:placeholder-gray-400
|
||||||
|
focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-transparent
|
||||||
|
transition-all duration-150
|
||||||
|
${isPending ? 'opacity-70' : ''}
|
||||||
|
`}
|
||||||
|
/>
|
||||||
|
<div className="absolute inset-y-0 left-0 pl-3 flex items-center pointer-events-none">
|
||||||
|
{isPending ? (
|
||||||
|
<Loader2 className="h-4 w-4 text-gray-400 animate-spin" />
|
||||||
|
) : (
|
||||||
|
<Search className="h-4 w-4 text-gray-400" />
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{searchQuery && (
|
||||||
|
<button
|
||||||
|
onClick={handleClearSearch}
|
||||||
|
className="absolute inset-y-0 right-0 pr-3 flex items-center"
|
||||||
|
>
|
||||||
|
<X className="h-4 w-4 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300" />
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{isPending && (
|
||||||
|
<div className="absolute top-full left-0 mt-1 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
Searching...
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Results Count */}
|
||||||
|
{searchQuery && (
|
||||||
|
<div className="mb-2 text-sm text-gray-600 dark:text-gray-400">
|
||||||
|
{filteredItems.length} result{filteredItems.length !== 1 ? 's' : ''} found
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* List Container */}
|
||||||
|
<div className="flex-1 overflow-auto">
|
||||||
|
{filteredItems.length === 0 ? (
|
||||||
|
<div className="text-center py-12 text-gray-500 dark:text-gray-400">
|
||||||
|
{emptyMessage}
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
{virtualize && filteredItems.length > virtualizeThreshold
|
||||||
|
? renderVirtualizedList()
|
||||||
|
: renderRegularList()
|
||||||
|
}
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Selection Summary */}
|
||||||
|
{enableMultiSelect && selectedIds.size > 0 && (
|
||||||
|
<div className="mt-4 p-3 bg-blue-50 dark:bg-blue-900/20 rounded-lg">
|
||||||
|
<p className="text-sm text-blue-700 dark:text-blue-300">
|
||||||
|
{selectedIds.size} item{selectedIds.size !== 1 ? 's' : ''} selected
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Hook for managing searchable list state
|
||||||
|
*/
|
||||||
|
export function useSearchableList<T extends SearchableListItem>(
|
||||||
|
items: T[],
|
||||||
|
searchFields: (keyof T)[] = ['title', 'description'] as (keyof T)[]
|
||||||
|
) {
|
||||||
|
const [searchQuery, setSearchQuery] = useState('');
|
||||||
|
const [isPending, startTransition] = useTransition();
|
||||||
|
|
||||||
|
const filteredItems = useMemo(() => {
|
||||||
|
if (!searchQuery.trim()) {
|
||||||
|
return items;
|
||||||
|
}
|
||||||
|
|
||||||
|
const query = searchQuery.toLowerCase();
|
||||||
|
return items.filter(item => {
|
||||||
|
return searchFields.some(field => {
|
||||||
|
const value = item[field];
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
return value.toLowerCase().includes(query);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}, [items, searchQuery, searchFields]);
|
||||||
|
|
||||||
|
const updateSearch = useCallback((query: string) => {
|
||||||
|
startTransition(() => {
|
||||||
|
setSearchQuery(query);
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const clearSearch = useCallback(() => {
|
||||||
|
startTransition(() => {
|
||||||
|
setSearchQuery('');
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
return {
|
||||||
|
searchQuery,
|
||||||
|
filteredItems,
|
||||||
|
isPending,
|
||||||
|
updateSearch,
|
||||||
|
clearSearch
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -149,7 +149,7 @@ export const KnowledgeItemCard = ({
|
|||||||
const [showPageTooltip, setShowPageTooltip] = useState(false);
|
const [showPageTooltip, setShowPageTooltip] = useState(false);
|
||||||
const [isRemoving, setIsRemoving] = useState(false);
|
const [isRemoving, setIsRemoving] = useState(false);
|
||||||
const [showEditModal, setShowEditModal] = useState(false);
|
const [showEditModal, setShowEditModal] = useState(false);
|
||||||
const [loadedCodeExamples, setLoadedCodeExamples] = useState<any[] | null>(null);
|
const [loadedCodeExamples, setLoadedCodeExamples] = useState<Array<{id: string; summary: string; code: string; language?: string}> | null>(null);
|
||||||
const [isLoadingCodeExamples, setIsLoadingCodeExamples] = useState(false);
|
const [isLoadingCodeExamples, setIsLoadingCodeExamples] = useState(false);
|
||||||
|
|
||||||
const statusColorMap = {
|
const statusColorMap = {
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import { ToolTestingPanel } from './ToolTestingPanel';
|
|||||||
import { Button } from '../ui/Button';
|
import { Button } from '../ui/Button';
|
||||||
import { mcpClientService, MCPClient, MCPClientConfig } from '../../services/mcpClientService';
|
import { mcpClientService, MCPClient, MCPClientConfig } from '../../services/mcpClientService';
|
||||||
import { useToast } from '../../contexts/ToastContext';
|
import { useToast } from '../../contexts/ToastContext';
|
||||||
import { DeleteConfirmModal } from '../../pages/ProjectPage';
|
import { DeleteConfirmModal } from '../ui/DeleteConfirmModal';
|
||||||
|
|
||||||
// Client interface (keeping for backward compatibility)
|
// Client interface (keeping for backward compatibility)
|
||||||
export interface Client {
|
export interface Client {
|
||||||
@@ -710,18 +710,31 @@ const EditClientDrawer: React.FC<EditClientDrawerProps> = ({ client, isOpen, onC
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleDelete = async () => {
|
const handleDelete = () => {
|
||||||
if (confirm(`Are you sure you want to delete "${client.name}"?`)) {
|
setClientToDelete(client);
|
||||||
try {
|
setShowDeleteConfirm(true);
|
||||||
await mcpClientService.deleteClient(client.id);
|
};
|
||||||
onClose();
|
|
||||||
// Trigger a reload of the clients list
|
const confirmDeleteClient = async () => {
|
||||||
window.location.reload();
|
if (!clientToDelete) return;
|
||||||
} catch (error) {
|
|
||||||
setError(error instanceof Error ? error.message : 'Failed to delete client');
|
try {
|
||||||
}
|
await mcpClientService.deleteClient(clientToDelete.id);
|
||||||
|
onClose();
|
||||||
|
// Trigger a reload of the clients list
|
||||||
|
window.location.reload();
|
||||||
|
} catch (error) {
|
||||||
|
setError(error instanceof Error ? error.message : 'Failed to delete client');
|
||||||
|
} finally {
|
||||||
|
setShowDeleteConfirm(false);
|
||||||
|
setClientToDelete(null);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const cancelDeleteClient = () => {
|
||||||
|
setShowDeleteConfirm(false);
|
||||||
|
setClientToDelete(null);
|
||||||
|
};
|
||||||
|
|
||||||
if (!isOpen) return null;
|
if (!isOpen) return null;
|
||||||
|
|
||||||
@@ -853,6 +866,16 @@ const EditClientDrawer: React.FC<EditClientDrawerProps> = ({ client, isOpen, onC
|
|||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Delete Confirmation Modal */}
|
||||||
|
{showDeleteConfirm && clientToDelete && (
|
||||||
|
<DeleteConfirmModal
|
||||||
|
itemName={clientToDelete.name}
|
||||||
|
onConfirm={confirmDeleteClient}
|
||||||
|
onCancel={cancelDeleteClient}
|
||||||
|
type="client"
|
||||||
|
/>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
@@ -0,0 +1,213 @@
|
|||||||
|
import React, { useState, useRef, useEffect, useCallback } from 'react';
|
||||||
|
import { User, Bot, Code, Shield, CheckCircle } from 'lucide-react';
|
||||||
|
|
||||||
|
interface AssigneeTypeaheadInputProps {
|
||||||
|
value: string;
|
||||||
|
onChange: (value: string) => void;
|
||||||
|
placeholder?: string;
|
||||||
|
className?: string;
|
||||||
|
onKeyPress?: (e: React.KeyboardEvent<HTMLInputElement>) => void;
|
||||||
|
autoFocus?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default assignee options with icons
|
||||||
|
const DEFAULT_ASSIGNEES = [
|
||||||
|
{ value: 'User', icon: User, color: 'text-blue-500' },
|
||||||
|
{ value: 'Archon', icon: Bot, color: 'text-pink-500' },
|
||||||
|
{ value: 'AI IDE Agent', icon: Code, color: 'text-emerald-500' },
|
||||||
|
{ value: 'IDE Agent', icon: Code, color: 'text-emerald-500' },
|
||||||
|
{ value: 'prp-executor', icon: Shield, color: 'text-purple-500' },
|
||||||
|
{ value: 'prp-validator', icon: CheckCircle, color: 'text-cyan-500' }
|
||||||
|
];
|
||||||
|
|
||||||
|
export const AssigneeTypeaheadInput: React.FC<AssigneeTypeaheadInputProps> = ({
|
||||||
|
value,
|
||||||
|
onChange,
|
||||||
|
placeholder = 'Type or select assignee...',
|
||||||
|
className = '',
|
||||||
|
onKeyPress,
|
||||||
|
autoFocus = false
|
||||||
|
}) => {
|
||||||
|
const [inputValue, setInputValue] = useState(value);
|
||||||
|
const [isOpen, setIsOpen] = useState(false);
|
||||||
|
const [highlightedIndex, setHighlightedIndex] = useState(0);
|
||||||
|
const [filteredOptions, setFilteredOptions] = useState(DEFAULT_ASSIGNEES);
|
||||||
|
const inputRef = useRef<HTMLInputElement>(null);
|
||||||
|
const dropdownRef = useRef<HTMLDivElement>(null);
|
||||||
|
|
||||||
|
// Update input value when prop changes
|
||||||
|
useEffect(() => {
|
||||||
|
setInputValue(value);
|
||||||
|
}, [value]);
|
||||||
|
|
||||||
|
// Filter options based on input
|
||||||
|
useEffect(() => {
|
||||||
|
const filtered = inputValue.trim() === ''
|
||||||
|
? DEFAULT_ASSIGNEES
|
||||||
|
: DEFAULT_ASSIGNEES.filter(option =>
|
||||||
|
option.value.toLowerCase().includes(inputValue.toLowerCase())
|
||||||
|
);
|
||||||
|
|
||||||
|
// Add current input as an option if it's not in the default list and not empty
|
||||||
|
if (inputValue.trim() && !DEFAULT_ASSIGNEES.find(opt => opt.value.toLowerCase() === inputValue.toLowerCase())) {
|
||||||
|
filtered.push({
|
||||||
|
value: inputValue,
|
||||||
|
icon: User,
|
||||||
|
color: 'text-gray-500'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
setFilteredOptions(filtered);
|
||||||
|
setHighlightedIndex(0);
|
||||||
|
}, [inputValue]);
|
||||||
|
|
||||||
|
// Handle clicking outside to close dropdown
|
||||||
|
useEffect(() => {
|
||||||
|
const handleClickOutside = (event: MouseEvent) => {
|
||||||
|
if (
|
||||||
|
dropdownRef.current &&
|
||||||
|
!dropdownRef.current.contains(event.target as Node) &&
|
||||||
|
inputRef.current &&
|
||||||
|
!inputRef.current.contains(event.target as Node)
|
||||||
|
) {
|
||||||
|
setIsOpen(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
document.addEventListener('mousedown', handleClickOutside);
|
||||||
|
return () => document.removeEventListener('mousedown', handleClickOutside);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||||
|
const newValue = e.target.value;
|
||||||
|
setInputValue(newValue);
|
||||||
|
setIsOpen(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleInputFocus = () => {
|
||||||
|
setIsOpen(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleInputBlur = () => {
|
||||||
|
// Delay to allow click on dropdown item
|
||||||
|
setTimeout(() => {
|
||||||
|
// Only trigger onChange if the value actually changed
|
||||||
|
if (inputValue !== value) {
|
||||||
|
onChange(inputValue);
|
||||||
|
}
|
||||||
|
setIsOpen(false);
|
||||||
|
}, 200);
|
||||||
|
};
|
||||||
|
|
||||||
|
const selectOption = useCallback((optionValue: string) => {
|
||||||
|
setInputValue(optionValue);
|
||||||
|
onChange(optionValue);
|
||||||
|
setIsOpen(false);
|
||||||
|
inputRef.current?.focus();
|
||||||
|
}, [onChange]);
|
||||||
|
|
||||||
|
const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
|
||||||
|
if (!isOpen && (e.key === 'ArrowDown' || e.key === 'ArrowUp')) {
|
||||||
|
setIsOpen(true);
|
||||||
|
e.preventDefault();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isOpen) return;
|
||||||
|
|
||||||
|
switch (e.key) {
|
||||||
|
case 'ArrowDown':
|
||||||
|
e.preventDefault();
|
||||||
|
setHighlightedIndex(prev =>
|
||||||
|
prev < filteredOptions.length - 1 ? prev + 1 : 0
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
case 'ArrowUp':
|
||||||
|
e.preventDefault();
|
||||||
|
setHighlightedIndex(prev =>
|
||||||
|
prev > 0 ? prev - 1 : filteredOptions.length - 1
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
case 'Enter':
|
||||||
|
e.preventDefault();
|
||||||
|
if (filteredOptions[highlightedIndex]) {
|
||||||
|
selectOption(filteredOptions[highlightedIndex].value);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'Escape':
|
||||||
|
e.preventDefault();
|
||||||
|
setIsOpen(false);
|
||||||
|
break;
|
||||||
|
case 'Tab':
|
||||||
|
if (filteredOptions[highlightedIndex]) {
|
||||||
|
selectOption(filteredOptions[highlightedIndex].value);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleKeyPressWrapper = (e: React.KeyboardEvent<HTMLInputElement>) => {
|
||||||
|
// Don't trigger the parent's Enter handler if dropdown is open
|
||||||
|
if (e.key === 'Enter' && isOpen && filteredOptions.length > 0) {
|
||||||
|
e.preventDefault();
|
||||||
|
e.stopPropagation();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
onKeyPress?.(e);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="relative">
|
||||||
|
<input
|
||||||
|
ref={inputRef}
|
||||||
|
type="text"
|
||||||
|
value={inputValue}
|
||||||
|
onChange={handleInputChange}
|
||||||
|
onFocus={handleInputFocus}
|
||||||
|
onBlur={handleInputBlur}
|
||||||
|
onKeyDown={handleKeyDown}
|
||||||
|
onKeyPress={handleKeyPressWrapper}
|
||||||
|
placeholder={placeholder}
|
||||||
|
className={className}
|
||||||
|
autoFocus={autoFocus}
|
||||||
|
/>
|
||||||
|
|
||||||
|
{isOpen && filteredOptions.length > 0 && (
|
||||||
|
<div
|
||||||
|
ref={dropdownRef}
|
||||||
|
className="absolute z-50 w-full mt-1 bg-white dark:bg-gray-900 border border-gray-200 dark:border-gray-700 rounded-md shadow-lg max-h-60 overflow-auto"
|
||||||
|
>
|
||||||
|
{filteredOptions.map((option, index) => {
|
||||||
|
const Icon = option.icon;
|
||||||
|
const isHighlighted = index === highlightedIndex;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={option.value}
|
||||||
|
onClick={() => selectOption(option.value)}
|
||||||
|
className={`
|
||||||
|
flex items-center gap-2 px-3 py-2 cursor-pointer transition-colors
|
||||||
|
${isHighlighted
|
||||||
|
? 'bg-cyan-100 dark:bg-cyan-900/30'
|
||||||
|
: 'hover:bg-gray-100 dark:hover:bg-gray-800'
|
||||||
|
}
|
||||||
|
`}
|
||||||
|
onMouseEnter={() => setHighlightedIndex(index)}
|
||||||
|
>
|
||||||
|
<Icon className={`w-4 h-4 ${option.color}`} />
|
||||||
|
<span className="text-sm text-gray-700 dark:text-gray-300">
|
||||||
|
{option.value}
|
||||||
|
</span>
|
||||||
|
{option.value === inputValue && (
|
||||||
|
<span className="ml-auto text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
current
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -14,6 +14,7 @@ import { MilkdownEditor } from './MilkdownEditor';
|
|||||||
import { VersionHistoryModal } from './VersionHistoryModal';
|
import { VersionHistoryModal } from './VersionHistoryModal';
|
||||||
import { PRPViewer } from '../prp';
|
import { PRPViewer } from '../prp';
|
||||||
import { DocumentCard, NewDocumentCard } from './DocumentCard';
|
import { DocumentCard, NewDocumentCard } from './DocumentCard';
|
||||||
|
import { DeleteConfirmModal } from '../ui/DeleteConfirmModal';
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -24,7 +25,7 @@ interface ProjectDoc {
|
|||||||
created_at: string;
|
created_at: string;
|
||||||
updated_at: string;
|
updated_at: string;
|
||||||
// Content field stores markdown or structured data
|
// Content field stores markdown or structured data
|
||||||
content?: any;
|
content: any;
|
||||||
document_type?: string;
|
document_type?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -514,6 +515,10 @@ export const DocsTab = ({
|
|||||||
// Document state
|
// Document state
|
||||||
const [documents, setDocuments] = useState<ProjectDoc[]>([]);
|
const [documents, setDocuments] = useState<ProjectDoc[]>([]);
|
||||||
const [selectedDocument, setSelectedDocument] = useState<ProjectDoc | null>(null);
|
const [selectedDocument, setSelectedDocument] = useState<ProjectDoc | null>(null);
|
||||||
|
|
||||||
|
// Delete confirmation modal state
|
||||||
|
const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
|
||||||
|
const [documentToDelete, setDocumentToDelete] = useState<{ id: string; title: string } | null>(null);
|
||||||
const [isEditing, setIsEditing] = useState(false);
|
const [isEditing, setIsEditing] = useState(false);
|
||||||
const [isSaving, setIsSaving] = useState(false);
|
const [isSaving, setIsSaving] = useState(false);
|
||||||
const [loading, setLoading] = useState(false);
|
const [loading, setLoading] = useState(false);
|
||||||
@@ -569,13 +574,20 @@ export const DocsTab = ({
|
|||||||
const projectDocuments: ProjectDoc[] = project.docs.map((doc: any) => ({
|
const projectDocuments: ProjectDoc[] = project.docs.map((doc: any) => ({
|
||||||
id: doc.id,
|
id: doc.id,
|
||||||
title: doc.title || 'Untitled Document',
|
title: doc.title || 'Untitled Document',
|
||||||
created_at: doc.created_at,
|
created_at: doc.created_at || new Date().toISOString(),
|
||||||
updated_at: doc.updated_at,
|
updated_at: doc.updated_at || new Date().toISOString(),
|
||||||
content: doc.content,
|
content: doc.content || {},
|
||||||
document_type: doc.document_type || 'document'
|
document_type: doc.document_type || 'document'
|
||||||
}));
|
}));
|
||||||
|
|
||||||
setDocuments(projectDocuments);
|
// Merge with existing documents, preserving any temporary documents
|
||||||
|
setDocuments(prev => {
|
||||||
|
// Keep any temporary documents (ones with temp- prefix)
|
||||||
|
const tempDocs = prev.filter(doc => doc.id.startsWith('temp-'));
|
||||||
|
|
||||||
|
// Merge temporary docs with loaded docs
|
||||||
|
return [...projectDocuments, ...tempDocs];
|
||||||
|
});
|
||||||
|
|
||||||
// Auto-select first document if available and no document is currently selected
|
// Auto-select first document if available and no document is currently selected
|
||||||
if (projectDocuments.length > 0 && !selectedDocument) {
|
if (projectDocuments.length > 0 && !selectedDocument) {
|
||||||
@@ -598,26 +610,69 @@ export const DocsTab = ({
|
|||||||
const template = DOCUMENT_TEMPLATES[templateKey as keyof typeof DOCUMENT_TEMPLATES];
|
const template = DOCUMENT_TEMPLATES[templateKey as keyof typeof DOCUMENT_TEMPLATES];
|
||||||
if (!template) return;
|
if (!template) return;
|
||||||
|
|
||||||
|
// Create a temporary document for optimistic update
|
||||||
|
const tempDocument: ProjectDoc = {
|
||||||
|
id: `temp-${Date.now()}`,
|
||||||
|
title: template.name,
|
||||||
|
created_at: new Date().toISOString(),
|
||||||
|
updated_at: new Date().toISOString(),
|
||||||
|
content: template.content,
|
||||||
|
document_type: template.document_type
|
||||||
|
};
|
||||||
|
|
||||||
|
// Optimistically add the document to the UI immediately
|
||||||
|
console.log('[DocsTab] Adding temporary document:', tempDocument);
|
||||||
|
setDocuments(prev => {
|
||||||
|
const updated = [...prev, tempDocument];
|
||||||
|
console.log('[DocsTab] Documents after optimistic add:', updated);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
setSelectedDocument(tempDocument);
|
||||||
|
setShowTemplateModal(false);
|
||||||
|
setIsSaving(false); // Allow UI to show the temp document
|
||||||
|
|
||||||
try {
|
try {
|
||||||
setIsSaving(true);
|
setIsSaving(true);
|
||||||
|
|
||||||
// Create the document in the database first
|
// Create document via backend API
|
||||||
const newDocument = await projectService.createDocument(project.id, {
|
const createdDoc = await projectService.createDocument(project.id, {
|
||||||
title: template.name,
|
title: template.name,
|
||||||
content: template.content,
|
content: template.content,
|
||||||
document_type: template.document_type,
|
document_type: template.document_type
|
||||||
tags: []
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add to documents list with the real document from the database
|
// Ensure the created document has all required fields
|
||||||
setDocuments(prev => [...prev, newDocument]);
|
const newDocument: ProjectDoc = {
|
||||||
|
id: createdDoc.id,
|
||||||
|
title: createdDoc.title || template.name,
|
||||||
|
created_at: createdDoc.created_at || new Date().toISOString(),
|
||||||
|
updated_at: createdDoc.updated_at || new Date().toISOString(),
|
||||||
|
content: createdDoc.content || template.content,
|
||||||
|
document_type: createdDoc.document_type || template.document_type
|
||||||
|
};
|
||||||
|
|
||||||
|
// Replace temp document with real one - same pattern as tasks
|
||||||
|
setDocuments(prev => {
|
||||||
|
// Find and replace the temp document
|
||||||
|
const updated = prev.map(doc =>
|
||||||
|
doc.id === tempDocument.id ? newDocument : doc
|
||||||
|
);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Select the newly created document
|
||||||
setSelectedDocument(newDocument);
|
setSelectedDocument(newDocument);
|
||||||
|
|
||||||
console.log('Document created successfully:', newDocument);
|
console.log('Document created successfully via API:', newDocument);
|
||||||
showToast('Document created successfully', 'success');
|
showToast('Document created successfully', 'success');
|
||||||
setShowTemplateModal(false);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to create document:', error);
|
console.error('Failed to create document:', error);
|
||||||
|
|
||||||
|
// Remove the temporary document on error
|
||||||
|
setDocuments(prev => prev.filter(doc => doc.id !== tempDocument.id));
|
||||||
|
setSelectedDocument(null);
|
||||||
|
setShowTemplateModal(true); // Re-open the modal
|
||||||
|
|
||||||
showToast(
|
showToast(
|
||||||
error instanceof Error ? error.message : 'Failed to create document',
|
error instanceof Error ? error.message : 'Failed to create document',
|
||||||
'error'
|
'error'
|
||||||
@@ -634,25 +689,19 @@ export const DocsTab = ({
|
|||||||
try {
|
try {
|
||||||
setIsSaving(true);
|
setIsSaving(true);
|
||||||
|
|
||||||
// Call backend API to persist changes
|
// Update the document via backend API
|
||||||
const updatedDocument = await projectService.updateDocument(
|
const updatedDocument = await projectService.updateDocument(project.id, selectedDocument.id, {
|
||||||
project.id,
|
...selectedDocument,
|
||||||
selectedDocument.id,
|
updated_at: new Date().toISOString()
|
||||||
{
|
});
|
||||||
title: selectedDocument.title,
|
|
||||||
content: selectedDocument.content,
|
|
||||||
tags: selectedDocument.tags,
|
|
||||||
author: selectedDocument.author
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
// Update local state with backend response
|
// Update local state with the response from backend
|
||||||
setDocuments(prev => prev.map(doc =>
|
setDocuments(prev => prev.map(doc =>
|
||||||
doc.id === selectedDocument.id ? updatedDocument : doc
|
doc.id === selectedDocument.id ? updatedDocument : doc
|
||||||
));
|
));
|
||||||
setSelectedDocument(updatedDocument);
|
setSelectedDocument(updatedDocument);
|
||||||
|
|
||||||
console.log('Document saved successfully:', updatedDocument);
|
console.log('Document saved successfully via API:', updatedDocument);
|
||||||
showToast('Document saved successfully', 'success');
|
showToast('Document saved successfully', 'success');
|
||||||
setIsEditing(false);
|
setIsEditing(false);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -790,6 +839,34 @@ export const DocsTab = ({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Delete confirmation handlers
|
||||||
|
const confirmDeleteDocument = async () => {
|
||||||
|
if (!documentToDelete || !project?.id) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Call API to delete from database first
|
||||||
|
await projectService.deleteDocument(project.id, documentToDelete.id);
|
||||||
|
|
||||||
|
// Then remove from local state
|
||||||
|
setDocuments(prev => prev.filter(d => d.id !== documentToDelete.id));
|
||||||
|
if (selectedDocument?.id === documentToDelete.id) {
|
||||||
|
setSelectedDocument(documents.find(d => d.id !== documentToDelete.id) || null);
|
||||||
|
}
|
||||||
|
showToast('Document deleted', 'success');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to delete document:', error);
|
||||||
|
showToast('Failed to delete document', 'error');
|
||||||
|
} finally {
|
||||||
|
setShowDeleteConfirm(false);
|
||||||
|
setDocumentToDelete(null);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const cancelDeleteDocument = () => {
|
||||||
|
setShowDeleteConfirm(false);
|
||||||
|
setDocumentToDelete(null);
|
||||||
|
};
|
||||||
|
|
||||||
const handleProgressComplete = (data: CrawlProgressData) => {
|
const handleProgressComplete = (data: CrawlProgressData) => {
|
||||||
console.log('Crawl completed:', data);
|
console.log('Crawl completed:', data);
|
||||||
setProgressItems(prev => prev.filter(item => item.progressId !== data.progressId));
|
setProgressItems(prev => prev.filter(item => item.progressId !== data.progressId));
|
||||||
@@ -942,20 +1019,11 @@ export const DocsTab = ({
|
|||||||
document={doc}
|
document={doc}
|
||||||
isActive={selectedDocument?.id === doc.id}
|
isActive={selectedDocument?.id === doc.id}
|
||||||
onSelect={setSelectedDocument}
|
onSelect={setSelectedDocument}
|
||||||
onDelete={async (docId) => {
|
onDelete={(docId) => {
|
||||||
try {
|
const doc = documents.find(d => d.id === docId);
|
||||||
// Call API to delete from database first
|
if (doc) {
|
||||||
await projectService.deleteDocument(project.id, docId);
|
setDocumentToDelete({ id: docId, title: doc.title });
|
||||||
|
setShowDeleteConfirm(true);
|
||||||
// Then remove from local state
|
|
||||||
setDocuments(prev => prev.filter(d => d.id !== docId));
|
|
||||||
if (selectedDocument?.id === docId) {
|
|
||||||
setSelectedDocument(documents.find(d => d.id !== docId) || null);
|
|
||||||
}
|
|
||||||
showToast('Document deleted', 'success');
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Failed to delete document:', error);
|
|
||||||
showToast('Failed to delete document', 'error');
|
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
isDarkMode={isDarkMode}
|
isDarkMode={isDarkMode}
|
||||||
@@ -986,28 +1054,24 @@ export const DocsTab = ({
|
|||||||
document={selectedDocument}
|
document={selectedDocument}
|
||||||
isDarkMode={isDarkMode}
|
isDarkMode={isDarkMode}
|
||||||
onSave={async (updatedDocument) => {
|
onSave={async (updatedDocument) => {
|
||||||
|
if (!project?.id) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
setIsSaving(true);
|
setIsSaving(true);
|
||||||
|
|
||||||
// Call backend API to persist changes
|
// Update document via backend API
|
||||||
const savedDocument = await projectService.updateDocument(
|
const savedDocument = await projectService.updateDocument(project.id, updatedDocument.id, {
|
||||||
project.id,
|
...updatedDocument,
|
||||||
updatedDocument.id,
|
updated_at: new Date().toISOString()
|
||||||
{
|
});
|
||||||
title: updatedDocument.title,
|
|
||||||
content: updatedDocument.content,
|
|
||||||
tags: updatedDocument.tags,
|
|
||||||
author: updatedDocument.author
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
// Update local state with backend response
|
// Update local state with the response from backend
|
||||||
setSelectedDocument(savedDocument);
|
setSelectedDocument(savedDocument);
|
||||||
setDocuments(prev => prev.map(doc =>
|
setDocuments(prev => prev.map(doc =>
|
||||||
doc.id === updatedDocument.id ? savedDocument : doc
|
doc.id === updatedDocument.id ? savedDocument : doc
|
||||||
));
|
));
|
||||||
|
|
||||||
console.log('Document saved via MilkdownEditor');
|
console.log('Document saved via MilkdownEditor API:', savedDocument);
|
||||||
showToast('Document saved successfully', 'success');
|
showToast('Document saved successfully', 'success');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to save document:', error);
|
console.error('Failed to save document:', error);
|
||||||
@@ -1108,6 +1172,16 @@ export const DocsTab = ({
|
|||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{/* Delete Confirmation Modal */}
|
||||||
|
{showDeleteConfirm && documentToDelete && (
|
||||||
|
<DeleteConfirmModal
|
||||||
|
itemName={documentToDelete.title}
|
||||||
|
onConfirm={confirmDeleteDocument}
|
||||||
|
onCancel={cancelDeleteDocument}
|
||||||
|
type="document"
|
||||||
|
/>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
@@ -1201,7 +1275,7 @@ const TemplateModal: React.FC<{
|
|||||||
const KnowledgeSection: React.FC<{
|
const KnowledgeSection: React.FC<{
|
||||||
title: string;
|
title: string;
|
||||||
color: 'blue' | 'purple' | 'pink' | 'orange';
|
color: 'blue' | 'purple' | 'pink' | 'orange';
|
||||||
sources: any[];
|
sources: Array<{id: string; title: string; type: string; lastUpdated: string} | undefined>;
|
||||||
onAddClick: () => void;
|
onAddClick: () => void;
|
||||||
}> = ({
|
}> = ({
|
||||||
title,
|
title,
|
||||||
@@ -1284,7 +1358,7 @@ const KnowledgeSection: React.FC<{
|
|||||||
|
|
||||||
const SourceSelectionModal: React.FC<{
|
const SourceSelectionModal: React.FC<{
|
||||||
title: string;
|
title: string;
|
||||||
sources: any[];
|
sources: Array<{id: string; title: string; type: string; lastUpdated: string}>;
|
||||||
selectedSources: string[];
|
selectedSources: string[];
|
||||||
onToggleSource: (id: string) => void;
|
onToggleSource: (id: string) => void;
|
||||||
onSave: () => void;
|
onSave: () => void;
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import React, { useState } from 'react';
|
import React, { useState } from 'react';
|
||||||
import { Rocket, Code, Briefcase, Users, FileText, X, Plus, Clipboard } from 'lucide-react';
|
import { Rocket, Code, Briefcase, Users, FileText, X, Plus, Clipboard } from 'lucide-react';
|
||||||
import { useToast } from '../../contexts/ToastContext';
|
import { useToast } from '../../contexts/ToastContext';
|
||||||
|
import { copyToClipboard } from '../../utils/clipboard';
|
||||||
|
|
||||||
export interface ProjectDoc {
|
export interface ProjectDoc {
|
||||||
id: string;
|
id: string;
|
||||||
@@ -49,18 +50,22 @@ export const DocumentCard: React.FC<DocumentCardProps> = ({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleCopyId = (e: React.MouseEvent) => {
|
const handleCopyId = async (e: React.MouseEvent) => {
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
navigator.clipboard.writeText(document.id);
|
const success = await copyToClipboard(document.id);
|
||||||
showToast('Document ID copied to clipboard', 'success');
|
if (success) {
|
||||||
|
showToast('Document ID copied to clipboard', 'success');
|
||||||
// Visual feedback
|
|
||||||
const button = e.currentTarget;
|
// Visual feedback
|
||||||
const originalHTML = button.innerHTML;
|
const button = e.currentTarget;
|
||||||
button.innerHTML = '<div class="flex items-center gap-1"><span class="w-3 h-3 text-green-500">✓</span><span class="text-green-500 text-xs">Copied</span></div>';
|
const originalHTML = button.innerHTML;
|
||||||
setTimeout(() => {
|
button.innerHTML = '<div class="flex items-center gap-1"><span class="w-3 h-3 text-green-500">✓</span><span class="text-green-500 text-xs">Copied</span></div>';
|
||||||
button.innerHTML = originalHTML;
|
setTimeout(() => {
|
||||||
}, 2000);
|
button.innerHTML = originalHTML;
|
||||||
|
}, 2000);
|
||||||
|
} else {
|
||||||
|
showToast('Failed to copy Document ID', 'error');
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -115,9 +120,7 @@ export const DocumentCard: React.FC<DocumentCardProps> = ({
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
if (confirm(`Delete "${document.title}"?`)) {
|
onDelete(document.id);
|
||||||
onDelete(document.id);
|
|
||||||
}
|
|
||||||
}}
|
}}
|
||||||
className="absolute top-2 right-2 p-1 rounded-md bg-red-500/10 hover:bg-red-500/20 text-red-600 dark:text-red-400 transition-colors"
|
className="absolute top-2 right-2 p-1 rounded-md bg-red-500/10 hover:bg-red-500/20 text-red-600 dark:text-red-400 transition-colors"
|
||||||
aria-label={`Delete ${document.title}`}
|
aria-label={`Delete ${document.title}`}
|
||||||
|
|||||||
@@ -3,6 +3,8 @@ import { useDrag, useDrop } from 'react-dnd';
|
|||||||
import { Edit, Trash2, RefreshCw, Tag, User, Bot, Clipboard } from 'lucide-react';
|
import { Edit, Trash2, RefreshCw, Tag, User, Bot, Clipboard } from 'lucide-react';
|
||||||
import { Task } from './TaskTableView';
|
import { Task } from './TaskTableView';
|
||||||
import { ItemTypes, getAssigneeIcon, getAssigneeGlow, getOrderColor, getOrderGlow } from '../../lib/task-utils';
|
import { ItemTypes, getAssigneeIcon, getAssigneeGlow, getOrderColor, getOrderGlow } from '../../lib/task-utils';
|
||||||
|
import { copyToClipboard } from '../../utils/clipboard';
|
||||||
|
import { useToast } from '../../contexts/ToastContext';
|
||||||
|
|
||||||
export interface DraggableTaskCardProps {
|
export interface DraggableTaskCardProps {
|
||||||
task: Task;
|
task: Task;
|
||||||
@@ -27,6 +29,7 @@ export const DraggableTaskCard = ({
|
|||||||
hoveredTaskId,
|
hoveredTaskId,
|
||||||
onTaskHover,
|
onTaskHover,
|
||||||
}: DraggableTaskCardProps) => {
|
}: DraggableTaskCardProps) => {
|
||||||
|
const { showToast } = useToast();
|
||||||
|
|
||||||
const [{ isDragging }, drag] = useDrag({
|
const [{ isDragging }, drag] = useDrag({
|
||||||
type: ItemTypes.TASK,
|
type: ItemTypes.TASK,
|
||||||
@@ -197,17 +200,21 @@ export const DraggableTaskCard = ({
|
|||||||
<span className="text-gray-600 dark:text-gray-400 text-xs">{task.assignee?.name || 'User'}</span>
|
<span className="text-gray-600 dark:text-gray-400 text-xs">{task.assignee?.name || 'User'}</span>
|
||||||
</div>
|
</div>
|
||||||
<button
|
<button
|
||||||
type="button"
|
onClick={async (e) => {
|
||||||
onClick={(e) => {
|
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
navigator.clipboard.writeText(task.id);
|
const success = await copyToClipboard(task.id);
|
||||||
// Optional: Add a small toast or visual feedback here
|
if (success) {
|
||||||
const button = e.currentTarget;
|
showToast('Task ID copied to clipboard', 'success');
|
||||||
const originalHTML = button.innerHTML;
|
// Visual feedback
|
||||||
button.innerHTML = '<span class="text-green-500">Copied!</span>';
|
const button = e.currentTarget;
|
||||||
setTimeout(() => {
|
const originalHTML = button.innerHTML;
|
||||||
button.innerHTML = originalHTML;
|
button.innerHTML = '<span class="text-green-500">Copied!</span>';
|
||||||
}, 2000);
|
setTimeout(() => {
|
||||||
|
button.innerHTML = originalHTML;
|
||||||
|
}, 2000);
|
||||||
|
} else {
|
||||||
|
showToast('Failed to copy Task ID', 'error');
|
||||||
|
}
|
||||||
}}
|
}}
|
||||||
className="flex items-center gap-1 text-xs text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200 transition-colors"
|
className="flex items-center gap-1 text-xs text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200 transition-colors"
|
||||||
title="Copy Task ID to clipboard"
|
title="Copy Task ID to clipboard"
|
||||||
|
|||||||
@@ -2,13 +2,13 @@ import React, { memo, useCallback, useMemo, useState, useEffect, useRef } from '
|
|||||||
import { X } from 'lucide-react';
|
import { X } from 'lucide-react';
|
||||||
import { Button } from '../ui/Button';
|
import { Button } from '../ui/Button';
|
||||||
import { ArchonLoadingSpinner } from '../animations/Animations';
|
import { ArchonLoadingSpinner } from '../animations/Animations';
|
||||||
import { DebouncedInput, FeatureInput } from './TaskInputComponents';
|
import { DebouncedInput, FeatureInput, AssigneeTypeaheadInput } from './TaskInputComponents';
|
||||||
import type { Task } from './TaskTableView';
|
import type { Task } from './TaskTableView';
|
||||||
|
|
||||||
interface EditTaskModalProps {
|
interface EditTaskModalProps {
|
||||||
isModalOpen: boolean;
|
isModalOpen: boolean;
|
||||||
editingTask: Task | null;
|
editingTask: Task | null;
|
||||||
projectFeatures: any[];
|
projectFeatures: import('../types/jsonb').ProjectFeature[];
|
||||||
isLoadingFeatures: boolean;
|
isLoadingFeatures: boolean;
|
||||||
isSavingTask: boolean;
|
isSavingTask: boolean;
|
||||||
onClose: () => void;
|
onClose: () => void;
|
||||||
@@ -16,7 +16,15 @@ interface EditTaskModalProps {
|
|||||||
getTasksForPrioritySelection: (status: Task['status']) => Array<{value: number, label: string}>;
|
getTasksForPrioritySelection: (status: Task['status']) => Array<{value: number, label: string}>;
|
||||||
}
|
}
|
||||||
|
|
||||||
const ASSIGNEE_OPTIONS = ['User', 'Archon', 'AI IDE Agent'] as const;
|
// Assignee options - expanded to include all agent types
|
||||||
|
const ASSIGNEE_OPTIONS = [
|
||||||
|
'User',
|
||||||
|
'Archon',
|
||||||
|
'AI IDE Agent',
|
||||||
|
'IDE Agent',
|
||||||
|
'prp-executor',
|
||||||
|
'prp-validator'
|
||||||
|
] as const;
|
||||||
|
|
||||||
// Removed debounce utility - now using DebouncedInput component
|
// Removed debounce utility - now using DebouncedInput component
|
||||||
|
|
||||||
@@ -82,10 +90,10 @@ export const EditTaskModal = memo(({
|
|||||||
setLocalTask(prev => prev ? { ...prev, task_order: parseInt(e.target.value) } : null);
|
setLocalTask(prev => prev ? { ...prev, task_order: parseInt(e.target.value) } : null);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const handleAssigneeChange = useCallback((e: React.ChangeEvent<HTMLSelectElement>) => {
|
const handleAssigneeChange = useCallback((value: string) => {
|
||||||
setLocalTask(prev => prev ? {
|
setLocalTask(prev => prev ? {
|
||||||
...prev,
|
...prev,
|
||||||
assignee: { name: e.target.value as 'User' | 'Archon' | 'AI IDE Agent', avatar: '' }
|
assignee: { name: value, avatar: '' }
|
||||||
} : null);
|
} : null);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
@@ -167,15 +175,12 @@ export const EditTaskModal = memo(({
|
|||||||
<div className="grid grid-cols-2 gap-4">
|
<div className="grid grid-cols-2 gap-4">
|
||||||
<div>
|
<div>
|
||||||
<label className="block text-gray-700 dark:text-gray-300 mb-1">Assignee</label>
|
<label className="block text-gray-700 dark:text-gray-300 mb-1">Assignee</label>
|
||||||
<select
|
<AssigneeTypeaheadInput
|
||||||
value={localTask?.assignee?.name || 'User'}
|
value={localTask?.assignee?.name || 'User'}
|
||||||
onChange={handleAssigneeChange}
|
onChange={handleAssigneeChange}
|
||||||
|
placeholder="Type or select assignee..."
|
||||||
className="w-full bg-white/50 dark:bg-black/70 border border-gray-300 dark:border-gray-700 text-gray-700 dark:text-white rounded-md py-2 px-3 focus:outline-none focus:border-cyan-400 focus:shadow-[0_0_10px_rgba(34,211,238,0.2)] transition-all duration-300"
|
className="w-full bg-white/50 dark:bg-black/70 border border-gray-300 dark:border-gray-700 text-gray-700 dark:text-white rounded-md py-2 px-3 focus:outline-none focus:border-cyan-400 focus:shadow-[0_0_10px_rgba(34,211,238,0.2)] transition-all duration-300"
|
||||||
>
|
/>
|
||||||
{ASSIGNEE_OPTIONS.map(option => (
|
|
||||||
<option key={option} value={option}>{option}</option>
|
|
||||||
))}
|
|
||||||
</select>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div>
|
<div>
|
||||||
|
|||||||
@@ -149,7 +149,7 @@ interface FeaturesTabProps {
|
|||||||
project?: {
|
project?: {
|
||||||
id: string;
|
id: string;
|
||||||
title: string;
|
title: string;
|
||||||
features?: any[];
|
features?: import('../types/jsonb').ProjectFeature[];
|
||||||
} | null;
|
} | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import React, { useRef, useState, useCallback } from 'react';
|
import React, { useRef, useState, useCallback } from 'react';
|
||||||
import { useDrag, useDrop } from 'react-dnd';
|
import { useDrag, useDrop } from 'react-dnd';
|
||||||
import { useToast } from '../../contexts/ToastContext';
|
import { useToast } from '../../contexts/ToastContext';
|
||||||
import { DeleteConfirmModal } from '../../pages/ProjectPage';
|
import { DeleteConfirmModal } from '../ui/DeleteConfirmModal';
|
||||||
import { CheckSquare, Square, Trash2, ArrowRight } from 'lucide-react';
|
import { CheckSquare, Square, Trash2, ArrowRight } from 'lucide-react';
|
||||||
import { projectService } from '../../services/projectService';
|
import { projectService } from '../../services/projectService';
|
||||||
import { Task } from './TaskTableView'; // Import Task interface
|
import { Task } from './TaskTableView'; // Import Task interface
|
||||||
|
|||||||
@@ -92,7 +92,7 @@ DebouncedInput.displayName = 'DebouncedInput';
|
|||||||
interface FeatureInputProps {
|
interface FeatureInputProps {
|
||||||
value: string;
|
value: string;
|
||||||
onChange: (value: string) => void;
|
onChange: (value: string) => void;
|
||||||
projectFeatures: any[];
|
projectFeatures: import('../types/jsonb').ProjectFeature[];
|
||||||
isLoadingFeatures: boolean;
|
isLoadingFeatures: boolean;
|
||||||
placeholder?: string;
|
placeholder?: string;
|
||||||
className?: string;
|
className?: string;
|
||||||
@@ -169,4 +169,7 @@ export const FeatureInput = memo(({
|
|||||||
prevProps.projectFeatures === nextProps.projectFeatures;
|
prevProps.projectFeatures === nextProps.projectFeatures;
|
||||||
});
|
});
|
||||||
|
|
||||||
FeatureInput.displayName = 'FeatureInput';
|
FeatureInput.displayName = 'FeatureInput';
|
||||||
|
|
||||||
|
// Re-export AssigneeTypeaheadInput for convenience
|
||||||
|
export { AssigneeTypeaheadInput } from './AssigneeTypeaheadInput';
|
||||||
@@ -2,10 +2,12 @@ import React, { useState, useCallback, useRef, useEffect } from 'react';
|
|||||||
import { useDrag, useDrop } from 'react-dnd';
|
import { useDrag, useDrop } from 'react-dnd';
|
||||||
import { Check, Trash2, Edit, Tag, User, Bot, Clipboard, Save, Plus } from 'lucide-react';
|
import { Check, Trash2, Edit, Tag, User, Bot, Clipboard, Save, Plus } from 'lucide-react';
|
||||||
import { useToast } from '../../contexts/ToastContext';
|
import { useToast } from '../../contexts/ToastContext';
|
||||||
import { DeleteConfirmModal } from '../../pages/ProjectPage';
|
import { DeleteConfirmModal } from '../ui/DeleteConfirmModal';
|
||||||
import { projectService } from '../../services/projectService';
|
import { projectService } from '../../services/projectService';
|
||||||
import { ItemTypes, getAssigneeIcon, getAssigneeGlow, getOrderColor, getOrderGlow } from '../../lib/task-utils';
|
import { ItemTypes, getAssigneeIcon, getAssigneeGlow, getOrderColor, getOrderGlow } from '../../lib/task-utils';
|
||||||
import { DraggableTaskCard } from './DraggableTaskCard';
|
import { DraggableTaskCard } from './DraggableTaskCard';
|
||||||
|
import { copyToClipboard } from '../../utils/clipboard';
|
||||||
|
import { AssigneeTypeaheadInput } from './TaskInputComponents';
|
||||||
|
|
||||||
export interface Task {
|
export interface Task {
|
||||||
id: string;
|
id: string;
|
||||||
@@ -13,7 +15,7 @@ export interface Task {
|
|||||||
description: string;
|
description: string;
|
||||||
status: 'backlog' | 'in-progress' | 'review' | 'complete';
|
status: 'backlog' | 'in-progress' | 'review' | 'complete';
|
||||||
assignee: {
|
assignee: {
|
||||||
name: 'User' | 'Archon' | 'AI IDE Agent';
|
name: string; // Allow any assignee name for MCP subagents
|
||||||
avatar: string;
|
avatar: string;
|
||||||
};
|
};
|
||||||
feature: string;
|
feature: string;
|
||||||
@@ -31,7 +33,7 @@ interface TaskTableViewProps {
|
|||||||
onTaskUpdate?: (taskId: string, updates: Partial<Task>) => Promise<void>;
|
onTaskUpdate?: (taskId: string, updates: Partial<Task>) => Promise<void>;
|
||||||
}
|
}
|
||||||
|
|
||||||
const getAssigneeGlassStyle = (assigneeName: 'User' | 'Archon' | 'AI IDE Agent') => {
|
const getAssigneeGlassStyle = (assigneeName: string) => {
|
||||||
switch (assigneeName) {
|
switch (assigneeName) {
|
||||||
case 'User':
|
case 'User':
|
||||||
return 'backdrop-blur-md bg-gradient-to-b from-white/80 to-white/60 dark:from-white/10 dark:to-black/30 border-blue-400 dark:border-blue-500'; // blue glass
|
return 'backdrop-blur-md bg-gradient-to-b from-white/80 to-white/60 dark:from-white/10 dark:to-black/30 border-blue-400 dark:border-blue-500'; // blue glass
|
||||||
@@ -78,7 +80,7 @@ const reorderTasks = (tasks: Task[], fromIndex: number, toIndex: number): Task[]
|
|||||||
interface EditableCellProps {
|
interface EditableCellProps {
|
||||||
value: string;
|
value: string;
|
||||||
onSave: (value: string) => void;
|
onSave: (value: string) => void;
|
||||||
type?: 'text' | 'textarea' | 'select';
|
type?: 'text' | 'textarea' | 'select' | 'typeahead';
|
||||||
options?: string[];
|
options?: string[];
|
||||||
placeholder?: string;
|
placeholder?: string;
|
||||||
isEditing: boolean;
|
isEditing: boolean;
|
||||||
@@ -139,7 +141,37 @@ const EditableCell = ({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center w-full">
|
<div className="flex items-center w-full">
|
||||||
{type === 'select' ? (
|
{type === 'typeahead' ? (
|
||||||
|
<div className="relative">
|
||||||
|
<AssigneeTypeaheadInput
|
||||||
|
value={editValue}
|
||||||
|
onChange={(value) => {
|
||||||
|
setEditValue(value);
|
||||||
|
// Update the value but don't auto-save yet
|
||||||
|
}}
|
||||||
|
onKeyPress={(e) => {
|
||||||
|
if (e.key === 'Enter') {
|
||||||
|
e.preventDefault();
|
||||||
|
handleSave();
|
||||||
|
} else if (e.key === 'Escape') {
|
||||||
|
e.preventDefault();
|
||||||
|
handleCancel();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
placeholder={placeholder}
|
||||||
|
className="w-full bg-white/90 dark:bg-black/90 border border-cyan-300 dark:border-cyan-600 rounded px-2 py-1 text-sm focus:outline-none focus:border-cyan-500 focus:shadow-[0_0_5px_rgba(34,211,238,0.3)]"
|
||||||
|
autoFocus
|
||||||
|
/>
|
||||||
|
{/* Save button for explicit save */}
|
||||||
|
<button
|
||||||
|
onClick={handleSave}
|
||||||
|
className="absolute right-0 top-0 h-full px-2 text-cyan-600 hover:text-cyan-700 dark:text-cyan-400 dark:hover:text-cyan-300"
|
||||||
|
title="Save (Enter)"
|
||||||
|
>
|
||||||
|
✓
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
) : type === 'select' ? (
|
||||||
<select
|
<select
|
||||||
value={editValue}
|
value={editValue}
|
||||||
onChange={(e) => {
|
onChange={(e) => {
|
||||||
@@ -208,6 +240,7 @@ const DraggableTaskRow = ({
|
|||||||
}: DraggableTaskRowProps) => {
|
}: DraggableTaskRowProps) => {
|
||||||
const [editingField, setEditingField] = useState<string | null>(null);
|
const [editingField, setEditingField] = useState<string | null>(null);
|
||||||
const [isHovering, setIsHovering] = useState(false);
|
const [isHovering, setIsHovering] = useState(false);
|
||||||
|
const { showToast } = useToast();
|
||||||
|
|
||||||
const [{ isDragging }, drag] = useDrag({
|
const [{ isDragging }, drag] = useDrag({
|
||||||
type: ItemTypes.TASK,
|
type: ItemTypes.TASK,
|
||||||
@@ -252,7 +285,7 @@ const DraggableTaskRow = ({
|
|||||||
} else if (field === 'status') {
|
} else if (field === 'status') {
|
||||||
updates.status = value as Task['status'];
|
updates.status = value as Task['status'];
|
||||||
} else if (field === 'assignee') {
|
} else if (field === 'assignee') {
|
||||||
updates.assignee = { name: value as 'User' | 'Archon' | 'AI IDE Agent', avatar: '' };
|
updates.assignee = { name: value || 'AI IDE Agent', avatar: '' };
|
||||||
} else if (field === 'feature') {
|
} else if (field === 'feature') {
|
||||||
updates.feature = value;
|
updates.feature = value;
|
||||||
}
|
}
|
||||||
@@ -336,32 +369,15 @@ const DraggableTaskRow = ({
|
|||||||
</div>
|
</div>
|
||||||
</td>
|
</td>
|
||||||
<td className="p-3">
|
<td className="p-3">
|
||||||
<div className="flex items-center justify-center">
|
<EditableCell
|
||||||
<div
|
value={task.assignee?.name || 'AI IDE Agent'}
|
||||||
className={`flex items-center justify-center w-8 h-8 rounded-full border-2 transition-all duration-300 cursor-pointer hover:scale-110 ${getAssigneeGlassStyle(task.assignee?.name || 'User')} ${getAssigneeGlow(task.assignee?.name || 'User')}`}
|
onSave={(value) => handleUpdateField('assignee', value || 'AI IDE Agent')}
|
||||||
onClick={() => setEditingField('assignee')}
|
type="typeahead"
|
||||||
title={`Assignee: ${task.assignee?.name || 'User'}`}
|
isEditing={editingField === 'assignee'}
|
||||||
>
|
onEdit={() => setEditingField('assignee')}
|
||||||
{getAssigneeIcon(task.assignee?.name || 'User')}
|
onCancel={() => setEditingField(null)}
|
||||||
</div>
|
placeholder="AI IDE Agent"
|
||||||
{editingField === 'assignee' && (
|
/>
|
||||||
<div className="absolute z-50 mt-2 bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded-lg shadow-lg p-2">
|
|
||||||
<select
|
|
||||||
value={task.assignee?.name || 'User'}
|
|
||||||
onChange={(e) => {
|
|
||||||
handleUpdateField('assignee', e.target.value);
|
|
||||||
setEditingField(null);
|
|
||||||
}}
|
|
||||||
className="bg-white/90 dark:bg-black/90 border border-cyan-300 dark:border-cyan-600 rounded px-2 py-1 text-sm focus:outline-none focus:border-cyan-500"
|
|
||||||
autoFocus
|
|
||||||
>
|
|
||||||
<option value="User">User</option>
|
|
||||||
<option value="Archon">Archon</option>
|
|
||||||
<option value="AI IDE Agent">AI IDE Agent</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</td>
|
</td>
|
||||||
<td className="p-3">
|
<td className="p-3">
|
||||||
<div className="flex justify-center gap-2 opacity-0 group-hover:opacity-100 transition-opacity">
|
<div className="flex justify-center gap-2 opacity-0 group-hover:opacity-100 transition-opacity">
|
||||||
@@ -394,17 +410,21 @@ const DraggableTaskRow = ({
|
|||||||
</button>
|
</button>
|
||||||
{/* Copy Task ID Button - Matching Board View */}
|
{/* Copy Task ID Button - Matching Board View */}
|
||||||
<button
|
<button
|
||||||
type="button"
|
onClick={async (e) => {
|
||||||
onClick={(e) => {
|
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
navigator.clipboard.writeText(task.id);
|
const success = await copyToClipboard(task.id);
|
||||||
// Visual feedback like in board view
|
if (success) {
|
||||||
const button = e.currentTarget;
|
showToast('Task ID copied to clipboard', 'success');
|
||||||
const originalHTML = button.innerHTML;
|
// Visual feedback like in board view
|
||||||
button.innerHTML = '<div class="flex items-center gap-1"><span class="w-3 h-3 text-green-500">✓</span><span class="text-green-500 text-xs">Copied</span></div>';
|
const button = e.currentTarget;
|
||||||
setTimeout(() => {
|
const originalHTML = button.innerHTML;
|
||||||
button.innerHTML = originalHTML;
|
button.innerHTML = '<div class="flex items-center gap-1"><span class="w-3 h-3 text-green-500">✓</span><span class="text-green-500 text-xs">Copied</span></div>';
|
||||||
}, 2000);
|
setTimeout(() => {
|
||||||
|
button.innerHTML = originalHTML;
|
||||||
|
}, 2000);
|
||||||
|
} else {
|
||||||
|
showToast('Failed to copy Task ID', 'error');
|
||||||
|
}
|
||||||
}}
|
}}
|
||||||
className="p-1.5 rounded-full bg-gray-500/20 text-gray-500 hover:bg-gray-500/30 hover:shadow-[0_0_10px_rgba(107,114,128,0.3)] transition-all duration-300"
|
className="p-1.5 rounded-full bg-gray-500/20 text-gray-500 hover:bg-gray-500/30 hover:shadow-[0_0_10px_rgba(107,114,128,0.3)] transition-all duration-300"
|
||||||
title="Copy Task ID to clipboard"
|
title="Copy Task ID to clipboard"
|
||||||
@@ -535,18 +555,16 @@ const AddTaskRow = ({ onTaskCreate, tasks, statusFilter }: AddTaskRowProps) => {
|
|||||||
/>
|
/>
|
||||||
</td>
|
</td>
|
||||||
<td className="p-3">
|
<td className="p-3">
|
||||||
<select
|
<AssigneeTypeaheadInput
|
||||||
value={newTask.assignee.name}
|
value={newTask.assignee.name}
|
||||||
onChange={(e) => setNewTask(prev => ({
|
onChange={(value) => setNewTask(prev => ({
|
||||||
...prev,
|
...prev,
|
||||||
assignee: { name: e.target.value as 'User' | 'Archon' | 'AI IDE Agent', avatar: '' }
|
assignee: { name: value || 'AI IDE Agent', avatar: '' }
|
||||||
}))}
|
}))}
|
||||||
|
onKeyPress={handleKeyPress}
|
||||||
|
placeholder="AI IDE Agent"
|
||||||
className="w-full bg-white/90 dark:bg-black/90 border border-cyan-300 dark:border-cyan-600 rounded px-2 py-1.5 text-sm focus:outline-none focus:border-cyan-500 focus:shadow-[0_0_5px_rgba(34,211,238,0.3)]"
|
className="w-full bg-white/90 dark:bg-black/90 border border-cyan-300 dark:border-cyan-600 rounded px-2 py-1.5 text-sm focus:outline-none focus:border-cyan-500 focus:shadow-[0_0_5px_rgba(34,211,238,0.3)]"
|
||||||
>
|
/>
|
||||||
<option value="AI IDE Agent">AI IDE Agent</option>
|
|
||||||
<option value="User">User</option>
|
|
||||||
<option value="Archon">Archon</option>
|
|
||||||
</select>
|
|
||||||
</td>
|
</td>
|
||||||
<td className="p-3">
|
<td className="p-3">
|
||||||
<div className="flex justify-center">
|
<div className="flex justify-center">
|
||||||
|
|||||||
@@ -1,18 +1,32 @@
|
|||||||
import React, { useState, useEffect, useCallback, useMemo } from 'react';
|
import React, { useState, useEffect, useCallback, useMemo, useRef } from 'react';
|
||||||
import { Table, LayoutGrid, Plus, Wifi, WifiOff, List } from 'lucide-react';
|
import { Table, LayoutGrid, Plus, Wifi, WifiOff, List, Trash2 } from 'lucide-react';
|
||||||
import { DndProvider } from 'react-dnd';
|
import { DndProvider } from 'react-dnd';
|
||||||
import { HTML5Backend } from 'react-dnd-html5-backend';
|
import { HTML5Backend } from 'react-dnd-html5-backend';
|
||||||
import { Toggle } from '../ui/Toggle';
|
import { Toggle } from '../ui/Toggle';
|
||||||
import { projectService } from '../../services/projectService';
|
import { projectService } from '../../services/projectService';
|
||||||
|
import { getGlobalOperationTracker } from '../../utils/operationTracker';
|
||||||
|
import { Card } from '../ui/card';
|
||||||
|
|
||||||
import { useTaskSocket } from '../../hooks/useTaskSocket';
|
import { useTaskSocket } from '../../hooks/useTaskSocket';
|
||||||
|
import { useOptimisticUpdates } from '../../hooks/useOptimisticUpdates';
|
||||||
import type { CreateTaskRequest, UpdateTaskRequest, DatabaseTaskStatus } from '../../types/project';
|
import type { CreateTaskRequest, UpdateTaskRequest, DatabaseTaskStatus } from '../../types/project';
|
||||||
|
import { WebSocketState } from '../../services/socketIOService';
|
||||||
import { TaskTableView, Task } from './TaskTableView';
|
import { TaskTableView, Task } from './TaskTableView';
|
||||||
import { TaskBoardView } from './TaskBoardView';
|
import { TaskBoardView } from './TaskBoardView';
|
||||||
import { EditTaskModal } from './EditTaskModal';
|
import { EditTaskModal } from './EditTaskModal';
|
||||||
|
import { DeleteConfirmModal } from '../ui/DeleteConfirmModal';
|
||||||
|
|
||||||
// Assignee utilities
|
// Assignee utilities - expanded to include all agent types
|
||||||
const ASSIGNEE_OPTIONS = ['User', 'Archon', 'AI IDE Agent'] as const;
|
const ASSIGNEE_OPTIONS = [
|
||||||
|
'User',
|
||||||
|
'Archon',
|
||||||
|
'AI IDE Agent',
|
||||||
|
'IDE Agent',
|
||||||
|
'prp-executor',
|
||||||
|
'prp-validator'
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
// Delete confirmation modal component
|
||||||
|
|
||||||
// Mapping functions for status conversion
|
// Mapping functions for status conversion
|
||||||
const mapUIStatusToDBStatus = (uiStatus: Task['status']): DatabaseTaskStatus => {
|
const mapUIStatusToDBStatus = (uiStatus: Task['status']): DatabaseTaskStatus => {
|
||||||
@@ -39,15 +53,15 @@ const mapDBStatusToUIStatus = (dbStatus: DatabaseTaskStatus): Task['status'] =>
|
|||||||
const mapDatabaseTaskToUITask = (dbTask: any): Task => {
|
const mapDatabaseTaskToUITask = (dbTask: any): Task => {
|
||||||
return {
|
return {
|
||||||
id: dbTask.id,
|
id: dbTask.id,
|
||||||
title: dbTask.title,
|
title: dbTask.title || '',
|
||||||
description: dbTask.description || '',
|
description: dbTask.description || '',
|
||||||
status: mapDBStatusToUIStatus(dbTask.status),
|
status: mapDBStatusToUIStatus(dbTask.status || 'todo'),
|
||||||
assignee: {
|
assignee: {
|
||||||
name: dbTask.assignee || 'User',
|
name: dbTask.assignee || 'User',
|
||||||
avatar: ''
|
avatar: ''
|
||||||
},
|
},
|
||||||
feature: dbTask.feature || 'General',
|
feature: dbTask.feature || 'General',
|
||||||
featureColor: '#3b82f6', // Default blue color
|
featureColor: dbTask.featureColor || '#3b82f6', // Default blue color
|
||||||
task_order: dbTask.task_order || 0,
|
task_order: dbTask.task_order || 0,
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
@@ -65,15 +79,36 @@ export const TasksTab = ({
|
|||||||
const [tasks, setTasks] = useState<Task[]>([]);
|
const [tasks, setTasks] = useState<Task[]>([]);
|
||||||
const [editingTask, setEditingTask] = useState<Task | null>(null);
|
const [editingTask, setEditingTask] = useState<Task | null>(null);
|
||||||
const [isModalOpen, setIsModalOpen] = useState(false);
|
const [isModalOpen, setIsModalOpen] = useState(false);
|
||||||
const [projectFeatures, setProjectFeatures] = useState<any[]>([]);
|
const [projectFeatures, setProjectFeatures] = useState<import('../types/jsonb').ProjectFeature[]>([]);
|
||||||
const [isLoadingFeatures, setIsLoadingFeatures] = useState(false);
|
const [isLoadingFeatures, setIsLoadingFeatures] = useState(false);
|
||||||
const [isSavingTask, setIsSavingTask] = useState<boolean>(false);
|
const [isSavingTask, setIsSavingTask] = useState<boolean>(false);
|
||||||
const [isWebSocketConnected, setIsWebSocketConnected] = useState(false);
|
const [isWebSocketConnected, setIsWebSocketConnected] = useState(false);
|
||||||
|
const [taskToDelete, setTaskToDelete] = useState<Task | null>(null);
|
||||||
|
const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
|
||||||
|
|
||||||
// Initialize tasks
|
// Use optimistic updates hook for proper echo suppression
|
||||||
|
const { addPendingUpdate, isPendingUpdate, removePendingUpdate } = useOptimisticUpdates<Task>();
|
||||||
|
|
||||||
|
// Track recently deleted tasks to prevent race conditions
|
||||||
|
const recentlyDeletedIdsRef = useRef<Set<string>>(new Set());
|
||||||
|
|
||||||
|
// Track recently created tasks to prevent WebSocket echo
|
||||||
|
const recentlyCreatedIdsRef = useRef<Set<string>>(new Set());
|
||||||
|
|
||||||
|
// Track the project ID to detect when we switch projects
|
||||||
|
const lastProjectId = useRef(projectId);
|
||||||
|
|
||||||
|
// Initialize tasks when component mounts or project changes
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
setTasks(initialTasks);
|
// If project changed, always reinitialize
|
||||||
}, [initialTasks]);
|
if (lastProjectId.current !== projectId) {
|
||||||
|
setTasks(initialTasks);
|
||||||
|
lastProjectId.current = projectId;
|
||||||
|
} else if (tasks.length === 0 && initialTasks.length > 0) {
|
||||||
|
// Only initialize if we have no tasks but received initial tasks
|
||||||
|
setTasks(initialTasks);
|
||||||
|
}
|
||||||
|
}, [initialTasks, projectId]);
|
||||||
|
|
||||||
// Load project features on component mount
|
// Load project features on component mount
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -85,6 +120,18 @@ export const TasksTab = ({
|
|||||||
const updatedTask = message.data || message;
|
const updatedTask = message.data || message;
|
||||||
const mappedTask = mapDatabaseTaskToUITask(updatedTask);
|
const mappedTask = mapDatabaseTaskToUITask(updatedTask);
|
||||||
|
|
||||||
|
// Skip updates for recently deleted tasks (race condition prevention)
|
||||||
|
if (recentlyDeletedIdsRef.current.has(updatedTask.id)) {
|
||||||
|
console.log('[Socket] Ignoring update for recently deleted task:', updatedTask.id);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this is an echo of a local update
|
||||||
|
if (isPendingUpdate(updatedTask.id, mappedTask)) {
|
||||||
|
console.log('[Socket] Skipping echo update for locally updated task:', updatedTask.id);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Skip updates while modal is open for the same task to prevent conflicts
|
// Skip updates while modal is open for the same task to prevent conflicts
|
||||||
if (isModalOpen && editingTask?.id === updatedTask.id) {
|
if (isModalOpen && editingTask?.id === updatedTask.id) {
|
||||||
console.log('[Socket] Skipping update for task being edited:', updatedTask.id);
|
console.log('[Socket] Skipping update for task being edited:', updatedTask.id);
|
||||||
@@ -94,20 +141,15 @@ export const TasksTab = ({
|
|||||||
setTasks(prev => {
|
setTasks(prev => {
|
||||||
// Use server timestamp for conflict resolution
|
// Use server timestamp for conflict resolution
|
||||||
const existingTask = prev.find(task => task.id === updatedTask.id);
|
const existingTask = prev.find(task => task.id === updatedTask.id);
|
||||||
if (existingTask) {
|
|
||||||
// Check if this is a more recent update
|
if (!existingTask) {
|
||||||
const serverTimestamp = message.server_timestamp || Date.now();
|
// Task not found locally, skip the update
|
||||||
const lastUpdate = existingTask.lastUpdate || 0;
|
return prev;
|
||||||
|
|
||||||
if (serverTimestamp <= lastUpdate) {
|
|
||||||
console.log('[Socket] Ignoring stale update for task:', updatedTask.id);
|
|
||||||
return prev;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const updated = prev.map(task =>
|
const updated = prev.map(task =>
|
||||||
task.id === updatedTask.id
|
task.id === updatedTask.id
|
||||||
? { ...mappedTask, lastUpdate: message.server_timestamp || Date.now() }
|
? { ...mappedTask }
|
||||||
: task
|
: task
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -115,11 +157,18 @@ export const TasksTab = ({
|
|||||||
setTimeout(() => onTasksChange(updated), 0);
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
return updated;
|
return updated;
|
||||||
});
|
});
|
||||||
}, [onTasksChange, isModalOpen, editingTask?.id]);
|
}, [onTasksChange, isModalOpen, editingTask?.id, isPendingUpdate]);
|
||||||
|
|
||||||
const handleTaskCreated = useCallback((message: any) => {
|
const handleTaskCreated = useCallback((message: any) => {
|
||||||
const newTask = message.data || message;
|
const newTask = message.data || message;
|
||||||
console.log('🆕 Real-time task created:', newTask);
|
console.log('🆕 Real-time task created:', newTask);
|
||||||
|
|
||||||
|
// Skip if this is our own recently created task
|
||||||
|
if (recentlyCreatedIdsRef.current.has(newTask.id)) {
|
||||||
|
console.log('[Socket] Skipping echo of our own task creation:', newTask.id);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const mappedTask = mapDatabaseTaskToUITask(newTask);
|
const mappedTask = mapDatabaseTaskToUITask(newTask);
|
||||||
|
|
||||||
setTasks(prev => {
|
setTasks(prev => {
|
||||||
@@ -128,7 +177,16 @@ export const TasksTab = ({
|
|||||||
console.log('Task already exists, skipping create');
|
console.log('Task already exists, skipping create');
|
||||||
return prev;
|
return prev;
|
||||||
}
|
}
|
||||||
const updated = [...prev, mappedTask];
|
|
||||||
|
// Remove any temp tasks with same title (in case of race condition)
|
||||||
|
const filteredPrev = prev.filter(task => {
|
||||||
|
// Keep non-temp tasks
|
||||||
|
if (!task.id?.startsWith('temp-')) return true;
|
||||||
|
// Remove temp tasks with matching title
|
||||||
|
return task.title !== newTask.title;
|
||||||
|
});
|
||||||
|
|
||||||
|
const updated = [...filteredPrev, mappedTask];
|
||||||
setTimeout(() => onTasksChange(updated), 0);
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
return updated;
|
return updated;
|
||||||
});
|
});
|
||||||
@@ -137,6 +195,10 @@ export const TasksTab = ({
|
|||||||
const handleTaskDeleted = useCallback((message: any) => {
|
const handleTaskDeleted = useCallback((message: any) => {
|
||||||
const deletedTask = message.data || message;
|
const deletedTask = message.data || message;
|
||||||
console.log('🗑️ Real-time task deleted:', deletedTask);
|
console.log('🗑️ Real-time task deleted:', deletedTask);
|
||||||
|
|
||||||
|
// Remove from recently deleted cache when deletion is confirmed
|
||||||
|
recentlyDeletedIdsRef.current.delete(deletedTask.id);
|
||||||
|
|
||||||
setTasks(prev => {
|
setTasks(prev => {
|
||||||
const updated = prev.filter(task => task.id !== deletedTask.id);
|
const updated = prev.filter(task => task.id !== deletedTask.id);
|
||||||
setTimeout(() => onTasksChange(updated), 0);
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
@@ -170,7 +232,7 @@ export const TasksTab = ({
|
|||||||
const initialWebSocketTasks = message.data || message;
|
const initialWebSocketTasks = message.data || message;
|
||||||
const uiTasks: Task[] = initialWebSocketTasks.map(mapDatabaseTaskToUITask);
|
const uiTasks: Task[] = initialWebSocketTasks.map(mapDatabaseTaskToUITask);
|
||||||
setTasks(uiTasks);
|
setTasks(uiTasks);
|
||||||
onTasksChange(uiTasks);
|
setTimeout(() => onTasksChange(uiTasks), 0);
|
||||||
}, [onTasksChange]);
|
}, [onTasksChange]);
|
||||||
|
|
||||||
// Simplified socket connection with better lifecycle management
|
// Simplified socket connection with better lifecycle management
|
||||||
@@ -183,7 +245,7 @@ export const TasksTab = ({
|
|||||||
onTasksReordered: handleTasksReordered,
|
onTasksReordered: handleTasksReordered,
|
||||||
onInitialTasks: handleInitialTasks,
|
onInitialTasks: handleInitialTasks,
|
||||||
onConnectionStateChange: (state) => {
|
onConnectionStateChange: (state) => {
|
||||||
setIsWebSocketConnected(state === 'connected');
|
setIsWebSocketConnected(state === WebSocketState.CONNECTED);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -222,6 +284,30 @@ export const TasksTab = ({
|
|||||||
setEditingTask(task);
|
setEditingTask(task);
|
||||||
|
|
||||||
setIsSavingTask(true);
|
setIsSavingTask(true);
|
||||||
|
|
||||||
|
// Store original task for rollback
|
||||||
|
const originalTask = task.id ? tasks.find(t => t.id === task.id) : null;
|
||||||
|
|
||||||
|
// OPTIMISTIC UPDATE: Update UI immediately for existing tasks
|
||||||
|
if (task.id) {
|
||||||
|
setTasks(prev => {
|
||||||
|
const updated = prev.map(t =>
|
||||||
|
t.id === task.id ? task : t
|
||||||
|
);
|
||||||
|
// Notify parent of the change
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Mark as pending update to prevent echo
|
||||||
|
addPendingUpdate({
|
||||||
|
id: task.id,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
data: task,
|
||||||
|
operation: 'update'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
let parentTaskId = task.id;
|
let parentTaskId = task.id;
|
||||||
|
|
||||||
@@ -259,6 +345,22 @@ export const TasksTab = ({
|
|||||||
closeModal();
|
closeModal();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to save task:', error);
|
console.error('Failed to save task:', error);
|
||||||
|
|
||||||
|
// Rollback optimistic update on error
|
||||||
|
if (task.id && originalTask) {
|
||||||
|
setTasks(prev => {
|
||||||
|
const updated = prev.map(t =>
|
||||||
|
t.id === task.id ? originalTask : t
|
||||||
|
);
|
||||||
|
// Notify parent of the rollback
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clear pending update tracking
|
||||||
|
removePendingUpdate(task.id);
|
||||||
|
}
|
||||||
|
|
||||||
alert(`Failed to save task: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
alert(`Failed to save task: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
} finally {
|
} finally {
|
||||||
setIsSavingTask(false);
|
setIsSavingTask(false);
|
||||||
@@ -268,7 +370,7 @@ export const TasksTab = ({
|
|||||||
// Update tasks helper
|
// Update tasks helper
|
||||||
const updateTasks = (newTasks: Task[]) => {
|
const updateTasks = (newTasks: Task[]) => {
|
||||||
setTasks(newTasks);
|
setTasks(newTasks);
|
||||||
onTasksChange(newTasks);
|
setTimeout(() => onTasksChange(newTasks), 0);
|
||||||
};
|
};
|
||||||
|
|
||||||
// Helper function to reorder tasks by status to ensure no gaps (1,2,3...)
|
// Helper function to reorder tasks by status to ensure no gaps (1,2,3...)
|
||||||
@@ -305,29 +407,53 @@ export const TasksTab = ({
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
// Improved debounced persistence with better coordination
|
// Batch reorder persistence for efficient updates
|
||||||
|
const debouncedPersistBatchReorder = useMemo(
|
||||||
|
() => debounce(async (tasksToUpdate: Task[]) => {
|
||||||
|
try {
|
||||||
|
console.log(`REORDER: Persisting batch update for ${tasksToUpdate.length} tasks`);
|
||||||
|
|
||||||
|
// Send batch update request to backend
|
||||||
|
// For now, update tasks individually (backend can be optimized later for batch endpoint)
|
||||||
|
const updatePromises = tasksToUpdate.map(task =>
|
||||||
|
projectService.updateTask(task.id, {
|
||||||
|
task_order: task.task_order
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
await Promise.all(updatePromises);
|
||||||
|
console.log('REORDER: Batch reorder persisted successfully');
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('REORDER: Failed to persist batch reorder:', error);
|
||||||
|
// Socket will handle state recovery
|
||||||
|
console.log('REORDER: Socket will handle state recovery');
|
||||||
|
}
|
||||||
|
}, 500), // Shorter delay for batch updates
|
||||||
|
[projectId]
|
||||||
|
);
|
||||||
|
|
||||||
|
// Single task persistence (still used for other operations)
|
||||||
const debouncedPersistSingleTask = useMemo(
|
const debouncedPersistSingleTask = useMemo(
|
||||||
() => debounce(async (task: Task) => {
|
() => debounce(async (task: Task) => {
|
||||||
try {
|
try {
|
||||||
console.log('REORDER: Persisting position change for task:', task.title, 'new position:', task.task_order);
|
console.log('REORDER: Persisting position change for task:', task.title, 'new position:', task.task_order);
|
||||||
|
|
||||||
// Update only the moved task with server timestamp for conflict resolution
|
// Update only the moved task
|
||||||
await projectService.updateTask(task.id, {
|
await projectService.updateTask(task.id, {
|
||||||
task_order: task.task_order,
|
task_order: task.task_order
|
||||||
client_timestamp: Date.now()
|
|
||||||
});
|
});
|
||||||
console.log('REORDER: Single task position persisted successfully');
|
console.log('REORDER: Single task position persisted successfully');
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('REORDER: Failed to persist task position:', error);
|
console.error('REORDER: Failed to persist task position:', error);
|
||||||
// Don't reload tasks immediately - let socket handle recovery
|
|
||||||
console.log('REORDER: Socket will handle state recovery');
|
console.log('REORDER: Socket will handle state recovery');
|
||||||
}
|
}
|
||||||
}, 800), // Slightly reduced delay for better responsiveness
|
}, 800),
|
||||||
[projectId]
|
[projectId]
|
||||||
);
|
);
|
||||||
|
|
||||||
// Optimized task reordering without optimistic update conflicts
|
// Standard drag-and-drop reordering with sequential integers (like Jira/Trello/Linear)
|
||||||
const handleTaskReorder = useCallback((taskId: string, targetIndex: number, status: Task['status']) => {
|
const handleTaskReorder = useCallback((taskId: string, targetIndex: number, status: Task['status']) => {
|
||||||
console.log('REORDER: Moving task', taskId, 'to index', targetIndex, 'in status', status);
|
console.log('REORDER: Moving task', taskId, 'to index', targetIndex, 'in status', status);
|
||||||
|
|
||||||
@@ -357,92 +483,99 @@ export const TasksTab = ({
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const movingTask = statusTasks[movingTaskIndex];
|
console.log('REORDER: Moving task from position', movingTaskIndex, 'to', targetIndex);
|
||||||
console.log('REORDER: Moving', movingTask.title, 'from', movingTaskIndex, 'to', targetIndex);
|
|
||||||
|
|
||||||
// Calculate new position using improved algorithm
|
// Remove the task from its current position and insert at target position
|
||||||
let newPosition: number;
|
const reorderedTasks = [...statusTasks];
|
||||||
|
const [movedTask] = reorderedTasks.splice(movingTaskIndex, 1);
|
||||||
|
reorderedTasks.splice(targetIndex, 0, movedTask);
|
||||||
|
|
||||||
if (targetIndex === 0) {
|
// Assign sequential order numbers (1, 2, 3, etc.) to all tasks in this status
|
||||||
// Moving to first position
|
const updatedStatusTasks = reorderedTasks.map((task, index) => ({
|
||||||
const firstTask = statusTasks[0];
|
...task,
|
||||||
newPosition = firstTask.task_order / 2;
|
task_order: index + 1,
|
||||||
} else if (targetIndex === statusTasks.length - 1) {
|
lastUpdate: Date.now()
|
||||||
// Moving to last position
|
}));
|
||||||
const lastTask = statusTasks[statusTasks.length - 1];
|
|
||||||
newPosition = lastTask.task_order + 1024;
|
|
||||||
} else {
|
|
||||||
// Moving between two items
|
|
||||||
let prevTask, nextTask;
|
|
||||||
|
|
||||||
if (targetIndex > movingTaskIndex) {
|
|
||||||
// Moving down
|
|
||||||
prevTask = statusTasks[targetIndex];
|
|
||||||
nextTask = statusTasks[targetIndex + 1];
|
|
||||||
} else {
|
|
||||||
// Moving up
|
|
||||||
prevTask = statusTasks[targetIndex - 1];
|
|
||||||
nextTask = statusTasks[targetIndex];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (prevTask && nextTask) {
|
|
||||||
newPosition = (prevTask.task_order + nextTask.task_order) / 2;
|
|
||||||
} else if (prevTask) {
|
|
||||||
newPosition = prevTask.task_order + 1024;
|
|
||||||
} else if (nextTask) {
|
|
||||||
newPosition = nextTask.task_order / 2;
|
|
||||||
} else {
|
|
||||||
newPosition = 1024; // Fallback
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('REORDER: New position calculated:', newPosition);
|
console.log('REORDER: New order:', updatedStatusTasks.map(t => `${t.title}:${t.task_order}`));
|
||||||
|
|
||||||
// Create updated task with new position and timestamp
|
// Update UI immediately with all reordered tasks
|
||||||
const updatedTask = {
|
const allUpdatedTasks = [...otherTasks, ...updatedStatusTasks];
|
||||||
...movingTask,
|
|
||||||
task_order: newPosition,
|
|
||||||
lastUpdate: Date.now() // Add timestamp for conflict resolution
|
|
||||||
};
|
|
||||||
|
|
||||||
// Immediate UI update without optimistic tracking interference
|
|
||||||
const allUpdatedTasks = otherTasks.concat(
|
|
||||||
statusTasks.map(task => task.id === taskId ? updatedTask : task)
|
|
||||||
);
|
|
||||||
updateTasks(allUpdatedTasks);
|
updateTasks(allUpdatedTasks);
|
||||||
|
|
||||||
// Persist to backend (single API call)
|
// Batch update to backend - only update tasks that changed position
|
||||||
debouncedPersistSingleTask(updatedTask);
|
const tasksToUpdate = updatedStatusTasks.filter((task, index) => {
|
||||||
}, [tasks, updateTasks, debouncedPersistSingleTask]);
|
const originalTask = statusTasks.find(t => t.id === task.id);
|
||||||
|
return originalTask && originalTask.task_order !== task.task_order;
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`REORDER: Updating ${tasksToUpdate.length} tasks in backend`);
|
||||||
|
|
||||||
|
// Send batch update to backend (debounced)
|
||||||
|
debouncedPersistBatchReorder(tasksToUpdate);
|
||||||
|
}, [tasks, updateTasks, debouncedPersistBatchReorder]);
|
||||||
|
|
||||||
// Task move function (for board view)
|
// Task move function (for board view) with optimistic UI update
|
||||||
const moveTask = async (taskId: string, newStatus: Task['status']) => {
|
const moveTask = async (taskId: string, newStatus: Task['status']) => {
|
||||||
console.log(`[TasksTab] Attempting to move task ${taskId} to new status: ${newStatus}`);
|
console.log(`[TasksTab] Attempting to move task ${taskId} to new status: ${newStatus}`);
|
||||||
|
|
||||||
|
const movingTask = tasks.find(task => task.id === taskId);
|
||||||
|
if (!movingTask) {
|
||||||
|
console.warn(`[TasksTab] Task ${taskId} not found for move operation.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const oldStatus = movingTask.status;
|
||||||
|
const newOrder = getNextOrderForStatus(newStatus);
|
||||||
|
const updatedTask = { ...movingTask, status: newStatus, task_order: newOrder };
|
||||||
|
|
||||||
|
console.log(`[TasksTab] Moving task ${movingTask.title} from ${oldStatus} to ${newStatus} with order ${newOrder}`);
|
||||||
|
|
||||||
|
// OPTIMISTIC UPDATE: Update UI immediately
|
||||||
|
console.log(`[TasksTab] Applying optimistic move for task ${taskId} to ${newStatus}`);
|
||||||
|
setTasks(prev => {
|
||||||
|
const updated = prev.map(task => task.id === taskId ? updatedTask : task);
|
||||||
|
console.log(`[TasksTab] Tasks after optimistic move:`, updated);
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
console.log(`[TasksTab] Optimistically updated UI for task ${taskId}`);
|
||||||
|
|
||||||
|
// Mark as pending update to prevent echo when socket update arrives
|
||||||
|
const taskToUpdate = tasks.find(t => t.id === taskId);
|
||||||
|
if (taskToUpdate) {
|
||||||
|
const updatedTask = { ...taskToUpdate, status: newStatus, task_order: newOrder };
|
||||||
|
addPendingUpdate({
|
||||||
|
id: taskId,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
data: updatedTask,
|
||||||
|
operation: 'update'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const movingTask = tasks.find(task => task.id === taskId);
|
// Then update the backend
|
||||||
if (!movingTask) {
|
|
||||||
console.warn(`[TasksTab] Task ${taskId} not found for move operation.`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const oldStatus = movingTask.status;
|
|
||||||
const newOrder = getNextOrderForStatus(newStatus);
|
|
||||||
|
|
||||||
console.log(`[TasksTab] Moving task ${movingTask.title} from ${oldStatus} to ${newStatus} with order ${newOrder}`);
|
|
||||||
|
|
||||||
// Update the task with new status and order
|
|
||||||
await projectService.updateTask(taskId, {
|
await projectService.updateTask(taskId, {
|
||||||
status: mapUIStatusToDBStatus(newStatus),
|
status: mapUIStatusToDBStatus(newStatus),
|
||||||
task_order: newOrder,
|
task_order: newOrder
|
||||||
client_timestamp: Date.now()
|
|
||||||
});
|
});
|
||||||
console.log(`[TasksTab] Successfully updated task ${taskId} status in backend.`);
|
console.log(`[TasksTab] Successfully updated task ${taskId} status in backend.`);
|
||||||
|
|
||||||
// Don't update local state immediately - let socket handle it
|
// Socket will confirm the update, but UI is already updated
|
||||||
console.log(`[TasksTab] Waiting for socket update for task ${taskId}.`);
|
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`[TasksTab] Failed to move task ${taskId}:`, error);
|
console.error(`[TasksTab] Failed to move task ${taskId}, rolling back:`, error);
|
||||||
|
|
||||||
|
// ROLLBACK on error - restore original task
|
||||||
|
setTasks(prev => {
|
||||||
|
const updated = prev.map(task => task.id === taskId ? movingTask : task);
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clear the pending update marker
|
||||||
|
removePendingUpdate(taskId);
|
||||||
|
|
||||||
alert(`Failed to move task: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
alert(`Failed to move task: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -453,25 +586,79 @@ export const TasksTab = ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
const deleteTask = async (task: Task) => {
|
const deleteTask = async (task: Task) => {
|
||||||
|
// Set the task to delete and show confirmation modal
|
||||||
|
setTaskToDelete(task);
|
||||||
|
setShowDeleteConfirm(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const confirmDeleteTask = async () => {
|
||||||
|
if (!taskToDelete) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Delete the task - backend will emit socket event
|
// Add to recently deleted cache to prevent race conditions
|
||||||
await projectService.deleteTask(task.id);
|
recentlyDeletedIdsRef.current.add(taskToDelete.id);
|
||||||
console.log(`[TasksTab] Task ${task.id} deletion sent to backend`);
|
|
||||||
|
|
||||||
// Don't update local state - let socket handle it
|
// OPTIMISTIC UPDATE: Remove task from UI immediately
|
||||||
|
setTasks(prev => {
|
||||||
|
const updated = prev.filter(t => t.id !== taskToDelete.id);
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
console.log(`[TasksTab] Optimistically removed task ${taskToDelete.id} from UI`);
|
||||||
|
|
||||||
|
// Then delete from backend
|
||||||
|
await projectService.deleteTask(taskToDelete.id);
|
||||||
|
console.log(`[TasksTab] Task ${taskToDelete.id} deletion confirmed by backend`);
|
||||||
|
|
||||||
|
// Clear from recently deleted cache after a delay (to catch any lingering socket events)
|
||||||
|
setTimeout(() => {
|
||||||
|
recentlyDeletedIdsRef.current.delete(taskToDelete.id);
|
||||||
|
}, 3000); // 3 second window to ignore stale socket events
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to delete task:', error);
|
console.error('Failed to delete task:', error);
|
||||||
// Note: The toast notification for deletion is now handled by TaskBoardView and TaskTableView
|
|
||||||
|
// Remove from recently deleted cache on error
|
||||||
|
recentlyDeletedIdsRef.current.delete(taskToDelete.id);
|
||||||
|
|
||||||
|
// ROLLBACK on error - restore the task
|
||||||
|
setTasks(prev => {
|
||||||
|
const updated = [...prev, taskToDelete].sort((a, b) => a.task_order - b.task_order);
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
console.log(`[TasksTab] Rolled back task deletion for ${taskToDelete.id}`);
|
||||||
|
|
||||||
|
// Re-throw to let the calling component handle the error display
|
||||||
|
throw error;
|
||||||
|
} finally {
|
||||||
|
setTaskToDelete(null);
|
||||||
|
setShowDeleteConfirm(false);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Inline task creation function
|
// Inline task creation function with optimistic update
|
||||||
const createTaskInline = async (newTask: Omit<Task, 'id'>) => {
|
const createTaskInline = async (newTask: Omit<Task, 'id'>) => {
|
||||||
|
// Create temporary task with a temp ID for optimistic update
|
||||||
|
const tempId = `temp-${Date.now()}`;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Auto-assign next order number if not provided
|
// Auto-assign next order number if not provided
|
||||||
const nextOrder = newTask.task_order || getNextOrderForStatus(newTask.status);
|
const nextOrder = newTask.task_order || getNextOrderForStatus(newTask.status);
|
||||||
|
|
||||||
|
const tempTask: Task = {
|
||||||
|
...newTask,
|
||||||
|
id: tempId,
|
||||||
|
task_order: nextOrder
|
||||||
|
};
|
||||||
|
|
||||||
|
// OPTIMISTIC UPDATE: Add to UI immediately
|
||||||
|
setTasks(prev => {
|
||||||
|
const updated = [...prev, tempTask];
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
|
||||||
const createData: CreateTaskRequest = {
|
const createData: CreateTaskRequest = {
|
||||||
project_id: projectId,
|
project_id: projectId,
|
||||||
title: newTask.title,
|
title: newTask.title,
|
||||||
@@ -483,13 +670,32 @@ export const TasksTab = ({
|
|||||||
...(newTask.featureColor && { featureColor: newTask.featureColor })
|
...(newTask.featureColor && { featureColor: newTask.featureColor })
|
||||||
};
|
};
|
||||||
|
|
||||||
await projectService.createTask(createData);
|
const createdTask = await projectService.createTask(createData);
|
||||||
|
const mappedCreatedTask = mapDatabaseTaskToUITask(createdTask);
|
||||||
|
|
||||||
|
// Add to recently created to prevent WebSocket echo from duplicating
|
||||||
|
recentlyCreatedIdsRef.current.add(createdTask.id);
|
||||||
|
setTimeout(() => {
|
||||||
|
recentlyCreatedIdsRef.current.delete(createdTask.id);
|
||||||
|
}, 5000);
|
||||||
|
|
||||||
|
// Replace temp task with real one
|
||||||
|
setTasks(prev => {
|
||||||
|
// Find and replace the temp task
|
||||||
|
const updated = prev.map(t =>
|
||||||
|
t.id === tempId ? mappedCreatedTask : t
|
||||||
|
);
|
||||||
|
setTimeout(() => onTasksChange(updated), 0);
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
|
||||||
// Don't reload tasks - let socket updates handle synchronization
|
|
||||||
console.log('[TasksTab] Task creation sent to backend, waiting for socket update');
|
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to create task:', error);
|
console.error('Failed to create task:', error);
|
||||||
|
|
||||||
|
// Rollback: Remove temp task on error
|
||||||
|
setTasks(prev => prev.filter(t => t.id !== tempId));
|
||||||
|
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -497,10 +703,38 @@ export const TasksTab = ({
  // Inline task update function
  const updateTaskInline = async (taskId: string, updates: Partial<Task>) => {
    console.log(`[TasksTab] Inline update for task ${taskId} with updates:`, updates);
+
+    // Store the original task for potential rollback
+    const originalTask = tasks.find(t => t.id === taskId);
+
+    // Optimistically update the UI immediately
+    console.log(`[TasksTab] Applying optimistic update for task ${taskId}`, updates);
+    setTasks(prevTasks => {
+      const updated = prevTasks.map(task =>
+        task.id === taskId
+          ? { ...task, ...updates }
+          : task
+      );
+      console.log(`[TasksTab] Tasks after optimistic update:`, updated);
+      // Notify parent of the optimistic update
+      setTimeout(() => onTasksChange(updated), 0);
+      return updated;
+    });
+
+    // Mark as pending update to prevent echo when socket update arrives
+    const taskToUpdate = tasks.find(t => t.id === taskId);
+    if (taskToUpdate) {
+      const updatedTask = { ...taskToUpdate, ...updates };
+      addPendingUpdate({
+        id: taskId,
+        timestamp: Date.now(),
+        data: updatedTask,
+        operation: 'update'
+      });
+    }
+
    try {
-      const updateData: Partial<UpdateTaskRequest> = {
-        client_timestamp: Date.now()
-      };
+      const updateData: Partial<UpdateTaskRequest> = {};
      if (updates.title !== undefined) updateData.title = updates.title;
      if (updates.description !== undefined) updateData.description = updates.description;
@@ -518,11 +752,21 @@ export const TasksTab = ({
      await projectService.updateTask(taskId, updateData);
      console.log(`[TasksTab] projectService.updateTask successful for ${taskId}.`);
-
-      // Don't update local state optimistically - let socket handle it
-      console.log(`[TasksTab] Waiting for socket update for task ${taskId}.`);
-
    } catch (error) {
      console.error(`[TasksTab] Failed to update task ${taskId} inline:`, error);
+
+      // Revert the optimistic update on error
+      if (originalTask) {
+        setTasks(prevTasks =>
+          prevTasks.map(task =>
+            task.id === taskId ? originalTask : task
+          )
+        );
+      }
+
+      // Clear the pending update marker
+      removePendingUpdate(taskId);
+
      alert(`Failed to update task: ${error instanceof Error ? error.message : 'Unknown error'}`);
      throw error;
    }
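The hunk above calls addPendingUpdate and removePendingUpdate, which are defined outside this diff. Purely as a hypothetical sketch of what that bookkeeping could look like (the id/timestamp/data/operation fields mirror the call site; the 5-second window and the echo check are illustrative assumptions, not the project's actual implementation):

```typescript
// Hypothetical sketch only - the real helpers live elsewhere in TasksTab.
interface PendingUpdate {
  id: string;
  timestamp: number;
  data: unknown;
  operation: 'create' | 'update' | 'delete';
}

const pendingUpdates = new Map<string, PendingUpdate>();

function addPendingUpdate(update: PendingUpdate): void {
  pendingUpdates.set(update.id, update);
}

function removePendingUpdate(taskId: string): void {
  pendingUpdates.delete(taskId);
}

// A socket handler could use this to drop echoes of its own recent writes:
function isRecentLocalWrite(taskId: string, windowMs = 5000): boolean {
  const pending = pendingUpdates.get(taskId);
  return pending !== undefined && Date.now() - pending.timestamp < windowMs;
}
```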
@@ -580,7 +824,7 @@ export const TasksTab = ({
    <div className="relative h-[calc(100vh-220px)] overflow-auto">
      {viewMode === 'table' ? (
        <TaskTableView
-          tasks={tasks}
+          tasks={tasks.filter(t => t && t.id && t.title !== undefined)}
          onTaskView={openEditModal}
          onTaskComplete={completeTask}
          onTaskDelete={deleteTask}
@@ -590,7 +834,7 @@ export const TasksTab = ({
        />
      ) : (
        <TaskBoardView
-          tasks={tasks}
+          tasks={tasks.filter(t => t && t.id && t.title !== undefined)}
          onTaskView={openEditModal}
          onTaskComplete={completeTask}
          onTaskDelete={deleteTask}
@@ -674,6 +918,19 @@ export const TasksTab = ({
        onSave={saveTask}
        getTasksForPrioritySelection={memoizedGetTasksForPrioritySelection}
      />
+
+      {/* Delete Confirmation Modal */}
+      {showDeleteConfirm && taskToDelete && (
+        <DeleteConfirmModal
+          itemName={taskToDelete.title}
+          onConfirm={confirmDeleteTask}
+          onCancel={() => {
+            setTaskToDelete(null);
+            setShowDeleteConfirm(false);
+          }}
+          type="task"
+        />
+      )}
    </div>
  </DndProvider>
);
@@ -1,6 +1,6 @@
import React, { useState, useEffect } from 'react';
import { X, Clock, RotateCcw, Eye, Calendar, User, FileText, Diff, GitBranch, Layers, Plus, Minus, AlertTriangle } from 'lucide-react';
-import projectService from '../../services/projectService';
+import { projectService } from '../../services/projectService';
import { Button } from '../ui/Button';
import { useToast } from '../../contexts/ToastContext';

@@ -92,7 +92,7 @@ function formatSectionContent(value: any): string {
/**
 * Formats array content as markdown list or nested structure
 */
-function formatArrayContent(array: any[]): string {
+function formatArrayContent(array: unknown[]): string {
  if (array.length === 0) {
    return '_No items_';
  }
@@ -60,7 +60,7 @@ export interface PRPPersona {
export interface PRPPhase {
  duration?: string;
  deliverables?: string[];
-  tasks?: any[];
+  tasks?: Array<{title: string; files: string[]; details: string}>;
  [key: string]: any;
}

@@ -30,11 +30,11 @@ export const IDEGlobalRules = () => {

**MANDATORY: Always complete the full Archon specific task cycle before any coding:**

-1. **Check Current Task** → \`archon:manage_task(action="get", task_id="...")\`
+1. **Check Current Task** → \`archon:get_task(task_id="...")\`
2. **Research for Task** → \`archon:search_code_examples()\` + \`archon:perform_rag_query()\`
3. **Implement the Task** → Write code based on research
-4. **Update Task Status** → \`archon:manage_task(action="update", task_id="...", update_fields={"status": "review"})\`
+4. **Update Task Status** → \`archon:update_task(task_id="...", status="review")\`
-5. **Get Next Task** → \`archon:manage_task(action="list", filter_by="status", filter_value="todo")\`
+5. **Get Next Task** → \`archon:list_tasks(filter_by="status", filter_value="todo")\`
6. **Repeat Cycle**

**NEVER skip task updates with the Archon MCP server. NEVER code without checking current tasks first.**
@@ -45,8 -7 @@ export const IDEGlobalRules = () => {

\`\`\`bash
# Create project container
-archon:manage_project(
-  action="create",
+archon:create_project(
  title="Descriptive Project Name",
  github_repo="github.com/user/repo-name"
)
@@ -60,7 +59,7 @@ archon:manage_project(
# First, analyze existing codebase thoroughly
# Read all major files, understand architecture, identify current state
# Then create project container
-archon:manage_project(action="create", title="Existing Project Name")
+archon:create_project(title="Existing Project Name")

# Research current tech stack and create tasks for remaining work
# Focus on what needs to be built, not what already exists
@@ -70,7 +69,7 @@ archon:manage_project(action="create", title="Existing Project Name")

\`\`\`bash
# Check existing project status
-archon:manage_task(action="list", filter_by="project", filter_value="[project_id]")
+archon:list_tasks(filter_by="project", filter_value="[project_id]")

# Pick up where you left off - no new project creation needed
# Continue with standard development iteration workflow
@@ -101,16 +100,14 @@ archon:search_code_examples(query="[specific feature] implementation", match_cou

\`\`\`bash
# Get current project status
-archon:manage_task(
-  action="list",
+archon:list_tasks(
  filter_by="project",
  filter_value="[project_id]",
  include_closed=false
)

# Get next priority task
-archon:manage_task(
-  action="list",
+archon:list_tasks(
  filter_by="status",
  filter_value="todo",
  project_id="[project_id]"
@@ -150,15 +147,14 @@ archon:search_code_examples(

**1. Get Task Details:**
\`\`\`bash
-archon:manage_task(action="get", task_id="[current_task_id]")
+archon:get_task(task_id="[current_task_id]")
\`\`\`

**2. Update to In-Progress:**
\`\`\`bash
-archon:manage_task(
-  action="update",
+archon:update_task(
  task_id="[current_task_id]",
-  update_fields={"status": "doing"}
+  status="doing"
)
\`\`\`

@@ -170,10 +166,9 @@ archon:manage_task(
**4. Complete Task:**
- When you complete a task mark it under review so that the user can confirm and test.
\`\`\`bash
-archon:manage_task(
-  action="update",
+archon:update_task(
  task_id="[current_task_id]",
-  update_fields={"status": "review"}
+  status="review"
)
\`\`\`

@@ -225,7 +220,7 @@ archon:search_code_examples(query="PostgreSQL connection pooling Node.js", match
**Start of each coding session:**

1. Check available sources: \`archon:get_available_sources()\`
-2. Review project status: \`archon:manage_task(action="list", filter_by="project", filter_value="...")\`
+2. Review project status: \`archon:list_tasks(filter_by="project", filter_value="...")\`
3. Identify next priority task: Find highest \`task_order\` in "todo" status
4. Conduct task-specific research
5. Begin implementation
@@ -247,17 +242,15 @@ archon:search_code_examples(query="PostgreSQL connection pooling Node.js", match
**Status Update Examples:**
\`\`\`bash
# Move to review when implementation complete but needs testing
-archon:manage_task(
-  action="update",
+archon:update_task(
  task_id="...",
-  update_fields={"status": "review"}
+  status="review"
)

# Complete task after review passes
-archon:manage_task(
-  action="update",
+archon:update_task(
  task_id="...",
-  update_fields={"status": "done"}
+  status="done"
)
\`\`\`

@@ -291,8 +284,7 @@ archon:manage_task(
archon:get_project_features(project_id="...")

# Create tasks aligned with features
-archon:manage_task(
-  action="create",
+archon:create_task(
  project_id="...",
  title="...",
  feature="Authentication",  # Align with project features
archon-ui-main/src/components/ui/DeleteConfirmModal.tsx (new file, 83 lines)
@@ -0,0 +1,83 @@
+import React from 'react';
+import { Trash2 } from 'lucide-react';
+
+export interface DeleteConfirmModalProps {
+  itemName: string;
+  onConfirm: () => void;
+  onCancel: () => void;
+  type: 'project' | 'task' | 'client' | 'document' | 'knowledge-items' | 'feature' | 'data';
+}
+
+export const DeleteConfirmModal: React.FC<DeleteConfirmModalProps> = ({ itemName, onConfirm, onCancel, type }) => {
+  const getTitle = () => {
+    switch (type) {
+      case 'project': return 'Delete Project';
+      case 'task': return 'Delete Task';
+      case 'client': return 'Delete MCP Client';
+      case 'document': return 'Delete Document';
+      case 'knowledge-items': return 'Delete Knowledge Items';
+      case 'feature': return 'Delete Feature';
+      case 'data': return 'Delete Data';
+    }
+  };
+
+  const getMessage = () => {
+    switch (type) {
+      case 'project': return `Are you sure you want to delete the "${itemName}" project? This will also delete all associated tasks and documents and cannot be undone.`;
+      case 'task': return `Are you sure you want to delete the "${itemName}" task? This action cannot be undone.`;
+      case 'client': return `Are you sure you want to delete the "${itemName}" client? This will permanently remove its configuration and cannot be undone.`;
+      case 'document': return `Are you sure you want to delete the "${itemName}" document? This action cannot be undone.`;
+      case 'knowledge-items': return `Are you sure you want to delete ${itemName}? This will permanently remove the selected items from your knowledge base and cannot be undone.`;
+      case 'feature': return `Are you sure you want to delete the "${itemName}" feature? This action cannot be undone.`;
+      case 'data': return `Are you sure you want to delete this data? This action cannot be undone.`;
+    }
+  };
+
+  return (
+    <div className="fixed inset-0 bg-black/50 backdrop-blur-sm flex items-center justify-center z-50">
+      <div className="relative p-6 rounded-md backdrop-blur-md w-full max-w-md
+        bg-gradient-to-b from-white/80 to-white/60 dark:from-white/10 dark:to-black/30
+        border border-gray-200 dark:border-zinc-800/50
+        shadow-[0_10px_30px_-15px_rgba(0,0,0,0.1)] dark:shadow-[0_10px_30px_-15px_rgba(0,0,0,0.7)]
+        before:content-[''] before:absolute before:top-0 before:left-0 before:right-0 before:h-[2px]
+        before:rounded-t-[4px] before:bg-red-500
+        before:shadow-[0_0_10px_2px_rgba(239,68,68,0.4)] dark:before:shadow-[0_0_20px_5px_rgba(239,68,68,0.7)]">
+
+        <div className="relative z-10">
+          <div className="flex items-center gap-3 mb-4">
+            <div className="w-12 h-12 rounded-full bg-red-100 dark:bg-red-900/30 flex items-center justify-center">
+              <Trash2 className="w-6 h-6 text-red-600 dark:text-red-400" />
+            </div>
+            <div>
+              <h3 className="text-lg font-semibold text-gray-800 dark:text-white">
+                {getTitle()}
+              </h3>
+              <p className="text-sm text-gray-600 dark:text-gray-400">
+                This action cannot be undone
+              </p>
+            </div>
+          </div>
+
+          <p className="text-gray-700 dark:text-gray-300 mb-6">
+            {getMessage()}
+          </p>
+
+          <div className="flex justify-end gap-3">
+            <button
+              onClick={onCancel}
+              className="px-4 py-2 text-gray-700 dark:text-gray-300 bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 rounded-md transition-colors"
+            >
+              Cancel
+            </button>
+            <button
+              onClick={onConfirm}
+              className="px-4 py-2 bg-red-600 hover:bg-red-700 text-white rounded-md transition-colors shadow-lg shadow-red-600/20"
+            >
+              Delete
+            </button>
+          </div>
+        </div>
+      </div>
+    </div>
+  );
+};
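For context, the component is intentionally presentation-only: each caller in this commit owns the confirmation state and the async delete. A condensed wrapper sketch under that assumption (DeleteTaskButton and deleteTaskById are illustrative names, not part of the codebase):

```tsx
// Illustrative wrapper only; deleteTaskById is a hypothetical callback.
import React, { useState } from 'react';
import { DeleteConfirmModal } from './DeleteConfirmModal';

export function DeleteTaskButton({ taskId, title, deleteTaskById }: {
  taskId: string;
  title: string;
  deleteTaskById: (id: string) => Promise<void>;
}) {
  const [confirming, setConfirming] = useState(false);

  return (
    <>
      <button onClick={() => setConfirming(true)}>Delete</button>
      {confirming && (
        <DeleteConfirmModal
          itemName={title}
          type="task"
          onConfirm={async () => {
            await deleteTaskById(taskId); // caller owns the async work
            setConfirming(false);
          }}
          onCancel={() => setConfirming(false)}
        />
      )}
    </>
  );
}
```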
@@ -6,7 +6,7 @@
 * approach that avoids conflicts and connection issues.
 */

-import { useEffect, useRef, useCallback } from 'react';
+import { useEffect, useRef, useCallback, useState } from 'react';
import { taskSocketService, TaskSocketEvents } from '../services/taskSocketService';
import { WebSocketState } from '../services/socketIOService';

@@ -36,6 +36,10 @@ export function useTaskSocket(options: UseTaskSocketOptions) {
  const componentIdRef = useRef<string>(`task-socket-${Math.random().toString(36).substring(7)}`);
  const currentProjectIdRef = useRef<string | null>(null);
  const isInitializedRef = useRef<boolean>(false);

+  // Add reactive state for connection status
+  const [isConnected, setIsConnected] = useState<boolean>(false);
+  const [connectionState, setConnectionState] = useState<WebSocketState>(WebSocketState.DISCONNECTED);
+
  // Memoized handlers to prevent unnecessary re-registrations
  const memoizedHandlers = useCallback((): Partial<TaskSocketEvents> => {
@@ -58,6 +62,44 @@ export function useTaskSocket(options: UseTaskSocketOptions) {
    onConnectionStateChange
  ]);

+  // Subscribe to connection state changes
+  useEffect(() => {
+    const checkConnection = () => {
+      const connected = taskSocketService.isConnected();
+      const state = taskSocketService.getConnectionState();
+      setIsConnected(connected);
+      setConnectionState(state);
+    };
+
+    // Check initial state
+    checkConnection();
+
+    // Poll for connection state changes (since the service doesn't expose event emitters)
+    const interval = setInterval(checkConnection, 500);
+
+    // Also trigger when connection state handler is called
+    const wrappedOnConnectionStateChange = onConnectionStateChange ? (state: WebSocketState) => {
+      setConnectionState(state);
+      setIsConnected(state === WebSocketState.CONNECTED);
+      onConnectionStateChange(state);
+    } : (state: WebSocketState) => {
+      setConnectionState(state);
+      setIsConnected(state === WebSocketState.CONNECTED);
+    };
+
+    // Update the handler
+    if (componentIdRef.current && taskSocketService) {
+      taskSocketService.registerHandlers(componentIdRef.current, {
+        ...memoizedHandlers(),
+        onConnectionStateChange: wrappedOnConnectionStateChange
+      });
+    }
+
+    return () => {
+      clearInterval(interval);
+    };
+  }, []); // No dependencies - only run once on mount
+
  // Initialize connection once and register handlers
  useEffect(() => {
    if (!projectId || isInitializedRef.current) return;
@@ -65,6 +107,7 @@ export function useTaskSocket(options: UseTaskSocketOptions) {
    const initializeConnection = async () => {
      try {
        console.log(`[USE_TASK_SOCKET] Initializing connection for project: ${projectId}`);
+        setConnectionState(WebSocketState.CONNECTING);

        // Register handlers first
        taskSocketService.registerHandlers(componentIdRef.current, memoizedHandlers());
@@ -76,22 +119,20 @@ export function useTaskSocket(options: UseTaskSocketOptions) {
        isInitializedRef.current = true;
        console.log(`[USE_TASK_SOCKET] Successfully initialized for project: ${projectId}`);
+
+        // Update connection state after successful connection
+        setIsConnected(taskSocketService.isConnected());
+        setConnectionState(taskSocketService.getConnectionState());
+
      } catch (error) {
        console.error(`[USE_TASK_SOCKET] Failed to initialize for project ${projectId}:`, error);
+        setConnectionState(WebSocketState.DISCONNECTED);
+        setIsConnected(false);
      }
    };

    initializeConnection();

-  }, [projectId, memoizedHandlers]);
+  }, [projectId]); // Only depend on projectId
-
-  // Update handlers when they change (without reconnecting)
-  useEffect(() => {
-    if (isInitializedRef.current && currentProjectIdRef.current === projectId) {
-      console.log(`[USE_TASK_SOCKET] Updating handlers for component: ${componentIdRef.current}`);
-      taskSocketService.registerHandlers(componentIdRef.current, memoizedHandlers());
-    }
-  }, [memoizedHandlers, projectId]);
-
  // Handle project change (different project)
  useEffect(() => {
@@ -103,6 +144,8 @@ export function useTaskSocket(options: UseTaskSocketOptions) {

    const switchProject = async () => {
      try {
+        setConnectionState(WebSocketState.CONNECTING);
+
        // Update handlers for new project
        taskSocketService.registerHandlers(componentIdRef.current, memoizedHandlers());

@@ -112,14 +155,20 @@ export function useTaskSocket(options: UseTaskSocketOptions) {
        currentProjectIdRef.current = projectId;
        console.log(`[USE_TASK_SOCKET] Successfully switched to project: ${projectId}`);
+
+        // Update connection state
+        setIsConnected(taskSocketService.isConnected());
+        setConnectionState(taskSocketService.getConnectionState());
+
      } catch (error) {
        console.error(`[USE_TASK_SOCKET] Failed to switch to project ${projectId}:`, error);
+        setConnectionState(WebSocketState.DISCONNECTED);
+        setIsConnected(false);
      }
    };

    switchProject();
  }
-  }, [projectId, memoizedHandlers]);
+  }, [projectId]); // Only depend on projectId

  // Cleanup on unmount
  useEffect(() => {
@@ -132,10 +181,10 @@ export function useTaskSocket(options: UseTaskSocketOptions) {
    };
  }, []);

-  // Return utility functions
+  // Return reactive state and utility functions
  return {
-    isConnected: taskSocketService.isConnected(),
-    connectionState: taskSocketService.getConnectionState(),
+    isConnected, // Now reactive!
+    connectionState, // Now reactive!
    reconnect: taskSocketService.reconnect.bind(taskSocketService),
    getCurrentProjectId: taskSocketService.getCurrentProjectId.bind(taskSocketService)
  };
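Because isConnected and connectionState are now React state rather than one-shot reads, any consumer re-renders when the connection changes. A minimal consumer sketch, assuming the hook is imported from a hooks folder and accepts a projectId option as the surrounding diff implies (ConnectionBadge itself is illustrative):

```tsx
// Minimal consumer sketch; the import path and option shape are assumptions.
import React from 'react';
import { useTaskSocket } from '../hooks/useTaskSocket';

export function ConnectionBadge({ projectId }: { projectId: string }) {
  const { isConnected, connectionState, reconnect } = useTaskSocket({ projectId });

  // Re-renders automatically when the polled connection state changes.
  return (
    <button onClick={reconnect} title={`State: ${connectionState}`}>
      {isConnected ? 'Live' : 'Reconnecting…'}
    </button>
  );
}
```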
@@ -6,8 +6,8 @@ export const UITaskStatusSchema = z.enum(['backlog', 'in-progress', 'review', 'c
export const TaskPrioritySchema = z.enum(['low', 'medium', 'high', 'critical']);
export const ProjectColorSchema = z.enum(['cyan', 'purple', 'pink', 'blue', 'orange', 'green']);

-// Assignee schema - simplified to predefined options
-export const AssigneeSchema = z.enum(['User', 'Archon', 'AI IDE Agent']);
+// Assignee schema - allow any string value (backend no longer restricts this)
+export const AssigneeSchema = z.string();

// Project schemas
export const CreateProjectSchema = z.object({
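The practical effect of the relaxed schema: assignee values the old enum rejected now pass validation. An illustrative zod snippet:

```typescript
import { z } from 'zod';

// Before: only three fixed assignees were accepted.
const OldAssigneeSchema = z.enum(['User', 'Archon', 'AI IDE Agent']);
// After: any string is accepted, matching the relaxed backend.
const NewAssigneeSchema = z.string();

console.log(OldAssigneeSchema.safeParse('QA Agent').success); // false
console.log(NewAssigneeSchema.safeParse('QA Agent').success); // true
```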
@@ -18,6 +18,7 @@ import { KnowledgeTable } from '../components/knowledge-base/KnowledgeTable';
import { KnowledgeItemCard } from '../components/knowledge-base/KnowledgeItemCard';
import { GroupedKnowledgeItemCard } from '../components/knowledge-base/GroupedKnowledgeItemCard';
import { KnowledgeGridSkeleton, KnowledgeTableSkeleton } from '../components/knowledge-base/KnowledgeItemSkeleton';
+import { DeleteConfirmModal } from '../components/ui/DeleteConfirmModal';
import { GroupCreationModal } from '../components/knowledge-base/GroupCreationModal';

const extractDomain = (url: string): string => {
@@ -70,6 +71,10 @@ export const KnowledgeBasePage = () => {
  const [isSelectionMode, setIsSelectionMode] = useState(false);
  const [lastSelectedIndex, setLastSelectedIndex] = useState<number | null>(null);

+  // Delete confirmation modal state
+  const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
+  const [itemsToDelete, setItemsToDelete] = useState<{ count: number; items: Set<string> } | null>(null);
+
  const { showToast } = useToast();

  // Single consolidated loading function - only loads data, no filtering
@@ -360,32 +365,43 @@ export const KnowledgeBasePage = () => {
    if (selectedItems.size === 0) return;

    const count = selectedItems.size;
-    const confirmed = window.confirm(`Are you sure you want to delete ${count} selected item${count > 1 ? 's' : ''}?`);
-
-    if (!confirmed) return;
+    setItemsToDelete({ count, items: new Set(selectedItems) });
+    setShowDeleteConfirm(true);
+  };
+
+  const confirmDeleteItems = async () => {
+    if (!itemsToDelete) return;

    try {
      // Delete each selected item
-      const deletePromises = Array.from(selectedItems).map(itemId =>
+      const deletePromises = Array.from(itemsToDelete.items).map(itemId =>
        knowledgeBaseService.deleteKnowledgeItem(itemId)
      );

      await Promise.all(deletePromises);

      // Remove deleted items from state
-      setKnowledgeItems(prev => prev.filter(item => !selectedItems.has(item.id)));
+      setKnowledgeItems(prev => prev.filter(item => !itemsToDelete.items.has(item.id)));

      // Clear selection
      setSelectedItems(new Set());
      setIsSelectionMode(false);

-      showToast(`Successfully deleted ${count} item${count > 1 ? 's' : ''}`, 'success');
+      showToast(`Successfully deleted ${itemsToDelete.count} item${itemsToDelete.count > 1 ? 's' : ''}`, 'success');
    } catch (error) {
      console.error('Failed to delete selected items:', error);
      showToast('Failed to delete some items', 'error');
+    } finally {
+      setShowDeleteConfirm(false);
+      setItemsToDelete(null);
    }
  };

+  const cancelDeleteItems = () => {
+    setShowDeleteConfirm(false);
+    setItemsToDelete(null);
+  };
+
  // Keyboard shortcuts
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
@@ -1194,6 +1210,16 @@ export const KnowledgeBasePage = () => {
        }}
      />
    )}

+    {/* Delete Confirmation Modal */}
+    {showDeleteConfirm && itemsToDelete && (
+      <DeleteConfirmModal
+        itemName={`${itemsToDelete.count} selected item${itemsToDelete.count > 1 ? 's' : ''}`}
+        onConfirm={confirmDeleteItems}
+        onCancel={cancelDeleteItems}
+        type="knowledge-items"
+      />
+    )}
  </div>;
};

@@ -9,6 +9,8 @@ import { DocsTab } from '../components/project-tasks/DocsTab';
import { TasksTab } from '../components/project-tasks/TasksTab';
import { Button } from '../components/ui/Button';
import { ChevronRight, ShoppingCart, Code, Briefcase, Layers, Plus, X, AlertCircle, Loader2, Heart, BarChart3, Trash2, Pin, ListTodo, Activity, CheckCircle2, Clipboard } from 'lucide-react';
+import { copyToClipboard } from '../utils/clipboard';
+import { DeleteConfirmModal } from '../components/ui/DeleteConfirmModal';

// Import our service layer and types
import { projectService } from '../services/projectService';
@@ -365,12 +367,12 @@ export function ProjectPage({

      const tasksData = await projectService.getTasksByProject(projectId);

-      // Convert backend tasks to UI format
+      // Convert backend tasks to UI format with proper defaults
      const uiTasks: Task[] = tasksData.map(task => ({
        id: task.id,
-        title: task.title,
-        description: task.description,
-        status: (task.uiStatus || 'backlog') as Task['status'],
+        title: task.title || '',
+        description: task.description || '',
+        status: (task.uiStatus || task.status || 'backlog') as Task['status'],
        assignee: {
          name: (task.assignee || 'User') as 'User' | 'Archon' | 'AI IDE Agent',
          avatar: ''
@@ -844,17 +846,21 @@ export function ProjectPage({

      {/* Copy Project ID Button */}
      <button
-        onClick={(e) => {
+        onClick={async (e) => {
          e.stopPropagation();
-          navigator.clipboard.writeText(project.id);
-          showToast('Project ID copied to clipboard', 'success');
-          // Visual feedback
-          const button = e.currentTarget;
-          const originalHTML = button.innerHTML;
-          button.innerHTML = '<svg class="w-3 h-3 mr-1 inline" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 13l4 4L19 7"></path></svg>Copied!';
-          setTimeout(() => {
-            button.innerHTML = originalHTML;
-          }, 2000);
+          const success = await copyToClipboard(project.id);
+          if (success) {
+            showToast('Project ID copied to clipboard', 'success');
+            // Visual feedback
+            const button = e.currentTarget;
+            const originalHTML = button.innerHTML;
+            button.innerHTML = '<svg class="w-3 h-3 mr-1 inline" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 13l4 4L19 7"></path></svg>Copied!';
+            setTimeout(() => {
+              button.innerHTML = originalHTML;
+            }, 2000);
+          } else {
+            showToast('Failed to copy Project ID', 'error');
+          }
        }}
        className="flex-1 flex items-center justify-center gap-1 text-xs text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200 transition-colors py-1"
        title="Copy Project ID to clipboard"
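The copyToClipboard helper imported above is not part of this hunk. A plausible sketch of src/utils/clipboard.ts, assuming it wraps navigator.clipboard with a legacy fallback and resolves to the boolean the call site checks (the real implementation may differ):

```typescript
// Hypothetical sketch of src/utils/clipboard.ts; not the actual file from this commit.
export async function copyToClipboard(text: string): Promise<boolean> {
  try {
    if (navigator.clipboard && window.isSecureContext) {
      await navigator.clipboard.writeText(text);
      return true;
    }
    // Fallback for insecure contexts / older browsers.
    const textarea = document.createElement('textarea');
    textarea.value = text;
    textarea.style.position = 'fixed';
    document.body.appendChild(textarea);
    textarea.select();
    const ok = document.execCommand('copy');
    document.body.removeChild(textarea);
    return ok;
  } catch {
    return false;
  }
}
```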
@@ -1057,76 +1063,3 @@ export function ProjectPage({
  );
}

-// Reusable Delete Confirmation Modal Component
-export interface DeleteConfirmModalProps {
-  itemName: string;
-  onConfirm: () => void;
-  onCancel: () => void;
-  type: 'project' | 'task' | 'client';
-}
-
-export const DeleteConfirmModal: React.FC<DeleteConfirmModalProps> = ({ itemName, onConfirm, onCancel, type }) => {
-  const getTitle = () => {
-    switch (type) {
-      case 'project': return 'Delete Project';
-      case 'task': return 'Delete Task';
-      case 'client': return 'Delete MCP Client';
-    }
-  };
-
-  const getMessage = () => {
-    switch (type) {
-      case 'project': return `Are you sure you want to delete the "${itemName}" project? This will also delete all associated tasks and documents and cannot be undone.`;
-      case 'task': return `Are you sure you want to delete the "${itemName}" task? This action cannot be undone.`;
-      case 'client': return `Are you sure you want to delete the "${itemName}" client? This will permanently remove its configuration and cannot be undone.`;
-    }
-  };
-
-  return (
-    <div className="fixed inset-0 bg-black/50 backdrop-blur-sm flex items-center justify-center z-50">
-      <div className="relative p-6 rounded-md backdrop-blur-md w-full max-w-md
-        bg-gradient-to-b from-white/80 to-white/60 dark:from-white/10 dark:to-black/30
-        border border-gray-200 dark:border-zinc-800/50
-        shadow-[0_10px_30px_-15px_rgba(0,0,0,0.1)] dark:shadow-[0_10px_30px_-15px_rgba(0,0,0,0.7)]
-        before:content-[''] before:absolute before:top-0 before:left-0 before:right-0 before:h-[2px]
-        before:rounded-t-[4px] before:bg-red-500
-        before:shadow-[0_0_10px_2px_rgba(239,68,68,0.4)] dark:before:shadow-[0_0_20px_5px_rgba(239,68,68,0.7)]">
-
-        <div className="relative z-10">
-          <div className="flex items-center gap-3 mb-4">
-            <div className="w-12 h-12 rounded-full bg-red-100 dark:bg-red-900/30 flex items-center justify-center">
-              <Trash2 className="w-6 h-6 text-red-600 dark:text-red-400" />
-            </div>
-            <div>
-              <h3 className="text-lg font-semibold text-gray-800 dark:text-white">
-                {getTitle()}
-              </h3>
-              <p className="text-sm text-gray-600 dark:text-gray-400">
-                This action cannot be undone
-              </p>
-            </div>
-          </div>
-
-          <p className="text-gray-700 dark:text-gray-300 mb-6">
-            {getMessage()}
-          </p>
-
-          <div className="flex justify-end gap-3">
-            <button
-              onClick={onCancel}
-              className="px-4 py-2 text-gray-600 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 transition-colors"
-            >
-              Cancel
-            </button>
-            <button
-              onClick={onConfirm}
-              className="px-4 py-2 bg-red-600 hover:bg-red-700 text-white rounded-lg transition-colors shadow-lg shadow-red-600/25 hover:shadow-red-700/25"
-            >
-              Delete
-            </button>
-          </div>
-        </div>
-      </div>
-    </div>
-  );
-};
@@ -8,7 +8,6 @@ import {
  Key,
  Brain,
  Code,
-  Activity,
  FileCode,
  Bug,
} from "lucide-react";
@@ -20,7 +19,6 @@ import { FeaturesSection } from "../components/settings/FeaturesSection";
import { APIKeysSection } from "../components/settings/APIKeysSection";
import { RAGSettings } from "../components/settings/RAGSettings";
import { CodeExtractionSettings } from "../components/settings/CodeExtractionSettings";
-import { TestStatus } from "../components/settings/TestStatus";
import { IDEGlobalRules } from "../components/settings/IDEGlobalRules";
import { ButtonPlayground } from "../components/settings/ButtonPlayground";
import { CollapsibleSettingsCard } from "../components/ui/CollapsibleSettingsCard";
@@ -151,15 +149,31 @@ export const SettingsPage = () => {
        </CollapsibleSettingsCard>
      </motion.div>
    )}

+    {/* Bug Report Section - Moved to left column */}
    <motion.div variants={itemVariants}>
      <CollapsibleSettingsCard
-        title="Test Status"
-        icon={Activity}
-        accentColor="cyan"
-        storageKey="test-status"
-        defaultExpanded={true}
+        title="Bug Reporting"
+        icon={Bug}
+        iconColor="text-red-500"
+        borderColor="border-red-200 dark:border-red-800"
+        defaultExpanded={false}
      >
-        <TestStatus />
+        <div className="space-y-4">
+          <p className="text-sm text-gray-600 dark:text-gray-400">
+            Found a bug or issue? Report it to help improve Archon V2
+            Alpha.
+          </p>
+          <div className="flex justify-start">
+            <BugReportButton variant="secondary" size="md">
+              Report Bug
+            </BugReportButton>
+          </div>
+          <div className="text-xs text-gray-500 dark:text-gray-400 space-y-1">
+            <p>• Bug reports are sent directly to GitHub Issues</p>
+            <p>• System context is automatically collected</p>
+            <p>• Your privacy is protected - no personal data is sent</p>
+          </div>
+        </div>
      </CollapsibleSettingsCard>
    </motion.div>
  </div>
@@ -205,34 +219,6 @@ export const SettingsPage = () => {
        />
      </CollapsibleSettingsCard>
    </motion.div>
-
-    {/* Bug Report Section */}
-    <motion.div variants={itemVariants}>
-      <CollapsibleSettingsCard
-        title="Bug Reporting"
-        icon={Bug}
-        iconColor="text-red-500"
-        borderColor="border-red-200 dark:border-red-800"
-        defaultExpanded={false}
-      >
-        <div className="space-y-4">
-          <p className="text-sm text-gray-600 dark:text-gray-400">
-            Found a bug or issue? Report it to help improve Archon V2
-            Alpha.
-          </p>
-          <div className="flex justify-start">
-            <BugReportButton variant="secondary" size="md">
-              Report Bug
-            </BugReportButton>
-          </div>
-          <div className="text-xs text-gray-500 dark:text-gray-400 space-y-1">
-            <p>• Bug reports are sent directly to GitHub Issues</p>
-            <p>• System context is automatically collected</p>
-            <p>• Your privacy is protected - no personal data is sent</p>
-          </div>
-        </div>
-      </CollapsibleSettingsCard>
-    </motion.div>
  </div>
</div>

archon-ui-main/src/schemas/project.schemas.ts (new file, 213 lines)
@@ -0,0 +1,213 @@
+/**
+ * Zod schemas for runtime validation of project-related data
+ * These schemas ensure type safety when receiving data from the backend
+ */
+
+import { z } from 'zod';
+
+/**
+ * Schema for project document in JSONB field
+ */
+export const ProjectDocumentSchema = z.object({
+  type: z.literal('document'),
+  id: z.string(),
+  title: z.string(),
+  content: z.string(),
+  metadata: z.record(z.unknown()),
+  created_at: z.string().optional(),
+  updated_at: z.string().optional(),
+});
+
+/**
+ * Schema for project feature in JSONB field
+ */
+export const ProjectFeatureSchema = z.object({
+  type: z.literal('feature'),
+  id: z.string(),
+  name: z.string(),
+  status: z.enum(['planned', 'in-progress', 'completed']),
+  description: z.string(),
+  priority: z.number().optional(),
+  assignee: z.string().optional(),
+  created_at: z.string().optional(),
+  updated_at: z.string().optional(),
+});
+
+/**
+ * Schema for project data in JSONB field
+ */
+export const ProjectDataSchema = z.object({
+  type: z.literal('data'),
+  key: z.string(),
+  value: z.unknown(),
+  timestamp: z.string(),
+  source: z.string().optional(),
+});
+
+/**
+ * Schema for task source references
+ */
+export const TaskSourceSchema = z.object({
+  url: z.string().optional(),
+  file: z.string().optional(),
+  type: z.enum(['documentation', 'code', 'internal_docs', 'external']),
+  relevance: z.string().optional(),
+  title: z.string().optional(),
+});
+
+/**
+ * Schema for task code examples
+ */
+export const TaskCodeExampleSchema = z.object({
+  file: z.string(),
+  function: z.string().optional(),
+  class: z.string().optional(),
+  purpose: z.string(),
+  language: z.string().optional(),
+  snippet: z.string().optional(),
+});
+
+/**
+ * Schema for creation progress tracking
+ */
+export const CreationProgressSchema = z.object({
+  progressId: z.string(),
+  status: z.enum([
+    'starting',
+    'initializing_agents',
+    'generating_docs',
+    'processing_requirements',
+    'ai_generation',
+    'finalizing_docs',
+    'saving_to_database',
+    'completed',
+    'error'
+  ]),
+  percentage: z.number(),
+  logs: z.array(z.string()),
+  error: z.string().optional(),
+  step: z.string().optional(),
+  currentStep: z.string().optional(),
+  eta: z.string().optional(),
+  duration: z.string().optional(),
+  project: z.lazy(() => ProjectSchema).optional(),
+});
+
+/**
+ * Main Project schema
+ */
+export const ProjectSchema = z.object({
+  id: z.string(),
+  title: z.string().min(1),
+  prd: z.record(z.unknown()).optional(),
+  docs: z.array(ProjectDocumentSchema).optional(),
+  features: z.array(ProjectFeatureSchema).optional(),
+  data: z.array(ProjectDataSchema).optional(),
+  github_repo: z.string().optional(),
+  created_at: z.string(),
+  updated_at: z.string(),
+  technical_sources: z.array(z.string()).optional(),
+  business_sources: z.array(z.string()).optional(),
+  description: z.string().optional(),
+  progress: z.number().optional(),
+  updated: z.string().optional(),
+  pinned: z.boolean(),
+  creationProgress: CreationProgressSchema.optional(),
+});
+
+/**
+ * Schema for Task
+ */
+export const TaskSchema = z.object({
+  id: z.string(),
+  project_id: z.string(),
+  title: z.string().min(1),
+  description: z.string().optional(),
+  status: z.enum(['todo', 'doing', 'review', 'done']),
+  assignee: z.string(),
+  task_order: z.number(),
+  feature: z.string().optional(),
+  sources: z.array(TaskSourceSchema).optional(),
+  code_examples: z.array(TaskCodeExampleSchema).optional(),
+  created_at: z.string(),
+  updated_at: z.string(),
+});
+
+/**
+ * Schema for Create Task DTO
+ */
+export const CreateTaskDtoSchema = z.object({
+  title: z.string().min(1),
+  description: z.string().optional(),
+  status: z.enum(['todo', 'doing', 'review', 'done']).default('todo'),
+  assignee: z.string().default('User'),
+  task_order: z.number().optional(),
+  feature: z.string().optional(),
+  sources: z.array(TaskSourceSchema).optional(),
+  code_examples: z.array(TaskCodeExampleSchema).optional(),
+});
+
+/**
+ * Schema for Update Task DTO
+ */
+export const UpdateTaskDtoSchema = z.object({
+  title: z.string().min(1).optional(),
+  description: z.string().optional(),
+  status: z.enum(['todo', 'doing', 'review', 'done']).optional(),
+  assignee: z.string().optional(),
+  task_order: z.number().optional(),
+  feature: z.string().optional(),
+  sources: z.array(TaskSourceSchema).optional(),
+  code_examples: z.array(TaskCodeExampleSchema).optional(),
+});
+
+/**
+ * Schema for task reorder data
+ */
+export const ReorderDataSchema = z.object({
+  tasks: z.array(z.object({
+    id: z.string(),
+    task_order: z.number(),
+  })),
+  sourceIndex: z.number().optional(),
+  destinationIndex: z.number().optional(),
+});
+
+/**
+ * Type exports inferred from schemas
+ */
+export type Project = z.infer<typeof ProjectSchema>;
+export type Task = z.infer<typeof TaskSchema>;
+export type CreateTaskDto = z.infer<typeof CreateTaskDtoSchema>;
+export type UpdateTaskDto = z.infer<typeof UpdateTaskDtoSchema>;
+export type ReorderData = z.infer<typeof ReorderDataSchema>;
+export type CreationProgress = z.infer<typeof CreationProgressSchema>;
+
+/**
+ * Validation functions
+ */
+export function validateProject(data: unknown): Project {
+  return ProjectSchema.parse(data);
+}
+
+export function safeParseProject(data: unknown): Project | null {
+  const result = ProjectSchema.safeParse(data);
+  if (result.success) {
+    return result.data;
+  }
+  console.error('Project validation failed:', result.error);
+  return null;
+}
+
+export function validateTask(data: unknown): Task {
+  return TaskSchema.parse(data);
+}
+
+export function safeParseTask(data: unknown): Task | null {
+  const result = TaskSchema.safeParse(data);
+  if (result.success) {
+    return result.data;
+  }
+  console.error('Task validation failed:', result.error);
+  return null;
+}
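A short usage sketch showing how these validators guard data coming off the wire. The fetchTask helper and the endpoint path are illustrative assumptions; only safeParseTask and the Task type come from the new file above:

```typescript
// Illustrative only; the endpoint path is an assumption.
import { safeParseTask, type Task } from '../schemas/project.schemas';

async function fetchTask(taskId: string): Promise<Task | null> {
  const response = await fetch(`/api/tasks/${taskId}`);
  const payload: unknown = await response.json();
  // Returns null (and logs the zod error) instead of throwing on malformed data.
  return safeParseTask(payload);
}
```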
@@ -36,7 +36,7 @@ export interface CrawlProgressData {
  currentStep?: string;
  logs?: string[];
  log?: string;
-  workers?: WorkerProgress[] | any[]; // Updated to support new worker format
+  workers?: WorkerProgress[]; // Updated to support new worker format
  error?: string;
  completed?: boolean;
  // Additional properties for document upload and crawling
@@ -50,6 +50,7 @@ export interface CrawlProgressData {
  wordCount?: number;
  duration?: string;
  sourceId?: string;
+  codeExamplesCount?: number;
  // Original crawl parameters for retry functionality
  originalCrawlParams?: {
    url: string;
@@ -98,7 +99,7 @@ interface StreamProgressOptions {
  connectionTimeout?: number;
}

-type ProgressCallback = (data: any) => void;
+type ProgressCallback = (data: CrawlProgressData) => void;

class CrawlProgressService {
  private wsService: WebSocketService = knowledgeSocketIO;
@@ -115,6 +116,9 @@ class CrawlProgressService {
    options: StreamProgressOptions = {}
  ): Promise<void> {
    console.log(`🚀 Starting Socket.IO progress stream for ${progressId}`);
+
+    // Store the active crawl progress ID in localStorage for reconnection
+    localStorage.setItem('activeCrawlProgressId', progressId);
+
    try {
      // Ensure we're connected to Socket.IO
@@ -141,7 +145,7 @@ class CrawlProgressService {
      }, 5000); // 5 second timeout for acknowledgment

      // Listen for subscription acknowledgment
-      const ackHandler = (message: any) => {
+      const ackHandler = (message: { data?: Record<string, unknown>; progress_id?: string; status?: string }) => {
        const data = message.data || message;
        console.log(`📨 Received acknowledgment:`, data);
        if (data.progress_id === progressId && data.status === 'subscribed') {
@@ -156,7 +160,7 @@ class CrawlProgressService {
      });

      // Create a specific handler for this progressId
-      const progressHandler = (message: any) => {
+      const progressHandler = (message: { data?: CrawlProgressData; progressId?: string }) => {
        console.log(`📨 [${progressId}] Raw message received:`, message);
        const data = message.data || message;
        console.log(`📨 [${progressId}] Extracted data:`, data);
@@ -185,6 +189,8 @@ class CrawlProgressService {
        console.log(`✅ Crawl completed for ${progressId}`);
        if (data.progressId === progressId) {
          onMessage({ ...data, completed: true });
+          // Clear the stored progress ID when crawl completes
+          localStorage.removeItem('activeCrawlProgressId');
        }
      });

@@ -197,6 +203,8 @@ class CrawlProgressService {
            error: message.data?.message || message.error || 'Unknown error',
            percentage: 0
          });
+          // Clear the stored progress ID on error
+          localStorage.removeItem('activeCrawlProgressId');
        }
      });

@@ -298,6 +306,12 @@ class CrawlProgressService {

    // Remove from active subscriptions
    this.activeSubscriptions.delete(progressId);
+
+    // Clear from localStorage if this is the active crawl
+    const storedId = localStorage.getItem('activeCrawlProgressId');
+    if (storedId === progressId) {
+      localStorage.removeItem('activeCrawlProgressId');
+    }
  }

  /**
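The hunks above only persist and clear activeCrawlProgressId; the resume path is not shown in this commit. A minimal sketch of how a page could pick a crawl back up after a reload, hedged as an assumption (resumeActiveCrawl is hypothetical, and the stream function is passed in rather than asserting the service's exact API):

```typescript
// Hypothetical resume logic; not part of this diff.
async function resumeActiveCrawl(
  streamProgress: (id: string, onMessage: (data: unknown) => void) => Promise<void>
): Promise<void> {
  const progressId = localStorage.getItem('activeCrawlProgressId');
  if (!progressId) return; // nothing in flight

  // Re-subscribe to the crawl that was running before the reload.
  await streamProgress(progressId, (data) => {
    console.log('Resumed crawl progress:', data);
  });
}
```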
@@ -378,8 +392,8 @@ class CrawlProgressService {
    progressId: string,
    callbacks: {
      onMessage: ProgressCallback;
-      onStateChange?: (state: any) => void;
-      onError?: (error: any) => void;
+      onStateChange?: (state: string) => void;
+      onError?: (error: Error) => void;
    },
    options: StreamProgressOptions = {}
  ): Promise<void> {
@@ -13,6 +13,7 @@
 */

import { io, Socket } from 'socket.io-client';
+import { OperationTracker, OperationResult } from '../utils/operationTracker';

export enum WebSocketState {
  CONNECTING = 'CONNECTING',
@@ -33,9 +34,9 @@ export interface WebSocketConfig {

export interface WebSocketMessage {
  type: string;
-  data?: any;
+  data?: unknown;
  timestamp?: string;
-  [key: string]: any;
+  [key: string]: unknown;
}

type MessageHandler = (message: WebSocketMessage) => void;
@@ -57,8 +58,12 @@ export class WebSocketService {
|
|||||||
private _state: WebSocketState = WebSocketState.DISCONNECTED;
|
private _state: WebSocketState = WebSocketState.DISCONNECTED;
|
||||||
|
|
||||||
// Deduplication support
|
// Deduplication support
|
||||||
private lastMessages: Map<string, { data: any; timestamp: number }> = new Map();
|
private lastMessages: Map<string, { data: unknown; timestamp: number }> = new Map();
|
||||||
private deduplicationWindow = 100; // 100ms window
|
private deduplicationWindow = 100; // 100ms window
|
||||||
|
|
||||||
|
// Operation tracking support
|
||||||
|
private operationTracker: OperationTracker | null = null;
|
||||||
|
private operationHandlers: Map<string, (result: OperationResult) => void> = new Map();
|
||||||
|
|
||||||
constructor(config: WebSocketConfig = {}) {
|
constructor(config: WebSocketConfig = {}) {
|
||||||
this.config = {
|
this.config = {
|
||||||
@@ -215,9 +220,9 @@ export class WebSocketService {
|
|||||||
|
|
||||||
this.socket.on('connect_error', (error: Error) => {
|
this.socket.on('connect_error', (error: Error) => {
|
||||||
console.error('❌ Socket.IO connection error:', error);
|
console.error('❌ Socket.IO connection error:', error);
|
||||||
console.error('❌ Error type:', (error as any).type);
|
console.error('❌ Error type:', (error as unknown as Record<string, unknown>).type);
|
||||||
console.error('❌ Error message:', error.message);
|
console.error('❌ Error message:', error.message);
|
||||||
console.error('❌ Socket transport:', this.socket?.io?.engine?.transport?.name);
|
console.error('❌ Socket transport:', (this.socket as unknown as { io?: { engine?: { transport?: { name?: string } } } })?.io?.engine?.transport?.name);
|
||||||
this.notifyError(error);
|
this.notifyError(error);
|
||||||
|
|
||||||
// Reject connection promise if still pending
|
// Reject connection promise if still pending
|
||||||
@@ -244,13 +249,20 @@ export class WebSocketService {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Handle incoming messages
|
// Handle incoming messages
|
||||||
this.socket.onAny((eventName: string, ...args: any[]) => {
|
this.socket.onAny((eventName: string, ...args: unknown[]) => {
|
||||||
// Skip internal Socket.IO events
|
// Skip internal Socket.IO events
|
||||||
if (eventName.startsWith('connect') || eventName.startsWith('disconnect') ||
|
if (eventName.startsWith('connect') || eventName.startsWith('disconnect') ||
|
||||||
eventName.startsWith('reconnect') || eventName === 'error') {
|
eventName.startsWith('reconnect') || eventName === 'error') {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check for operation responses
|
||||||
|
if (eventName === 'operation_response' && args[0]) {
|
||||||
|
const response = args[0] as { operationId: string; success: boolean; data?: unknown; error?: string };
|
||||||
|
this.handleOperationResponse(response);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Convert Socket.IO event to WebSocket message format
|
// Convert Socket.IO event to WebSocket message format
|
||||||
const message: WebSocketMessage = {
|
const message: WebSocketMessage = {
|
||||||
type: eventName,
|
type: eventName,
|
||||||
@@ -264,11 +276,16 @@ export class WebSocketService {
|
|||||||
Object.assign(message, args[0]);
|
Object.assign(message, args[0]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Use unified message processing check
|
||||||
|
if (!this.shouldProcessMessage(message)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
this.handleMessage(message);
|
this.handleMessage(message);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
private isDuplicateMessage(type: string, data: any): boolean {
|
private isDuplicateMessage(type: string, data: unknown): boolean {
|
||||||
const lastMessage = this.lastMessages.get(type);
|
const lastMessage = this.lastMessages.get(type);
|
||||||
if (!lastMessage) return false;
|
if (!lastMessage) return false;
|
||||||
|
|
||||||
@@ -288,11 +305,6 @@ export class WebSocketService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private handleMessage(message: WebSocketMessage): void {
|
private handleMessage(message: WebSocketMessage): void {
|
||||||
// Add deduplication check
|
|
||||||
if (this.isDuplicateMessage(message.type, message.data)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Store message for deduplication
|
// Store message for deduplication
|
||||||
this.lastMessages.set(message.type, {
|
this.lastMessages.set(message.type, {
|
||||||
data: message.data,
|
data: message.data,
|
||||||
@@ -394,29 +406,170 @@ export class WebSocketService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Send a message via Socket.IO
|
* Send a message via Socket.IO with optional operation tracking
|
||||||
*/
|
*/
|
||||||
send(data: any): boolean {
|
send(data: unknown, trackOperation?: boolean): boolean | string {
|
||||||
if (!this.isConnected()) {
|
if (!this.isConnected()) {
|
||||||
console.warn('Cannot send message: Socket.IO not connected');
|
console.warn('Cannot send message: Socket.IO not connected');
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
let operationId: string | undefined;
|
||||||
|
|
||||||
|
// Track operation if requested
|
||||||
|
if (trackOperation && this.operationTracker) {
|
||||||
|
const messageData = data as { type?: string };
|
||||||
|
operationId = this.operationTracker.createOperation(
|
||||||
|
messageData.type || 'message',
|
||||||
|
data
|
||||||
|
);
|
||||||
|
|
||||||
|
// Add operation ID to the message
|
||||||
|
const trackedData = { ...messageData, operationId };
|
||||||
|
data = trackedData;
|
||||||
|
}
|
||||||
|
|
||||||
// For Socket.IO, we emit events based on message type
|
// For Socket.IO, we emit events based on message type
|
||||||
if (data.type) {
|
const messageData = data as { type?: string; data?: unknown };
|
||||||
this.socket!.emit(data.type, data.data || data);
|
if (messageData.type) {
|
||||||
|
this.socket!.emit(messageData.type, messageData.data || data);
|
||||||
} else {
|
} else {
|
||||||
// Default message event
|
// Default message event
|
||||||
this.socket!.emit('message', data);
|
this.socket!.emit('message', data);
|
||||||
}
|
}
|
||||||
return true;
|
|
||||||
|
return operationId || true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to send message:', error);
|
console.error('Failed to send message:', error);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Enhanced emit method with automatic operation ID tracking for echo suppression
|
||||||
|
private pendingOperations = new Map<string, NodeJS.Timeout>();
|
||||||
|
|
||||||
|
emit(event: string, data: unknown): string {
|
||||||
|
const operationId = crypto.randomUUID();
|
||||||
|
const payload = { ...(typeof data === 'object' && data !== null ? data : {}), operationId };
|
||||||
|
|
||||||
|
// Track pending operation
|
||||||
|
const timeout = setTimeout(() => {
|
||||||
|
this.pendingOperations.delete(operationId);
|
||||||
|
}, 5000);
|
||||||
|
this.pendingOperations.set(operationId, timeout);
|
||||||
|
|
||||||
|
// Emit with operation ID
|
||||||
|
if (this.socket) {
|
||||||
|
this.socket.emit(event, payload);
|
||||||
|
}
|
||||||
|
|
||||||
|
return operationId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send a tracked operation and wait for response
|
||||||
|
*/
|
||||||
|
async sendTrackedOperation(data: unknown, timeout?: number): Promise<OperationResult> {
|
||||||
|
if (!this.operationTracker) {
|
||||||
|
throw new Error('Operation tracking not enabled');
|
||||||
|
}
|
||||||
|
|
||||||
|
const messageData = data as { type?: string };
|
||||||
|
const operationId = this.operationTracker.createOperation(
|
||||||
|
messageData.type || 'message',
|
||||||
|
data
|
||||||
|
);
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
// Set up operation handler
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
this.operationHandlers.delete(operationId);
|
||||||
|
const result = this.operationTracker!.failOperation(
|
||||||
|
operationId,
|
||||||
|
'Operation timed out'
|
||||||
|
);
|
||||||
|
reject(new Error(result.error));
|
||||||
|
}, timeout || 30000);
|
||||||
|
|
||||||
|
this.operationHandlers.set(operationId, (result: OperationResult) => {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
this.operationHandlers.delete(operationId);
|
||||||
|
|
||||||
|
if (result.success) {
|
||||||
|
resolve(result);
|
||||||
|
} else {
|
||||||
|
reject(new Error(result.error || 'Operation failed'));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send the tracked message
|
||||||
|
const trackedData = { ...messageData, operationId };
|
||||||
|
const sent = this.send(trackedData, false); // Don't double-track
|
||||||
|
|
||||||
|
if (!sent) {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
this.operationHandlers.delete(operationId);
|
||||||
|
reject(new Error('Failed to send message'));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle operation response from server
|
||||||
|
*/
|
||||||
|
private handleOperationResponse(response: {
|
||||||
|
operationId: string;
|
||||||
|
success: boolean;
|
||||||
|
data?: unknown;
|
||||||
|
error?: string;
|
||||||
|
}): void {
|
||||||
|
if (!this.operationTracker) return;
|
||||||
|
|
||||||
|
const result = response.success
|
||||||
|
? this.operationTracker.completeOperation(response.operationId, response.data)
|
||||||
|
: this.operationTracker.failOperation(response.operationId, response.error || 'Unknown error');
|
||||||
|
|
||||||
|
// Notify handler if exists
|
||||||
|
const handler = this.operationHandlers.get(response.operationId);
|
||||||
|
if (handler) {
|
||||||
|
handler(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unified method to check if a message should be processed
|
||||||
|
* Consolidates echo suppression and deduplication logic
|
||||||
|
*/
|
||||||
|
private shouldProcessMessage(message: WebSocketMessage): boolean {
|
||||||
|
// Check for operation ID echo suppression
|
||||||
|
if (message.data && typeof message.data === 'object' && 'operationId' in message.data) {
|
||||||
|
const operationId = (message.data as Record<string, unknown>).operationId as string;
|
||||||
|
|
||||||
|
// Check pending operations map first (for immediate echoes)
|
||||||
|
if (this.pendingOperations.has(operationId)) {
|
||||||
|
const timeout = this.pendingOperations.get(operationId);
|
||||||
|
if (timeout) clearTimeout(timeout);
|
||||||
|
this.pendingOperations.delete(operationId);
|
||||||
|
console.log(`[Socket] Suppressing echo for pending operation ${operationId}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check operation tracker (for tracked operations)
|
||||||
|
if (this.operationTracker?.shouldSuppress(operationId)) {
|
||||||
|
console.log(`[Socket] Suppressing tracked operation ${operationId}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for duplicate messages
|
||||||
|
if (this.isDuplicateMessage(message.type, message.data)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Wait for connection to be established
|
* Wait for connection to be established
|
||||||
*/
|
*/
|
||||||
@@ -462,6 +615,38 @@ export class WebSocketService {
|
|||||||
this.deduplicationWindow = windowMs;
|
this.deduplicationWindow = windowMs;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enable operation tracking
|
||||||
|
*/
|
||||||
|
enableOperationTracking(timeout?: number): void {
|
||||||
|
if (!this.operationTracker) {
|
||||||
|
this.operationTracker = new OperationTracker(timeout);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Disable operation tracking
|
||||||
|
*/
|
||||||
|
disableOperationTracking(): void {
|
||||||
|
if (this.operationTracker) {
|
||||||
|
this.operationTracker.destroy();
|
||||||
|
this.operationTracker = null;
|
||||||
|
this.operationHandlers.clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get operation tracking statistics
|
||||||
|
*/
|
||||||
|
getOperationStats(): {
|
||||||
|
total: number;
|
||||||
|
pending: number;
|
||||||
|
completed: number;
|
||||||
|
failed: number;
|
||||||
|
} | null {
|
||||||
|
return this.operationTracker?.getStats() || null;
|
||||||
|
}
|
||||||
|
|
||||||
disconnect(): void {
|
disconnect(): void {
|
||||||
this.setState(WebSocketState.DISCONNECTED);
|
this.setState(WebSocketState.DISCONNECTED);
|
||||||
|
|
||||||
@@ -478,6 +663,13 @@ export class WebSocketService {
|
|||||||
this.connectionResolver = null;
|
this.connectionResolver = null;
|
||||||
this.connectionRejector = null;
|
this.connectionRejector = null;
|
||||||
this.lastMessages.clear(); // Clear deduplication cache
|
this.lastMessages.clear(); // Clear deduplication cache
|
||||||
|
|
||||||
|
// Clean up operation tracking
|
||||||
|
if (this.operationTracker) {
|
||||||
|
this.operationTracker.destroy();
|
||||||
|
this.operationTracker = null;
|
||||||
|
}
|
||||||
|
this.operationHandlers.clear();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -486,9 +678,15 @@ export function createWebSocketService(config?: WebSocketConfig): WebSocketServi
|
|||||||
return new WebSocketService(config);
|
return new WebSocketService(config);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Export singleton instances for different features
|
// Create a SINGLE shared WebSocket instance to prevent multiple connections
|
||||||
export const knowledgeSocketIO = new WebSocketService();
|
// This fixes the socket disconnection issue when switching tabs
|
||||||
|
const sharedSocketInstance = new WebSocketService();
|
||||||
|
|
||||||
// Export instances for backward compatibility
|
// Export the SAME instance with different names for backward compatibility
|
||||||
export const taskUpdateSocketIO = new WebSocketService();
|
// This ensures only ONE Socket.IO connection is created and shared across all features
|
||||||
export const projectListSocketIO = new WebSocketService();
|
export const knowledgeSocketIO = sharedSocketInstance;
|
||||||
|
export const taskUpdateSocketIO = sharedSocketInstance;
|
||||||
|
export const projectListSocketIO = sharedSocketInstance;
|
||||||
|
|
||||||
|
// Export as default for new code
|
||||||
|
export default sharedSocketInstance;
|
||||||
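A minimal usage sketch of the tracked-send API added above (not part of the commit; the `join_project` event name is borrowed from taskSocketService below, the project id is hypothetical, and the socket is assumed to be already connected):

```ts
import socket from './socketIOService';

// Turn on echo suppression once, early in app startup.
socket.enableOperationTracking();

async function joinProject(projectId: string) {
  // Resolves when the server emits operation_response for this operationId,
  // rejects on failure or after the 30s default timeout.
  const result = await socket.sendTrackedOperation({
    type: 'join_project',   // event name used by taskSocketService
    project_id: projectId,  // hypothetical payload field
  });
  return result.data;
}
```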
@@ -13,7 +13,8 @@
  * - Proper session identification
  */

-import { WebSocketService, WebSocketState } from './socketIOService';
+import { WebSocketState } from './socketIOService';
+import sharedSocketInstance from './socketIOService';

 export interface Task {
   id: string;
@@ -38,7 +39,7 @@ export interface TaskSocketEvents {

 class TaskSocketService {
   private static instance: TaskSocketService | null = null;
-  private socketService: WebSocketService;
+  private socketService: typeof sharedSocketInstance;
   private currentProjectId: string | null = null;
   private eventHandlers: Map<string, TaskSocketEvents> = new Map();
   private connectionPromise: Promise<void> | null = null;
@@ -47,13 +48,11 @@ class TaskSocketService {
   private connectionCooldown = 1000; // 1 second cooldown between connection attempts

   private constructor() {
-    this.socketService = new WebSocketService({
-      maxReconnectAttempts: 5,
-      reconnectInterval: 1000,
-      heartbeatInterval: 30000,
-      enableAutoReconnect: true,
-      enableHeartbeat: true
-    });
+    // Use the shared socket instance instead of creating a new one
+    this.socketService = sharedSocketInstance;
+
+    // Enable operation tracking for echo suppression
+    this.socketService.enableOperationTracking();

     // Set up global event handlers
     this.setupGlobalHandlers();
@@ -191,7 +190,7 @@ class TaskSocketService {
     const joinSuccess = this.socketService.send({
       type: 'join_project',
       project_id: projectId
-    });
+    }, true); // Enable operation tracking

     if (!joinSuccess) {
       throw new Error('Failed to send join_project message');
@@ -214,7 +213,7 @@ class TaskSocketService {
     this.socketService.send({
       type: 'leave_project',
       project_id: this.currentProjectId
-    });
+    }, true); // Enable operation tracking

     this.currentProjectId = null;
   }
185 archon-ui-main/src/types/document.ts Normal file
@@ -0,0 +1,185 @@
/**
 * Type definitions for document content
 * Replaces 'any' types with proper typed unions
 */

/**
 * Markdown content stored as a string
 */
export interface MarkdownContent {
  type: 'markdown';
  markdown: string;
}

/**
 * PRP (Product Requirement Prompt) document content
 */
export interface PRPContent {
  type: 'prp';
  document_type: 'prp';
  title: string;
  version: string;
  author: string;
  date: string;
  status: 'draft' | 'review' | 'approved' | 'deprecated';
  goal?: string;
  why?: string[];
  what?: {
    description: string;
    success_criteria: string[];
    user_stories?: string[];
  };
  context?: {
    documentation?: Array<{ source: string; why: string }>;
    existing_code?: Array<{ file: string; purpose: string }>;
    gotchas?: string[];
    current_state?: string;
    dependencies?: string[];
    environment_variables?: string[];
  };
  implementation_blueprint?: Record<string, any>;
  validation?: Record<string, any>;
  additional_context?: Record<string, any>;
}

/**
 * Generic structured document content
 */
export interface StructuredContent {
  type: 'structured';
  [key: string]: any;
}

/**
 * Union type for all document content types
 */
export type DocumentContent = string | MarkdownContent | PRPContent | StructuredContent;

/**
 * Complete document interface with typed content
 */
export interface ProjectDocument {
  id: string;
  title: string;
  content?: DocumentContent;
  created_at: string;
  updated_at: string;
  document_type?: string;
  metadata?: Record<string, unknown>;
}

/**
 * Type guard to check if content is markdown
 */
export function isMarkdownContent(content: unknown): content is MarkdownContent {
  return (
    typeof content === 'object' &&
    content !== null &&
    'type' in content &&
    (content as any).type === 'markdown' &&
    'markdown' in content
  );
}

/**
 * Type guard to check if content is PRP
 */
export function isPRPContent(content: unknown): content is PRPContent {
  return (
    typeof content === 'object' &&
    content !== null &&
    'document_type' in content &&
    (content as any).document_type === 'prp'
  );
}

/**
 * Type guard to check if content is structured
 */
export function isStructuredContent(content: unknown): content is StructuredContent {
  return (
    typeof content === 'object' &&
    content !== null &&
    'type' in content &&
    (content as any).type === 'structured'
  );
}

/**
 * Helper to extract markdown string from any content type
 */
export function getMarkdownFromContent(content: DocumentContent | undefined): string {
  if (!content) return '';

  if (typeof content === 'string') {
    return content;
  }

  if (isMarkdownContent(content)) {
    return content.markdown;
  }

  if (isPRPContent(content)) {
    // Convert PRP to markdown representation
    return convertPRPToMarkdown(content);
  }

  if (isStructuredContent(content)) {
    // Convert structured content to markdown
    return JSON.stringify(content, null, 2);
  }

  return '';
}

/**
 * Convert PRP content to markdown string
 */
function convertPRPToMarkdown(prp: PRPContent): string {
  let markdown = `# ${prp.title}\n\n`;

  // Add metadata
  markdown += `**Version:** ${prp.version}\n`;
  markdown += `**Author:** ${prp.author}\n`;
  markdown += `**Date:** ${prp.date}\n`;
  markdown += `**Status:** ${prp.status}\n\n`;

  // Add goal
  if (prp.goal) {
    markdown += `## Goal\n\n${prp.goal}\n\n`;
  }

  // Add why section
  if (prp.why && prp.why.length > 0) {
    markdown += `## Why\n\n`;
    prp.why.forEach(item => {
      markdown += `- ${item}\n`;
    });
    markdown += '\n';
  }

  // Add what section
  if (prp.what) {
    markdown += `## What\n\n${prp.what.description}\n\n`;

    if (prp.what.success_criteria && prp.what.success_criteria.length > 0) {
      markdown += `### Success Criteria\n\n`;
      prp.what.success_criteria.forEach(item => {
        markdown += `- ${item}\n`;
      });
      markdown += '\n';
    }

    if (prp.what.user_stories && prp.what.user_stories.length > 0) {
      markdown += `### User Stories\n\n`;
      prp.what.user_stories.forEach(item => {
        markdown += `- ${item}\n`;
      });
      markdown += '\n';
    }
  }

  // Add other sections as needed

  return markdown;
}
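A small sketch of how the helpers in document.ts might be consumed (illustrative only; `renderDocument` is a hypothetical caller, not part of the commit):

```ts
import { getMarkdownFromContent, isPRPContent } from './document';
import type { DocumentContent } from './document';

function renderDocument(content: DocumentContent | undefined): string {
  // Plain strings and MarkdownContent pass through unchanged; PRP and
  // structured content are converted to a markdown representation.
  const markdown = getMarkdownFromContent(content);
  return isPRPContent(content) ? `<!-- PRP -->\n${markdown}` : markdown;
}
```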
81 archon-ui-main/src/types/jsonb.ts Normal file
@@ -0,0 +1,81 @@
/**
 * Type definitions for JSONB fields in the database
 * These replace the previous any[] types with proper discriminated unions
 */

/**
 * Document stored in project docs field
 */
export interface ProjectDocument {
  type: 'document';
  id: string;
  title: string;
  content: string;
  metadata: Record<string, unknown>;
  created_at?: string;
  updated_at?: string;
}

/**
 * Feature stored in project features field
 */
export interface ProjectFeature {
  type: 'feature';
  id: string;
  name: string;
  status: 'planned' | 'in-progress' | 'completed';
  description: string;
  priority?: number;
  assignee?: string;
  created_at?: string;
  updated_at?: string;
}

/**
 * Data stored in project data field
 */
export interface ProjectData {
  type: 'data';
  key: string;
  value: unknown;
  timestamp: string;
  source?: string;
}

/**
 * Source reference for tasks
 */
export interface TaskSource {
  url?: string;
  file?: string;
  type: 'documentation' | 'code' | 'internal_docs' | 'external';
  relevance?: string;
  title?: string;
}

/**
 * Code example reference for tasks
 */
export interface TaskCodeExample {
  file: string;
  function?: string;
  class?: string;
  purpose: string;
  language?: string;
  snippet?: string;
}

/**
 * Union type for all JSONB content types
 */
export type JsonbContent = ProjectDocument | ProjectFeature | ProjectData;

/**
 * Re-export type guards from the canonical location
 * These use Zod schemas for validation which is more robust
 */
export {
  isProjectDocument,
  isProjectFeature,
  isProjectData
} from '../utils/typeGuards';
@@ -11,17 +11,17 @@ export type UITaskStatus = 'backlog' | 'in-progress' | 'review' | 'complete';
 export type TaskPriority = 'low' | 'medium' | 'high' | 'critical';


-// Assignee type - simplified to predefined options
-export type Assignee = 'User' | 'Archon' | 'AI IDE Agent';
+// Assignee type - flexible string to support MCP subagents
+export type Assignee = string;

 // Base Project interface (matches database schema)
 export interface Project {
   id: string;
   title: string;
   prd?: Record<string, any>; // JSONB field
-  docs?: any[]; // JSONB field
-  features?: any[]; // JSONB field
-  data?: any[]; // JSONB field
+  docs?: import('./jsonb').ProjectDocument[]; // Typed JSONB field
+  features?: import('./jsonb').ProjectFeature[]; // Typed JSONB field
+  data?: import('./jsonb').ProjectData[]; // Typed JSONB field
   github_repo?: string;
   created_at: string;
   updated_at: string;
@@ -59,8 +59,8 @@ export interface Task {
   assignee: Assignee; // Now a database column with enum constraint
   task_order: number; // New database column for priority ordering
   feature?: string; // New database column for feature name
-  sources?: any[]; // JSONB field
-  code_examples?: any[]; // JSONB field
+  sources?: import('./jsonb').TaskSource[]; // Typed JSONB field
+  code_examples?: import('./jsonb').TaskCodeExample[]; // Typed JSONB field
   created_at: string;
   updated_at: string;

@@ -85,9 +85,9 @@ export interface CreateProjectRequest {
   pinned?: boolean;
   // Note: PRD data should be stored as a document in the docs array with document_type="prd"
   // not as a direct 'prd' field since this column doesn't exist in the database
-  docs?: any[];
-  features?: any[];
-  data?: any[];
+  docs?: import('./jsonb').ProjectDocument[];
+  features?: import('./jsonb').ProjectFeature[];
+  data?: import('./jsonb').ProjectData[];
   technical_sources?: string[];
   business_sources?: string[];
 }
@@ -98,9 +98,9 @@ export interface UpdateProjectRequest {
   description?: string;
   github_repo?: string;
   prd?: Record<string, any>;
-  docs?: any[];
-  features?: any[];
-  data?: any[];
+  docs?: import('./jsonb').ProjectDocument[];
+  features?: import('./jsonb').ProjectFeature[];
+  data?: import('./jsonb').ProjectData[];
   technical_sources?: string[];
   business_sources?: string[];
   pinned?: boolean;
@@ -117,8 +117,8 @@ export interface CreateTaskRequest {
   feature?: string;
   featureColor?: string;
   priority?: TaskPriority;
-  sources?: any[];
-  code_examples?: any[];
+  sources?: import('./jsonb').TaskSource[];
+  code_examples?: import('./jsonb').TaskCodeExample[];
 }

 // Update task request
@@ -131,8 +131,8 @@ export interface UpdateTaskRequest {
   feature?: string;
   featureColor?: string;
   priority?: TaskPriority;
-  sources?: any[];
-  code_examples?: any[];
+  sources?: import('./jsonb').TaskSource[];
+  code_examples?: import('./jsonb').TaskCodeExample[];
 }

 // MCP tool response types
@@ -195,7 +195,13 @@ export const statusMappings = {
 export function dbTaskToUITask(dbTask: Task): Task {
   return {
     ...dbTask,
-    uiStatus: statusMappings.dbToUI[dbTask.status]
+    uiStatus: statusMappings.dbToUI[dbTask.status || 'todo'],
+    // Ensure all required fields have defaults
+    title: dbTask.title || '',
+    description: dbTask.description || '',
+    assignee: dbTask.assignee || 'User',
+    feature: dbTask.feature || 'General',
+    task_order: dbTask.task_order || 0
   };
 }

66 archon-ui-main/src/utils/clipboard.ts Normal file
@@ -0,0 +1,66 @@
/**
 * Clipboard utility with fallback for non-secure contexts
 * Works on both HTTPS and HTTP connections
 */

/**
 * Copies text to clipboard with fallback for non-secure contexts
 * @param text - The text to copy to clipboard
 * @returns Promise<boolean> - Returns true if successful, false otherwise
 */
export async function copyToClipboard(text: string): Promise<boolean> {
  // First try the modern clipboard API (works on HTTPS/localhost)
  if (navigator.clipboard && window.isSecureContext) {
    try {
      await navigator.clipboard.writeText(text);
      return true;
    } catch (err) {
      console.warn('Clipboard API failed, trying fallback:', err);
    }
  }

  // Fallback method using execCommand (works on HTTP)
  try {
    // Create a temporary textarea element
    const textarea = document.createElement('textarea');
    textarea.value = text;
    textarea.style.position = 'fixed';
    textarea.style.left = '-999999px';
    textarea.style.top = '-999999px';
    textarea.setAttribute('readonly', ''); // Prevent keyboard from showing on mobile

    document.body.appendChild(textarea);

    // Select the text
    textarea.select();
    textarea.setSelectionRange(0, 99999); // For mobile devices

    // Copy the text
    const successful = document.execCommand('copy');

    // Remove the temporary element
    document.body.removeChild(textarea);

    if (successful) {
      return true;
    } else {
      console.warn('execCommand copy failed');
      return false;
    }
  } catch (err) {
    console.error('Fallback copy method failed:', err);
    return false;
  }
}

/**
 * Check if clipboard is available (for UI feedback)
 * @returns boolean - Returns true if clipboard operations are available
 */
export function isClipboardAvailable(): boolean {
  // Clipboard is available if either method works
  return !!(
    (navigator.clipboard && window.isSecureContext) ||
    document.queryCommandSupported?.('copy')
  );
}
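A brief usage sketch for the clipboard helper (illustrative; `onCopyClick` and `notify` are hypothetical, not part of the commit):

```ts
import { copyToClipboard, isClipboardAvailable } from './clipboard';

// Hypothetical click handler for a "copy" button in the UI.
async function onCopyClick(text: string, notify: (msg: string) => void) {
  if (!isClipboardAvailable()) {
    notify('Clipboard not available in this browser');
    return;
  }
  const ok = await copyToClipboard(text); // falls back to execCommand on HTTP
  notify(ok ? 'Copied!' : 'Copy failed');
}
```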
91 archon-ui-main/src/utils/logger.ts Normal file
@@ -0,0 +1,91 @@
/**
 * Simple logger utility for alpha development
 * Can be toggled via LOG_LEVEL environment variable or disabled in production
 */

type LogLevel = 'debug' | 'info' | 'warn' | 'error';

interface LoggerConfig {
  enabled: boolean;
  level: LogLevel;
  prefix?: string;
}

class Logger {
  private config: LoggerConfig;
  private levels: Record<LogLevel, number> = {
    debug: 0,
    info: 1,
    warn: 2,
    error: 3
  };

  constructor(config: Partial<LoggerConfig> = {}) {
    this.config = {
      enabled: import.meta.env.DEV || import.meta.env.VITE_LOG_LEVEL !== 'none',
      level: (import.meta.env.VITE_LOG_LEVEL as LogLevel) || 'info',
      ...config
    };
  }

  private shouldLog(level: LogLevel): boolean {
    if (!this.config.enabled) return false;
    return this.levels[level] >= this.levels[this.config.level];
  }

  private formatMessage(level: LogLevel, message: string, data?: any): string {
    const timestamp = new Date().toISOString();
    const prefix = this.config.prefix ? `[${this.config.prefix}]` : '';
    return `${timestamp} [${level.toUpperCase()}]${prefix} ${message}`;
  }

  debug(message: string, data?: any): void {
    if (this.shouldLog('debug')) {
      console.log(this.formatMessage('debug', message), data || '');
    }
  }

  info(message: string, data?: any): void {
    if (this.shouldLog('info')) {
      console.log(this.formatMessage('info', message), data || '');
    }
  }

  warn(message: string, data?: any): void {
    if (this.shouldLog('warn')) {
      console.warn(this.formatMessage('warn', message), data || '');
    }
  }

  error(message: string, data?: any): void {
    if (this.shouldLog('error')) {
      console.error(this.formatMessage('error', message), data || '');
    }
  }

  // Time tracking for performance monitoring
  time(label: string): void {
    if (this.shouldLog('debug')) {
      console.time(label);
    }
  }

  timeEnd(label: string): void {
    if (this.shouldLog('debug')) {
      console.timeEnd(label);
    }
  }
}

// Create logger instances for different modules
export const createLogger = (prefix?: string): Logger => {
  return new Logger({ prefix });
};

// Default logger instance
export const logger = createLogger();

// Specialized loggers for different components
export const docsLogger = createLogger('DOCS');
export const socketLogger = createLogger('SOCKET');
export const apiLogger = createLogger('API');
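A short usage sketch for the logger (illustrative; the `CRAWL` prefix and messages are made up, not part of the commit):

```ts
import { createLogger } from './logger';

const crawlLogger = createLogger('CRAWL'); // hypothetical module prefix

crawlLogger.time('crawl');                               // only active at the debug level
crawlLogger.info('Starting crawl', { progressId: 'abc-123' });
crawlLogger.warn('Retrying page fetch');
crawlLogger.timeEnd('crawl');                            // only active at the debug level
```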
283 archon-ui-main/src/utils/operationTracker.ts Normal file
@@ -0,0 +1,283 @@
/**
 * Operation tracking for Socket.IO echo suppression
 * Tracks outgoing operations to prevent processing their echoes
 */

// Using crypto.randomUUID instead of uuid package to avoid dependency bloat
const generateId = (): string => {
  return crypto.randomUUID();
};

export interface TrackedOperation {
  id: string;
  type: string;
  timestamp: number;
  payload: unknown;
  status: 'pending' | 'completed' | 'failed';
  timeout?: NodeJS.Timeout;
}

export interface OperationResult {
  operationId: string;
  success: boolean;
  data?: unknown;
  error?: string;
}

export class OperationTracker {
  private operations: Map<string, TrackedOperation> = new Map();
  private operationTimeout: number = 30000; // 30 seconds default
  private cleanupInterval: NodeJS.Timeout | null = null;
  private readonly maxOperationAge = 60000; // 1 minute

  constructor(timeout?: number) {
    if (timeout) {
      this.operationTimeout = timeout;
    }
    this.startCleanupInterval();
  }

  /**
   * Create a new tracked operation
   */
  createOperation(type: string, payload?: unknown): string {
    const operationId = generateId();

    // Set timeout for operation
    const timeout = setTimeout(() => {
      this.failOperation(operationId, 'Operation timed out');
    }, this.operationTimeout);

    const operation: TrackedOperation = {
      id: operationId,
      type,
      timestamp: Date.now(),
      payload,
      status: 'pending',
      timeout
    };

    this.operations.set(operationId, operation);
    return operationId;
  }

  /**
   * Check if an operation exists and is pending
   */
  isPending(operationId: string): boolean {
    const operation = this.operations.get(operationId);
    return operation?.status === 'pending';
  }

  /**
   * Check if an operation should be suppressed (exists and not failed)
   */
  shouldSuppress(operationId: string): boolean {
    const operation = this.operations.get(operationId);
    return operation !== undefined && operation.status !== 'failed';
  }

  /**
   * Mark an operation as completed
   */
  completeOperation(operationId: string, data?: unknown): OperationResult {
    const operation = this.operations.get(operationId);

    if (!operation) {
      return {
        operationId,
        success: false,
        error: 'Operation not found'
      };
    }

    // Clear timeout
    if (operation.timeout) {
      clearTimeout(operation.timeout);
    }

    operation.status = 'completed';

    return {
      operationId,
      success: true,
      data
    };
  }

  /**
   * Mark an operation as failed
   */
  failOperation(operationId: string, error: string): OperationResult {
    const operation = this.operations.get(operationId);

    if (!operation) {
      return {
        operationId,
        success: false,
        error: 'Operation not found'
      };
    }

    // Clear timeout
    if (operation.timeout) {
      clearTimeout(operation.timeout);
    }

    operation.status = 'failed';

    return {
      operationId,
      success: false,
      error
    };
  }

  /**
   * Get operation details
   */
  getOperation(operationId: string): TrackedOperation | undefined {
    return this.operations.get(operationId);
  }

  /**
   * Get all pending operations of a specific type
   */
  getPendingOperations(type?: string): TrackedOperation[] {
    const pending = Array.from(this.operations.values()).filter(
      op => op.status === 'pending'
    );

    if (type) {
      return pending.filter(op => op.type === type);
    }

    return pending;
  }

  /**
   * Clean up old operations to prevent memory leaks
   */
  private cleanup(): void {
    const now = Date.now();
    const idsToDelete: string[] = [];

    this.operations.forEach((operation, id) => {
      if (now - operation.timestamp > this.maxOperationAge) {
        // Clear timeout if still exists
        if (operation.timeout) {
          clearTimeout(operation.timeout);
        }
        idsToDelete.push(id);
      }
    });

    idsToDelete.forEach(id => this.operations.delete(id));
  }

  /**
   * Start periodic cleanup
   */
  private startCleanupInterval(): void {
    // Ensure we don't create multiple intervals
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
    }

    // Run cleanup every 30 seconds
    this.cleanupInterval = setInterval(() => {
      this.cleanup();
    }, 30000);
  }

  /**
   * Stop cleanup interval and clear all operations
   */
  destroy(): void {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null;
    }

    // Clear all timeouts
    this.operations.forEach(operation => {
      if (operation.timeout) {
        clearTimeout(operation.timeout);
      }
    });

    this.operations.clear();
  }

  /**
   * Get statistics about tracked operations
   */
  getStats(): {
    total: number;
    pending: number;
    completed: number;
    failed: number;
  } {
    let pending = 0;
    let completed = 0;
    let failed = 0;

    this.operations.forEach(operation => {
      switch (operation.status) {
        case 'pending':
          pending++;
          break;
        case 'completed':
          completed++;
          break;
        case 'failed':
          failed++;
          break;
      }
    });

    return {
      total: this.operations.size,
      pending,
      completed,
      failed
    };
  }

  /**
   * Clear completed operations (keep pending and recently failed)
   */
  clearCompleted(): void {
    const now = Date.now();
    const idsToDelete: string[] = [];

    this.operations.forEach((operation, id) => {
      if (operation.status === 'completed' ||
          (operation.status === 'failed' && now - operation.timestamp > 5000)) {
        if (operation.timeout) {
          clearTimeout(operation.timeout);
        }
        idsToDelete.push(id);
      }
    });

    idsToDelete.forEach(id => this.operations.delete(id));
  }
}

// Singleton instance for global operation tracking
let globalTracker: OperationTracker | null = null;

export function getGlobalOperationTracker(): OperationTracker {
  if (!globalTracker) {
    globalTracker = new OperationTracker();
  }
  return globalTracker;
}

export function resetGlobalOperationTracker(): void {
  if (globalTracker) {
    globalTracker.destroy();
    globalTracker = null;
  }
}
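A compact sketch of the OperationTracker lifecycle (illustrative; the event type and payload are hypothetical, not part of the commit):

```ts
import { OperationTracker } from './operationTracker';

const tracker = new OperationTracker(10_000); // 10s timeout instead of the 30s default
const opId = tracker.createOperation('update_task', { taskId: 't-1' }); // hypothetical payload

// Later, when a message with the same operationId comes back on the socket,
// drop the echo instead of re-applying our own change.
if (tracker.shouldSuppress(opId)) {
  // skip processing
}

tracker.completeOperation(opId, { ok: true });
console.log(tracker.getStats()); // { total: 1, pending: 0, completed: 1, failed: 0 }
tracker.destroy();               // clears timers when the service shuts down
```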
252 archon-ui-main/src/utils/typeGuards.ts Normal file
@@ -0,0 +1,252 @@
/**
 * Type guards and utility functions for type safety
 */

import {
  ProjectDocumentSchema,
  ProjectFeatureSchema,
  ProjectDataSchema,
  TaskSourceSchema,
  TaskCodeExampleSchema,
  ProjectSchema,
  TaskSchema
} from '../schemas/project.schemas';
import type {
  ProjectDocument,
  ProjectFeature,
  ProjectData,
  TaskSource,
  TaskCodeExample
} from '../types/jsonb';
import type { Project, Task } from '../types/project';

/**
 * Type guard to check if value is a ProjectDocument
 */
export function isProjectDocument(value: unknown): value is ProjectDocument {
  return ProjectDocumentSchema.safeParse(value).success;
}

/**
 * Type guard to check if value is a ProjectFeature
 */
export function isProjectFeature(value: unknown): value is ProjectFeature {
  return ProjectFeatureSchema.safeParse(value).success;
}

/**
 * Type guard to check if value is ProjectData
 */
export function isProjectData(value: unknown): value is ProjectData {
  return ProjectDataSchema.safeParse(value).success;
}

/**
 * Type guard to check if value is a TaskSource
 */
export function isTaskSource(value: unknown): value is TaskSource {
  return TaskSourceSchema.safeParse(value).success;
}

/**
 * Type guard to check if value is a TaskCodeExample
 */
export function isTaskCodeExample(value: unknown): value is TaskCodeExample {
  return TaskCodeExampleSchema.safeParse(value).success;
}

/**
 * Type guard to check if value is a Project
 */
export function isProject(value: unknown): value is Project {
  return ProjectSchema.safeParse(value).success;
}

/**
 * Type guard to check if value is a Task
 */
export function isTask(value: unknown): value is Task {
  return TaskSchema.safeParse(value).success;
}

/**
 * Exhaustive type checking helper
 * Throws an error if a case is not handled in a switch statement
 */
export function assertNever(value: never): never {
  throw new Error(`Unexpected value: ${JSON.stringify(value)}`);
}

/**
 * Safe JSON parse that returns unknown instead of any
 */
export function safeJsonParse(str: string): unknown {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

/**
 * Type guard to check if value is a non-null object
 */
export function isObject(value: unknown): value is Record<string, unknown> {
  return typeof value === 'object' && value !== null && !Array.isArray(value);
}

/**
 * Type guard to check if value is an array
 */
export function isArray<T>(value: unknown, itemGuard?: (item: unknown) => item is T): value is T[] {
  if (!Array.isArray(value)) return false;
  if (!itemGuard) return true;
  return value.every(itemGuard);
}

/**
 * Type guard to check if value is a string
 */
export function isString(value: unknown): value is string {
  return typeof value === 'string';
}

/**
 * Type guard to check if value is a number
 */
export function isNumber(value: unknown): value is number {
  return typeof value === 'number' && !isNaN(value);
}

/**
 * Type guard to check if value is a boolean
 */
export function isBoolean(value: unknown): value is boolean {
  return typeof value === 'boolean';
}

/**
 * Utility type for deep partial objects
 */
export type DeepPartial<T> = T extends object ? {
  [P in keyof T]?: DeepPartial<T[P]>;
} : T;

/**
 * Utility type for strict omit that checks keys
 */
export type StrictOmit<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>>;

/**
 * Utility type for strict extract
 */
export type StrictExtract<T, U extends T> = U;

/**
 * Type-safe event map for typed event emitters
 */
export type EventMap = Record<string, (...args: unknown[]) => void>;

/**
 * Type-safe event emitter class
 */
export class TypedEventEmitter<T extends EventMap> {
  private handlers: Partial<T> = {};

  on<K extends keyof T>(event: K, handler: T[K]): void {
    this.handlers[event] = handler;
  }

  off<K extends keyof T>(event: K): void {
    delete this.handlers[event];
  }

  emit<K extends keyof T>(event: K, ...args: Parameters<T[K]>): void {
    const handler = this.handlers[event];
    if (handler) {
      handler(...args);
    }
  }
}

/**
 * Utility function to filter out null and undefined values from arrays
 */
export function filterNullish<T>(array: (T | null | undefined)[]): T[] {
  return array.filter((item): item is T => item != null);
}

/**
 * Utility function to safely access nested properties
 */
export function getNestedProperty<T>(
  obj: unknown,
  path: string,
  defaultValue?: T
): T | undefined {
  if (!isObject(obj)) return defaultValue;

  const keys = path.split('.');
  let current: unknown = obj;

  for (const key of keys) {
    if (!isObject(current) || !(key in current)) {
      return defaultValue;
    }
    current = current[key];
  }

  return current as T;
}

/**
 * Type guard to check if a value has a specific property
 */
export function hasProperty<K extends string>(
  obj: unknown,
  key: K
): obj is Record<K, unknown> {
  return isObject(obj) && key in obj;
}

/**
 * Type guard to check if value is a valid UUID
 */
export function isUUID(value: unknown): value is string {
  if (!isString(value)) return false;
  const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
  return uuidRegex.test(value);
}

/**
 * Type guard to check if value is a valid ISO date string
 */
export function isISODateString(value: unknown): value is string {
  if (!isString(value)) return false;
  const date = new Date(value);
  return !isNaN(date.getTime()) && date.toISOString() === value;
}

/**
 * Utility function to ensure a value is an array
 */
export function ensureArray<T>(value: T | T[]): T[] {
  return Array.isArray(value) ? value : [value];
}

/**
 * Utility function to group array items by a key
 */
export function groupBy<T, K extends keyof T>(
  array: T[],
  key: K
): Record<string, T[]> {
  return array.reduce((groups, item) => {
    const groupKey = String(item[key]);
    if (!groups[groupKey]) {
      groups[groupKey] = [];
    }
    groups[groupKey].push(item);
    return groups;
  }, {} as Record<string, T[]>);
}
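A short sketch of the type guards in use (illustrative; `readFeatures` and the sample object are hypothetical, not part of the commit):

```ts
import { isProjectFeature, isArray, getNestedProperty } from './typeGuards';

// Narrow an unknown JSONB payload before using it.
function readFeatures(payload: unknown): string[] {
  if (isArray(payload, isProjectFeature)) {
    return payload.map(feature => feature.name);
  }
  return [];
}

// Safely read a nested value with a fallback.
const port = getNestedProperty<number>({ server: { port: 8181 } }, 'server.port', 3000); // 8181
```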
176 archon-ui-main/test/components/ErrorBoundary.test.tsx Normal file
@@ -0,0 +1,176 @@
import { render, screen } from '@testing-library/react'
import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'
import { ErrorBoundary } from '@/components/ErrorBoundary'
import React from 'react'

// Component that throws an error for testing
const ThrowError: React.FC<{ shouldThrow: boolean }> = ({ shouldThrow }) => {
  if (shouldThrow) {
    throw new Error('Test error message')
  }
  return <div>No error</div>
}

// Mock console.error to suppress error output in tests
const originalError = console.error
beforeEach(() => {
  console.error = vi.fn()
})

afterEach(() => {
  console.error = originalError
})

describe('ErrorBoundary Component', () => {
  test('renders children when there is no error', () => {
    render(
      <ErrorBoundary>
        <div>Test content</div>
      </ErrorBoundary>
    )

    expect(screen.getByText('Test content')).toBeInTheDocument()
  })

  test('catches errors and displays fallback UI', () => {
    render(
      <ErrorBoundary>
        <ThrowError shouldThrow={true} />
      </ErrorBoundary>
    )

    // Should show error fallback
    expect(screen.getByText(/Something went wrong/i)).toBeInTheDocument()
    expect(screen.queryByText('No error')).not.toBeInTheDocument()
  })

  test('displays custom error fallback when provided', () => {
    const CustomFallback = ({ error }: { error: Error }) => (
      <div>Custom error: {error.message}</div>
    )

    render(
      <ErrorBoundary errorFallback={CustomFallback}>
        <ThrowError shouldThrow={true} />
      </ErrorBoundary>
    )

    expect(screen.getByText('Custom error: Test error message')).toBeInTheDocument()
  })

  test('renders different UI for page-level errors', () => {
    render(
      <ErrorBoundary isPageLevel={true}>
        <ThrowError shouldThrow={true} />
      </ErrorBoundary>
    )

    // Page-level errors should have specific styling
    const errorContainer = screen.getByText(/Something went wrong/i).closest('div')
    expect(errorContainer?.className).toContain('min-h-screen')
  })

  test('renders different UI for component-level errors', () => {
    render(
      <ErrorBoundary isPageLevel={false}>
        <ThrowError shouldThrow={true} />
      </ErrorBoundary>
    )

    // Component-level errors should have different styling
    const errorContainer = screen.getByText(/Something went wrong/i).closest('div')
    expect(errorContainer?.className).not.toContain('min-h-screen')
    expect(errorContainer?.className).toContain('rounded-lg')
  })

  test('passes error object to error fallback', () => {
    const error = new Error('Specific error message')
    const CustomFallback = ({ error: err }: { error: Error }) => (
      <div>
        <div>Error occurred</div>
        <div>{err.message}</div>
      </div>
    )

    render(
      <ErrorBoundary errorFallback={CustomFallback}>
        <ThrowError shouldThrow={true} />
      </ErrorBoundary>
    )

    expect(screen.getByText('Error occurred')).toBeInTheDocument()
    expect(screen.getByText('Test error message')).toBeInTheDocument()
  })

  test('handles multiple error boundaries at different levels', () => {
    const OuterFallback = () => <div>Outer error</div>
    const InnerFallback = () => <div>Inner error</div>

    render(
      <ErrorBoundary errorFallback={OuterFallback}>
        <div>
          <ErrorBoundary errorFallback={InnerFallback}>
            <ThrowError shouldThrow={true} />
          </ErrorBoundary>
        </div>
      </ErrorBoundary>
    )

    // Inner boundary should catch the error
    expect(screen.getByText('Inner error')).toBeInTheDocument()
    expect(screen.queryByText('Outer error')).not.toBeInTheDocument()
  })

  test('recovers when error condition is resolved', () => {
    const { rerender } = render(
      <ErrorBoundary>
        <ThrowError shouldThrow={true} />
      </ErrorBoundary>
    )

    // Error is shown
    expect(screen.getByText(/Something went wrong/i)).toBeInTheDocument()

    // When component no longer throws, it should recover
    rerender(
      <ErrorBoundary>
        <ThrowError shouldThrow={false} />
      </ErrorBoundary>
    )

    // Note: React Error Boundaries don't automatically recover,
    // so the error state persists. This is expected behavior.
    expect(screen.getByText(/Something went wrong/i)).toBeInTheDocument()
  })

  test('logs errors to console in development', () => {
    const consoleErrorSpy = vi.spyOn(console, 'error')

    render(
      <ErrorBoundary>
        <ThrowError shouldThrow={true} />
      </ErrorBoundary>
    )

    // Error should be logged
    expect(consoleErrorSpy).toHaveBeenCalled()
  })

  test('renders with suspense wrapper when specified', () => {
    // Testing SuspenseErrorBoundary variant
    const LazyComponent = React.lazy(() =>
      Promise.resolve({ default: () => <div>Lazy loaded</div> })
    )

    render(
      <ErrorBoundary>
        <React.Suspense fallback={<div>Loading...</div>}>
          <LazyComponent />
        </React.Suspense>
      </ErrorBoundary>
    )

    // Should show loading initially
    expect(screen.getByText('Loading...')).toBeInTheDocument()
  })
})
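Note (not part of the commit): the tests above exercise an ErrorBoundary with errorFallback and isPageLevel props. A minimal sketch of a class component matching that surface is given below; the prop names are inferred from the tests, everything else (class names, internals) is an assumption, not the shipped implementation in src/components/ErrorBoundary.tsx.

import React from 'react'

interface ErrorBoundaryProps {
  children: React.ReactNode
  errorFallback?: React.ComponentType<{ error: Error }>
  isPageLevel?: boolean
}

class ErrorBoundarySketch extends React.Component<ErrorBoundaryProps, { error: Error | null }> {
  state = { error: null as Error | null }

  static getDerivedStateFromError(error: Error) {
    return { error }
  }

  componentDidCatch(error: Error, info: React.ErrorInfo) {
    console.error('ErrorBoundary caught', error, info) // the tests assert console.error is called
  }

  render() {
    const { error } = this.state
    if (!error) return this.props.children
    const Fallback = this.props.errorFallback
    if (Fallback) return <Fallback error={error} />
    // page-level errors fill the screen, component-level errors render an inline card
    const className = this.props.isPageLevel ? 'min-h-screen' : 'rounded-lg'
    return <div className={className}>Something went wrong</div>
  }
}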
163 archon-ui-main/test/components/layouts/MainLayout.test.tsx Normal file
@@ -0,0 +1,163 @@
import { render, screen } from '@testing-library/react'
import { describe, test, expect, vi } from 'vitest'
import { MainLayout } from '@/components/layouts/MainLayout'
import { BrowserRouter } from 'react-router-dom'

// Mock the child components
vi.mock('@/components/layouts/SideNavigation', () => ({
  SideNavigation: () => <nav data-testid="side-navigation">Side Navigation</nav>
}))

vi.mock('@/components/DisconnectScreenOverlay', () => ({
  DisconnectScreenOverlay: () => null // Usually hidden
}))

// Mock contexts
vi.mock('@/contexts/SettingsContext', () => ({
  useSettings: () => ({
    settings: {
      enableProjects: true,
      theme: 'dark'
    },
    updateSettings: vi.fn()
  })
}))

describe('MainLayout Component', () => {
  const renderWithRouter = (children: React.ReactNode) => {
    return render(
      <BrowserRouter>
        {children}
      </BrowserRouter>
    )
  }

  test('renders children correctly', () => {
    renderWithRouter(
      <MainLayout>
        <div>Page content</div>
      </MainLayout>
    )

    expect(screen.getByText('Page content')).toBeInTheDocument()
  })

  test('renders side navigation', () => {
    renderWithRouter(
      <MainLayout>
        <div>Content</div>
      </MainLayout>
    )

    expect(screen.getByTestId('side-navigation')).toBeInTheDocument()
  })

  test('applies layout structure classes', () => {
    const { container } = renderWithRouter(
      <MainLayout>
        <div>Content</div>
      </MainLayout>
    )

    // Check for flex layout
    const layoutContainer = container.querySelector('.flex')
    expect(layoutContainer).toBeInTheDocument()

    // Check for main content area
    const mainContent = container.querySelector('main')
    expect(mainContent).toBeInTheDocument()
    expect(mainContent?.className).toContain('flex-1')
  })

  test('renders multiple children', () => {
    renderWithRouter(
      <MainLayout>
        <div>First child</div>
        <div>Second child</div>
        <section>Third child</section>
      </MainLayout>
    )

    expect(screen.getByText('First child')).toBeInTheDocument()
    expect(screen.getByText('Second child')).toBeInTheDocument()
    expect(screen.getByText('Third child')).toBeInTheDocument()
  })

  test('maintains responsive layout', () => {
    const { container } = renderWithRouter(
      <MainLayout>
        <div>Responsive content</div>
      </MainLayout>
    )

    const mainContent = container.querySelector('main')
    expect(mainContent?.className).toContain('overflow-x-hidden')
    expect(mainContent?.className).toContain('overflow-y-auto')
  })

  test('applies dark mode background classes', () => {
    const { container } = renderWithRouter(
      <MainLayout>
        <div>Dark mode content</div>
      </MainLayout>
    )

    const layoutContainer = container.firstChild as HTMLElement
    expect(layoutContainer.className).toContain('bg-gray-50')
    expect(layoutContainer.className).toContain('dark:bg-black')
  })

  test('renders empty children gracefully', () => {
    const { container } = renderWithRouter(
      <MainLayout>
        {null}
        {undefined}
        {false}
      </MainLayout>
    )

    // Should still render the layout structure
    expect(container.querySelector('.flex')).toBeInTheDocument()
    expect(screen.getByTestId('side-navigation')).toBeInTheDocument()
  })

  test('handles complex nested components', () => {
    renderWithRouter(
      <MainLayout>
        <div className="page-container">
          <header>
            <h1>Page Title</h1>
          </header>
          <section>
            <article>
              <p>Article content</p>
            </article>
          </section>
        </div>
      </MainLayout>
    )

    expect(screen.getByText('Page Title')).toBeInTheDocument()
    expect(screen.getByText('Article content')).toBeInTheDocument()
    expect(screen.getByRole('heading', { level: 1 })).toBeInTheDocument()
  })

  test('preserves child component props', () => {
    renderWithRouter(
      <MainLayout>
        <div
          id="test-id"
          className="custom-class"
          data-testid="custom-content"
        >
          Custom content
        </div>
      </MainLayout>
    )

    const customDiv = screen.getByTestId('custom-content')
    expect(customDiv).toHaveAttribute('id', 'test-id')
    expect(customDiv).toHaveClass('custom-class')
    expect(customDiv).toHaveTextContent('Custom content')
  })
})
288 archon-ui-main/test/components/project-tasks/TasksTab.test.tsx Normal file
@@ -0,0 +1,288 @@
import { describe, test, expect, vi, beforeEach } from 'vitest'

// Mock data for testing
const mockTasks = [
  {
    id: 'task-1',
    title: 'First task',
    description: 'Description 1',
    status: 'todo',
    assignee: 'User',
    task_order: 1,
    feature: 'feature-1'
  },
  {
    id: 'task-2',
    title: 'Second task',
    description: 'Description 2',
    status: 'todo',
    assignee: 'AI IDE Agent',
    task_order: 2,
    feature: 'feature-1'
  },
  {
    id: 'task-3',
    title: 'Third task',
    description: 'Description 3',
    status: 'todo',
    assignee: 'Archon',
    task_order: 3,
    feature: 'feature-2'
  },
  {
    id: 'task-4',
    title: 'Fourth task',
    description: 'Description 4',
    status: 'doing',
    assignee: 'User',
    task_order: 1,
    feature: 'feature-2'
  }
]

describe('TasksTab - Task Reordering', () => {
  let reorderTasks: any
  let handleReorderTasks: any

  beforeEach(() => {
    vi.resetModules()
  })

  describe('Sequential Ordering System', () => {
    test('maintains sequential order (1, 2, 3, ...) after reordering', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]

      // Move task from index 0 to index 2
      const reordered = moveTask(tasks, 0, 2)

      // Check that task_order is sequential
      expect(reordered[0].task_order).toBe(1)
      expect(reordered[1].task_order).toBe(2)
      expect(reordered[2].task_order).toBe(3)
    })

    test('updates task_order for all affected tasks', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]

      // Move last task to first position
      const reordered = moveTask(tasks, 2, 0)

      expect(reordered[0].id).toBe('task-3')
      expect(reordered[0].task_order).toBe(1)
      expect(reordered[1].id).toBe('task-1')
      expect(reordered[1].task_order).toBe(2)
      expect(reordered[2].id).toBe('task-2')
      expect(reordered[2].task_order).toBe(3)
    })

    test('handles moving task within same status column', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]

      // Move middle task to end
      const reordered = moveTask(tasks, 1, 2)

      expect(reordered[0].id).toBe('task-1')
      expect(reordered[1].id).toBe('task-3')
      expect(reordered[2].id).toBe('task-2')

      // All should have sequential ordering
      reordered.forEach((task, index) => {
        expect(task.task_order).toBe(index + 1)
      })
    })
  })

  describe('Batch Reorder Persistence', () => {
    test('batches multiple reorder operations', () => {
      const persistBatch = vi.fn()
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]

      // Simulate multiple rapid reorders
      const reordered1 = moveTask(tasks, 0, 2)
      const reordered2 = moveTask(reordered1, 1, 0)

      // In actual implementation, these would be debounced
      // and sent as a single batch update
      expect(reordered2[0].task_order).toBe(1)
      expect(reordered2[1].task_order).toBe(2)
      expect(reordered2[2].task_order).toBe(3)
    })

    test('preserves lastUpdate timestamp for optimistic updates', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]
      const timestamp = Date.now()

      const reordered = moveTask(tasks, 0, 2, timestamp)

      // All reordered tasks should have the lastUpdate timestamp
      reordered.forEach(task => {
        expect(task.lastUpdate).toBe(timestamp)
      })
    })
  })

  describe('Race Condition Prevention', () => {
    test('ignores updates for deleted tasks', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]
      const deletedTaskId = 'task-2'

      // Remove task-2 to simulate deletion
      const afterDeletion = tasks.filter(t => t.id !== deletedTaskId)

      // Try to reorder with deleted task - should handle gracefully
      const reordered = afterDeletion.map((task, index) => ({
        ...task,
        task_order: index + 1
      }))

      expect(reordered.length).toBe(2)
      expect(reordered.find(t => t.id === deletedTaskId)).toBeUndefined()
    })

    test('handles concurrent updates with temporary task replacement', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]
      const tempTask = { ...tasks[0], title: 'Temporary update' }

      // Replace task temporarily (optimistic update)
      const withTemp = tasks.map(t =>
        t.id === tempTask.id ? tempTask : t
      )

      expect(withTemp[0].title).toBe('Temporary update')
      expect(withTemp[0].id).toBe(tasks[0].id)
    })

    test('maintains order consistency during concurrent operations', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]

      // Simulate two concurrent reorder operations
      const reorder1 = moveTask([...tasks], 0, 2)
      const reorder2 = moveTask([...tasks], 2, 1)

      // Both should maintain sequential ordering
      reorder1.forEach((task, index) => {
        expect(task.task_order).toBe(index + 1)
      })

      reorder2.forEach((task, index) => {
        expect(task.task_order).toBe(index + 1)
      })
    })
  })

  describe('Cross-Status Reordering', () => {
    test('handles moving task to different status column', () => {
      const todoTasks = mockTasks.filter(t => t.status === 'todo')
      const doingTasks = mockTasks.filter(t => t.status === 'doing')

      // Move first todo task to doing column
      const taskToMove = todoTasks[0]
      const updatedTask = { ...taskToMove, status: 'doing' }

      // Update todo column (remove task)
      const newTodoTasks = todoTasks.slice(1).map((task, index) => ({
        ...task,
        task_order: index + 1
      }))

      // Update doing column (add task at position)
      const newDoingTasks = [
        updatedTask,
        ...doingTasks
      ].map((task, index) => ({
        ...task,
        task_order: index + 1
      }))

      // Verify sequential ordering in both columns
      expect(newTodoTasks.every((t, i) => t.task_order === i + 1)).toBe(true)
      expect(newDoingTasks.every((t, i) => t.task_order === i + 1)).toBe(true)
    })
  })

  describe('Edge Cases', () => {
    test('handles empty task list', () => {
      const tasks: any[] = []
      const reordered = moveTask(tasks, 0, 0)

      expect(reordered).toEqual([])
    })

    test('handles single task', () => {
      const tasks = [mockTasks[0]]
      const reordered = moveTask(tasks, 0, 0)

      expect(reordered[0].task_order).toBe(1)
      expect(reordered.length).toBe(1)
    })

    test('handles invalid indices gracefully', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]

      // Try to move with out-of-bounds index
      const reordered = moveTask(tasks, 10, 0)

      // Should return tasks unchanged
      expect(reordered).toEqual(tasks)
    })

    test('preserves task data during reorder', () => {
      const tasks = [...mockTasks.filter(t => t.status === 'todo')]
      const originalTask = { ...tasks[0] }

      const reordered = moveTask(tasks, 0, 2)
      const movedTask = reordered.find(t => t.id === originalTask.id)

      // All properties except task_order should be preserved
      expect(movedTask?.title).toBe(originalTask.title)
      expect(movedTask?.description).toBe(originalTask.description)
      expect(movedTask?.assignee).toBe(originalTask.assignee)
      expect(movedTask?.feature).toBe(originalTask.feature)
    })
  })

  describe('Flexible Assignee Support', () => {
    test('supports any assignee name string', () => {
      const customAssignees = [
        'prp-executor',
        'prp-validator',
        'Custom Agent',
        'test-agent-123'
      ]

      customAssignees.forEach(assignee => {
        const task = { ...mockTasks[0], assignee }
        expect(task.assignee).toBe(assignee)
        expect(typeof task.assignee).toBe('string')
      })
    })

    test('handles empty assignee gracefully', () => {
      const task = { ...mockTasks[0], assignee: '' }
      expect(task.assignee).toBe('')

      // Should default to 'AI IDE Agent' in UI
      const displayAssignee = task.assignee || 'AI IDE Agent'
      expect(displayAssignee).toBe('AI IDE Agent')
    })
  })
})

// Helper function to simulate task reordering
function moveTask(tasks: any[], fromIndex: number, toIndex: number, timestamp?: number): any[] {
  if (fromIndex < 0 || fromIndex >= tasks.length ||
      toIndex < 0 || toIndex >= tasks.length) {
    return tasks
  }

  const result = [...tasks]
  const [movedTask] = result.splice(fromIndex, 1)
  result.splice(toIndex, 0, movedTask)

  // Update task_order to be sequential
  return result.map((task, index) => ({
    ...task,
    task_order: index + 1,
    ...(timestamp ? { lastUpdate: timestamp } : {})
  }))
}
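Note (not part of the commit): the comments in the batch-reorder tests above say that rapid reorders "would be debounced and sent as a single batch update". A hedged sketch of what such a debounced persister could look like follows; persistTaskOrder and the delay are hypothetical stand-ins, not the real TasksTab API.

// Hypothetical sketch of debounced batch persistence for task reorders
function createBatchReorderPersister(
  persistTaskOrder: (updates: { id: string; task_order: number }[]) => Promise<void>,
  delayMs = 500
) {
  let pending = new Map<string, number>()
  let timer: ReturnType<typeof setTimeout> | undefined

  return (tasks: { id: string; task_order: number }[]) => {
    // Later reorders overwrite earlier ones, so only the final order is sent
    tasks.forEach(t => pending.set(t.id, t.task_order))
    if (timer) clearTimeout(timer)
    timer = setTimeout(() => {
      const updates = [...pending].map(([id, task_order]) => ({ id, task_order }))
      pending = new Map()
      void persistTaskOrder(updates)
    }, delayMs)
  }
}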
@@ -35,41 +35,29 @@ describe('API Configuration', () => {
   it('should return empty string in production mode', async () => {
     // Set production mode
     (import.meta.env as any).PROD = true;
-    // It should not use VITE_API_URL
-    (import.meta.env as any).VITE_API_URL = 'http://custom-api:9999';
+    delete (import.meta.env as any).VITE_API_URL;
 
     const { getApiUrl } = await import('../../src/config/api');
     expect(getApiUrl()).toBe('');
   });
 
-  it('should use default port 8181 when no port environment variables are set in development', async () => {
-    // Development mode without any port variables
+  it('should throw error when ARCHON_SERVER_PORT is not set in development', async () => {
+    // Development mode without port
     delete (import.meta.env as any).PROD;
     delete (import.meta.env as any).VITE_API_URL;
-    delete (import.meta.env as any).VITE_ARCHON_SERVER_PORT;
-    delete (import.meta.env as any).VITE_PORT;
     delete (import.meta.env as any).ARCHON_SERVER_PORT;
 
-    // Mock window.location
-    Object.defineProperty(window, 'location', {
-      value: {
-        protocol: 'http:',
-        hostname: 'localhost'
-      },
-      writable: true
-    });
-
-    const { getApiUrl } = await import('../../src/config/api');
-
-    expect(getApiUrl()).toBe('http://localhost:8181');
+    // The error will be thrown during module import because API_FULL_URL calls getApiUrl()
+    await expect(async () => {
+      await import('../../src/config/api');
+    }).rejects.toThrow('ARCHON_SERVER_PORT environment variable is required');
   });
 
-  it('should use VITE_ARCHON_SERVER_PORT when set in development', async () => {
-    // Development mode with custom port via VITE_ prefix
+  it('should use ARCHON_SERVER_PORT when set in development', async () => {
+    // Development mode with custom port
     delete (import.meta.env as any).PROD;
     delete (import.meta.env as any).VITE_API_URL;
-    (import.meta.env as any).VITE_ARCHON_SERVER_PORT = '9191';
+    (import.meta.env as any).ARCHON_SERVER_PORT = '9191';
 
     // Mock window.location
     Object.defineProperty(window, 'location', {
@@ -85,10 +73,10 @@ describe('API Configuration', () => {
   });
 
   it('should use custom port with https protocol', async () => {
-    // Development mode with custom port and https via VITE_ prefix
+    // Development mode with custom port and https
     delete (import.meta.env as any).PROD;
     delete (import.meta.env as any).VITE_API_URL;
-    (import.meta.env as any).VITE_ARCHON_SERVER_PORT = '8443';
+    (import.meta.env as any).ARCHON_SERVER_PORT = '8443';
 
     // Mock window.location with https
     Object.defineProperty(window, 'location', {
@@ -151,7 +139,7 @@ describe('API Configuration', () => {
     vi.resetModules();
     delete (import.meta.env as any).PROD;
     delete (import.meta.env as any).VITE_API_URL;
-    (import.meta.env as any).VITE_ARCHON_SERVER_PORT = port;
+    (import.meta.env as any).ARCHON_SERVER_PORT = port;
 
     Object.defineProperty(window, 'location', {
       value: {
@@ -168,71 +156,4 @@ describe('API Configuration', () => {
     });
   });
 
-  describe('MCP Client Service Configuration', () => {
-    let originalEnv: any;
-
-    beforeEach(() => {
-      originalEnv = { ...import.meta.env };
-      vi.resetModules();
-    });
-
-    afterEach(() => {
-      Object.keys(import.meta.env).forEach(key => {
-        delete (import.meta.env as any)[key];
-      });
-      Object.assign(import.meta.env, originalEnv);
-    });
-
-    it('should throw error when ARCHON_MCP_PORT is not set', async () => {
-      delete (import.meta.env as any).ARCHON_MCP_PORT;
-
-      const { mcpClientService } = await import('../../src/services/mcpClientService');
-
-      await expect(mcpClientService.createArchonClient()).rejects.toThrow('ARCHON_MCP_PORT environment variable is required');
-      await expect(mcpClientService.createArchonClient()).rejects.toThrow('Default value: 8051');
-    });
-
-    it('should use ARCHON_MCP_PORT when set', async () => {
-      (import.meta.env as any).ARCHON_MCP_PORT = '9051';
-      (import.meta.env as any).ARCHON_SERVER_PORT = '8181';
-
-      // Mock window.location
-      Object.defineProperty(window, 'location', {
-        value: {
-          protocol: 'http:',
-          hostname: 'localhost'
-        },
-        writable: true
-      });
-
-      // Mock the API call
-      global.fetch = vi.fn().mockResolvedValue({
-        ok: true,
-        json: async () => ({
-          id: 'test-id',
-          name: 'Archon',
-          transport_type: 'http',
-          connection_status: 'connected'
-        })
-      });
-
-      const { mcpClientService } = await import('../../src/services/mcpClientService');
-
-      try {
-        await mcpClientService.createArchonClient();
-
-        // Verify the fetch was called with the correct URL
-        expect(global.fetch).toHaveBeenCalledWith(
-          expect.stringContaining('/api/mcp/clients'),
-          expect.objectContaining({
-            method: 'POST',
-            body: expect.stringContaining('9051')
-          })
-        );
-      } catch (error) {
-        // If it fails due to actual API call, that's okay for this test
-        // We're mainly testing that it constructs the URL correctly
-        expect(error).toBeDefined();
-      }
-    });
-  });
+  // MCP Client Service Configuration tests removed - service not currently in use
195 archon-ui-main/test/services/socketIOService.test.ts Normal file
@@ -0,0 +1,195 @@
import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'
import { io, Socket } from 'socket.io-client'

// Mock socket.io-client
vi.mock('socket.io-client', () => ({
  io: vi.fn(() => ({
    on: vi.fn(),
    off: vi.fn(),
    emit: vi.fn(),
    disconnect: vi.fn(),
    connect: vi.fn(),
    connected: true,
    id: 'test-socket-id'
  }))
}))

describe('socketIOService - Shared Instance Pattern', () => {
  let socketIOService: any
  let knowledgeSocketIO: any
  let taskUpdateSocketIO: any
  let projectListSocketIO: any

  beforeEach(async () => {
    // Reset all mocks
    vi.resetAllMocks()
    vi.resetModules()

    // Import fresh instances
    const module = await import('../../src/services/socketIOService')
    socketIOService = module
    knowledgeSocketIO = module.knowledgeSocketIO
    taskUpdateSocketIO = module.taskUpdateSocketIO
    projectListSocketIO = module.projectListSocketIO
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  test('creates only a single shared socket instance', () => {
    // All exported instances should be the same object
    expect(knowledgeSocketIO).toBe(taskUpdateSocketIO)
    expect(taskUpdateSocketIO).toBe(projectListSocketIO)
    expect(knowledgeSocketIO).toBe(projectListSocketIO)
  })

  test('socket.io is called only once despite multiple exports', () => {
    // The io function should only be called once to create the shared instance
    expect(io).toHaveBeenCalledTimes(1)
  })

  test('all services share the same socket connection', () => {
    // Get the internal socket from each service
    const knowledgeSocket = knowledgeSocketIO.socket
    const taskSocket = taskUpdateSocketIO.socket
    const projectSocket = projectListSocketIO.socket

    // All should reference the same socket instance
    expect(knowledgeSocket).toBe(taskSocket)
    expect(taskSocket).toBe(projectSocket)
  })

  test('operations from different services use the same socket', () => {
    const mockCallback = vi.fn()

    // Subscribe to events from different service exports
    knowledgeSocketIO.on('knowledge_update', mockCallback)
    taskUpdateSocketIO.on('task_update', mockCallback)
    projectListSocketIO.on('project_update', mockCallback)

    // All operations should use the same underlying socket
    const socket = knowledgeSocketIO.socket
    expect(socket.on).toHaveBeenCalledWith('knowledge_update', expect.any(Function))
    expect(socket.on).toHaveBeenCalledWith('task_update', expect.any(Function))
    expect(socket.on).toHaveBeenCalledWith('project_update', expect.any(Function))
  })

  test('disconnecting one service disconnects all', () => {
    // Disconnect using one service
    knowledgeSocketIO.disconnect()

    // Check that the shared socket was disconnected
    const socket = knowledgeSocketIO.socket
    expect(socket.disconnect).toHaveBeenCalledTimes(1)

    // Verify all services report as disconnected
    expect(knowledgeSocketIO.isConnected()).toBe(false)
    expect(taskUpdateSocketIO.isConnected()).toBe(false)
    expect(projectListSocketIO.isConnected()).toBe(false)
  })

  test('operation tracking is shared across all service exports', () => {
    // Add operation from one service
    const operationId = 'test-op-123'
    knowledgeSocketIO.addOperation(operationId)

    // Check if operation is tracked in all services
    expect(knowledgeSocketIO.isOwnOperation(operationId)).toBe(true)
    expect(taskUpdateSocketIO.isOwnOperation(operationId)).toBe(true)
    expect(projectListSocketIO.isOwnOperation(operationId)).toBe(true)
  })

  test('removing operation from one service removes from all', () => {
    const operationId = 'test-op-456'

    // Add operation
    taskUpdateSocketIO.addOperation(operationId)
    expect(knowledgeSocketIO.isOwnOperation(operationId)).toBe(true)

    // Remove operation using different service
    projectListSocketIO.removeOperation(operationId)

    // Verify removed from all
    expect(knowledgeSocketIO.isOwnOperation(operationId)).toBe(false)
    expect(taskUpdateSocketIO.isOwnOperation(operationId)).toBe(false)
    expect(projectListSocketIO.isOwnOperation(operationId)).toBe(false)
  })

  test('echo suppression works across all service exports', () => {
    const operationId = 'echo-test-789'
    const callback = vi.fn()

    // Subscribe to event
    knowledgeSocketIO.on('test_event', callback, true) // skipOwnOperations = true

    // Add operation from different service export
    taskUpdateSocketIO.addOperation(operationId)

    // Simulate event with operation ID
    const eventData = { operationId, data: 'test' }
    const handler = knowledgeSocketIO.socket.on.mock.calls[0][1]
    handler(eventData)

    // Callback should not be called due to echo suppression
    expect(callback).not.toHaveBeenCalled()

    // Simulate event without operation ID
    const externalEvent = { data: 'external' }
    handler(externalEvent)

    // Callback should be called for external events
    expect(callback).toHaveBeenCalledWith(externalEvent)
  })

  test('connection state is synchronized across all exports', () => {
    const mockSocket = knowledgeSocketIO.socket

    // Simulate connected state
    mockSocket.connected = true
    expect(knowledgeSocketIO.isConnected()).toBe(true)
    expect(taskUpdateSocketIO.isConnected()).toBe(true)
    expect(projectListSocketIO.isConnected()).toBe(true)

    // Simulate disconnected state
    mockSocket.connected = false
    expect(knowledgeSocketIO.isConnected()).toBe(false)
    expect(taskUpdateSocketIO.isConnected()).toBe(false)
    expect(projectListSocketIO.isConnected()).toBe(false)
  })

  test('emitting from any service uses the shared socket', () => {
    const mockSocket = knowledgeSocketIO.socket

    // Emit from different services
    knowledgeSocketIO.emit('event1', { data: 1 })
    taskUpdateSocketIO.emit('event2', { data: 2 })
    projectListSocketIO.emit('event3', { data: 3 })

    // All should use the same socket
    expect(mockSocket.emit).toHaveBeenCalledTimes(3)
    expect(mockSocket.emit).toHaveBeenCalledWith('event1', { data: 1 }, undefined)
    expect(mockSocket.emit).toHaveBeenCalledWith('event2', { data: 2 }, undefined)
    expect(mockSocket.emit).toHaveBeenCalledWith('event3', { data: 3 }, undefined)
  })

  test('prevents multiple socket connections when switching tabs', () => {
    // Simulate tab switching by importing the module multiple times
    // In a real scenario, this would happen when components unmount/remount

    // First "tab"
    const socket1 = knowledgeSocketIO.socket

    // Simulate switching tabs (in reality, components would remount)
    // But the shared instance pattern prevents new connections
    const socket2 = taskUpdateSocketIO.socket
    const socket3 = projectListSocketIO.socket

    // All should be the same instance
    expect(socket1).toBe(socket2)
    expect(socket2).toBe(socket3)

    // io should still only be called once
    expect(io).toHaveBeenCalledTimes(1)
  })
})
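Note (not part of the commit): these tests assume knowledgeSocketIO, taskUpdateSocketIO and projectListSocketIO are the same object behind one socket. A minimal sketch of that shared-instance pattern, with all internals assumed rather than taken from the real socketIOService, could look like this:

// Sketch of the shared-instance pattern the tests rely on; the shipped service has more API.
import { io, Socket } from 'socket.io-client'

class SocketIOServiceSketch {
  socket: Socket = io()                       // created exactly once at module load
  private ownOperations = new Set<string>()

  isConnected() { return this.socket.connected }
  addOperation(id: string) { this.ownOperations.add(id) }
  removeOperation(id: string) { this.ownOperations.delete(id) }
  isOwnOperation(id: string) { return this.ownOperations.has(id) }
  emit(event: string, data?: unknown, ack?: () => void) { this.socket.emit(event, data, ack) }
  disconnect() { this.socket.disconnect() }

  on(event: string, cb: (data: any) => void, skipOwnOperations = false) {
    this.socket.on(event, (data: any) => {
      // Echo suppression: ignore events carrying an operationId we emitted ourselves
      if (skipOwnOperations && data?.operationId && this.isOwnOperation(data.operationId)) return
      cb(data)
    })
  }
}

// One instance, re-exported under several names so every consumer shares the connection
const shared = new SocketIOServiceSketch()
export const knowledgeSocketIO = shared
export const taskUpdateSocketIO = shared
export const projectListSocketIO = shared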
238 archon-ui-main/test/utils/operationTracker.test.ts Normal file
@@ -0,0 +1,238 @@
import { describe, test, expect, beforeEach, vi } from 'vitest'
import { OperationTracker } from '../../src/utils/operationTracker'

// Mock uuid
vi.mock('uuid', () => ({
  v4: vi.fn(() => 'mock-uuid-123')
}))

describe('OperationTracker', () => {
  let tracker: OperationTracker

  beforeEach(() => {
    tracker = new OperationTracker()
    vi.clearAllMocks()
  })

  describe('generateOperationId', () => {
    test('generates unique operation IDs', () => {
      const id1 = tracker.generateOperationId()
      const id2 = tracker.generateOperationId()

      expect(id1).toBe('mock-uuid-123')
      expect(id2).toBe('mock-uuid-123') // Same because mock always returns same value

      // In real implementation, these would be different
      expect(id1).toBeTruthy()
      expect(id2).toBeTruthy()
    })

    test('returns string IDs', () => {
      const id = tracker.generateOperationId()
      expect(typeof id).toBe('string')
    })
  })

  describe('addOperation', () => {
    test('adds operation to tracking set', () => {
      const operationId = 'test-op-1'
      tracker.addOperation(operationId)

      expect(tracker.isOwnOperation(operationId)).toBe(true)
    })

    test('handles multiple operations', () => {
      tracker.addOperation('op-1')
      tracker.addOperation('op-2')
      tracker.addOperation('op-3')

      expect(tracker.isOwnOperation('op-1')).toBe(true)
      expect(tracker.isOwnOperation('op-2')).toBe(true)
      expect(tracker.isOwnOperation('op-3')).toBe(true)
    })

    test('handles duplicate operations gracefully', () => {
      const operationId = 'duplicate-op'

      tracker.addOperation(operationId)
      tracker.addOperation(operationId) // Add same ID again

      expect(tracker.isOwnOperation(operationId)).toBe(true)
    })
  })

  describe('removeOperation', () => {
    test('removes operation from tracking', () => {
      const operationId = 'temp-op'

      tracker.addOperation(operationId)
      expect(tracker.isOwnOperation(operationId)).toBe(true)

      tracker.removeOperation(operationId)
      expect(tracker.isOwnOperation(operationId)).toBe(false)
    })

    test('handles removing non-existent operation', () => {
      // Should not throw error
      expect(() => {
        tracker.removeOperation('non-existent')
      }).not.toThrow()
    })

    test('removes only specified operation', () => {
      tracker.addOperation('op-1')
      tracker.addOperation('op-2')
      tracker.addOperation('op-3')

      tracker.removeOperation('op-2')

      expect(tracker.isOwnOperation('op-1')).toBe(true)
      expect(tracker.isOwnOperation('op-2')).toBe(false)
      expect(tracker.isOwnOperation('op-3')).toBe(true)
    })
  })

  describe('isOwnOperation', () => {
    test('returns true for tracked operations', () => {
      const operationId = 'tracked-op'
      tracker.addOperation(operationId)

      expect(tracker.isOwnOperation(operationId)).toBe(true)
    })

    test('returns false for untracked operations', () => {
      expect(tracker.isOwnOperation('untracked-op')).toBe(false)
    })

    test('returns false after operation is removed', () => {
      const operationId = 'temp-op'

      tracker.addOperation(operationId)
      tracker.removeOperation(operationId)

      expect(tracker.isOwnOperation(operationId)).toBe(false)
    })
  })

  describe('clear', () => {
    test('removes all tracked operations', () => {
      tracker.addOperation('op-1')
      tracker.addOperation('op-2')
      tracker.addOperation('op-3')

      tracker.clear()

      expect(tracker.isOwnOperation('op-1')).toBe(false)
      expect(tracker.isOwnOperation('op-2')).toBe(false)
      expect(tracker.isOwnOperation('op-3')).toBe(false)
    })

    test('works with empty tracker', () => {
      expect(() => tracker.clear()).not.toThrow()
    })
  })

  describe('echo suppression scenarios', () => {
    test('prevents processing own operations', () => {
      const operationId = tracker.generateOperationId()
      tracker.addOperation(operationId)

      // Simulate receiving an event with our operation ID
      const event = { operationId, data: 'some data' }

      // Should identify as own operation (skip processing)
      if (tracker.isOwnOperation(event.operationId)) {
        // Skip processing
        expect(true).toBe(true) // Operation should be skipped
      } else {
        // Process event
        expect(false).toBe(true) // Should not reach here
      }
    })

    test('allows processing external operations', () => {
      const externalOpId = 'external-op-123'

      // Simulate receiving an event from another client
      const event = { operationId: externalOpId, data: 'external data' }

      // Should not identify as own operation
      if (!tracker.isOwnOperation(event.operationId)) {
        // Process event
        expect(true).toBe(true) // Operation should be processed
      } else {
        // Skip processing
        expect(false).toBe(true) // Should not reach here
      }
    })
  })

  describe('cleanup patterns', () => {
    test('supports operation cleanup after completion', () => {
      const operationId = tracker.generateOperationId()
      tracker.addOperation(operationId)

      // Simulate operation completion
      setTimeout(() => {
        tracker.removeOperation(operationId)
      }, 100)

      // Initially tracked
      expect(tracker.isOwnOperation(operationId)).toBe(true)

      // After cleanup (would be false after timeout)
      // Note: In real tests, would use fake timers or promises
    })

    test('handles batch cleanup', () => {
      const operations = ['op-1', 'op-2', 'op-3', 'op-4', 'op-5']

      // Add all operations
      operations.forEach(op => tracker.addOperation(op))

      // Remove specific operations
      tracker.removeOperation('op-2')
      tracker.removeOperation('op-4')

      expect(tracker.isOwnOperation('op-1')).toBe(true)
      expect(tracker.isOwnOperation('op-2')).toBe(false)
      expect(tracker.isOwnOperation('op-3')).toBe(true)
      expect(tracker.isOwnOperation('op-4')).toBe(false)
      expect(tracker.isOwnOperation('op-5')).toBe(true)
    })
  })

  describe('memory management', () => {
    test('does not accumulate unlimited operations', () => {
      // Add many operations
      for (let i = 0; i < 1000; i++) {
        tracker.addOperation(`op-${i}`)
      }

      // Clear to prevent memory leaks
      tracker.clear()

      // Verify all cleared
      expect(tracker.isOwnOperation('op-0')).toBe(false)
      expect(tracker.isOwnOperation('op-999')).toBe(false)
    })

    test('supports operation TTL pattern', () => {
      // This test demonstrates a pattern for auto-cleanup
      const operationWithTTL = (id: string, ttlMs: number) => {
        tracker.addOperation(id)

        setTimeout(() => {
          tracker.removeOperation(id)
        }, ttlMs)
      }

      const opId = 'ttl-op'
      operationWithTTL(opId, 5000) // 5 second TTL

      // Initially tracked
      expect(tracker.isOwnOperation(opId)).toBe(true)
      // Would be removed after TTL expires
    })
  })
})
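Note (not part of the commit): a minimal sketch of the OperationTracker surface these tests exercise. The real class lives in src/utils/operationTracker.ts; the Set-based internals below are an assumption consistent with the tests, not the shipped implementation.

// Sketch only: thin wrapper around a Set of operation IDs for echo suppression
import { v4 as uuidv4 } from 'uuid'

export class OperationTrackerSketch {
  private operations = new Set<string>()

  generateOperationId(): string {
    return uuidv4()
  }

  addOperation(id: string): void {
    this.operations.add(id)
  }

  removeOperation(id: string): void {
    this.operations.delete(id)
  }

  isOwnOperation(id: string): boolean {
    return this.operations.has(id)
  }

  clear(): void {
    this.operations.clear()
  }
}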
@@ -316,8 +316,8 @@ def register_task_tools(mcp: FastMCP):

        Args:
            task_id: UUID of the task to update
-           title: New title (optional)
-           description: New description (optional)
+           title: New task title (optional)
+           description: New task description (optional)
            status: New status - "todo" | "doing" | "review" | "done" (optional)
            assignee: New assignee (optional)
            task_order: New priority order (optional)
@@ -358,7 +358,7 @@ def register_task_tools(mcp: FastMCP):
        if not update_fields:
            return MCPErrorFormatter.format_error(
                error_type="validation_error",
-               message="No fields to update",
+               message="No fields provided to update",
                suggestion="Provide at least one field to update",
            )
@@ -18,9 +18,7 @@ from pydantic import BaseModel
 logger = logging.getLogger(__name__)

 # Import Socket.IO instance
-from ..socketio_app import get_socketio_instance
-
-sio = get_socketio_instance()
+from ..socketio_app import sio

 # Create router
 router = APIRouter(prefix="/api/agent-chat", tags=["agent-chat"])
File diff suppressed because it is too large
@@ -8,12 +8,10 @@ No other modules should import from this file.
 import asyncio

 from ..config.logfire_config import get_logger
-from ..socketio_app import get_socketio_instance
+from ..socketio_app import sio

 logger = get_logger(__name__)

-# Get Socket.IO instance
-sio = get_socketio_instance()
-
 # Core broadcast functions
@@ -13,13 +13,10 @@ from ..config.logfire_config import get_logger
 from ..services.background_task_manager import get_task_manager
 from ..services.projects.project_service import ProjectService
 from ..services.projects.source_linking_service import SourceLinkingService
-from ..socketio_app import get_socketio_instance
+from ..socketio_app import sio

 logger = get_logger(__name__)

-# Get Socket.IO instance
-sio = get_socketio_instance()
-logger.info(f"🔗 [SOCKETIO] Socket.IO instance ID: {id(sio)}")

 # Rate limiting for Socket.IO broadcasts
 _last_broadcast_times: dict[str, float] = {}
@@ -217,14 +217,21 @@ class CodeExtractionService:
|
|||||||
Returns:
|
Returns:
|
||||||
List of code blocks with metadata
|
List of code blocks with metadata
|
||||||
"""
|
"""
|
||||||
|
import asyncio
|
||||||
|
import time
|
||||||
|
|
||||||
# Progress will be reported during the loop below
|
# Progress will be reported during the loop below
|
||||||
|
|
||||||
all_code_blocks = []
|
all_code_blocks = []
|
||||||
total_docs = len(crawl_results)
|
total_docs = len(crawl_results)
|
||||||
completed_docs = 0
|
completed_docs = 0
|
||||||
|
|
||||||
|
# PERFORMANCE: Track extraction time per document
|
||||||
|
MAX_EXTRACTION_TIME_PER_DOC = 5.0 # 5 seconds max per document
|
||||||
|
|
||||||
for doc in crawl_results:
|
for doc in crawl_results:
|
||||||
try:
|
try:
|
||||||
|
doc_start_time = time.time()
|
||||||
source_url = doc["url"]
|
source_url = doc["url"]
|
||||||
html_content = doc.get("html", "")
|
html_content = doc.get("html", "")
|
||||||
md = doc.get("markdown", "")
|
md = doc.get("markdown", "")
|
||||||
@@ -234,9 +241,7 @@ class CodeExtractionService:
|
|||||||
f"Document content check | url={source_url} | has_html={bool(html_content)} | has_markdown={bool(md)} | html_len={len(html_content) if html_content else 0} | md_len={len(md) if md else 0}"
|
f"Document content check | url={source_url} | has_html={bool(html_content)} | has_markdown={bool(md)} | html_len={len(html_content) if html_content else 0} | md_len={len(md) if md else 0}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Get dynamic minimum length based on document context
|
# Dynamic minimum length is handled inside the extraction methods
|
||||||
# Extract some context from the document for analysis
|
|
||||||
doc_context = md[:1000] if md else html_content[:1000] if html_content else ""
|
|
||||||
|
|
||||||
# Check markdown first to see if it has code blocks
|
# Check markdown first to see if it has code blocks
|
||||||
if md:
|
if md:
|
||||||
@@ -287,15 +292,32 @@ class CodeExtractionService:
|
|||||||
|
|
||||||
# If not a text file or no code blocks found, try HTML extraction first
|
# If not a text file or no code blocks found, try HTML extraction first
|
||||||
if len(code_blocks) == 0 and html_content and not is_text_file:
|
if len(code_blocks) == 0 and html_content and not is_text_file:
|
||||||
safe_logfire_info(
|
# PERFORMANCE: Check if we've already spent too much time on this document
|
||||||
f"Trying HTML extraction first | url={source_url} | html_length={len(html_content)}"
|
elapsed_time = time.time() - doc_start_time
|
||||||
)
|
if elapsed_time > MAX_EXTRACTION_TIME_PER_DOC:
|
||||||
html_code_blocks = await self._extract_html_code_blocks(html_content)
|
|
||||||
if html_code_blocks:
|
|
||||||
code_blocks = html_code_blocks
|
|
||||||
safe_logfire_info(
|
safe_logfire_info(
|
||||||
f"Found {len(code_blocks)} code blocks from HTML | url={source_url}"
|
f"⏱️ Skipping HTML extraction for {source_url} - already spent {elapsed_time:.1f}s"
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
safe_logfire_info(
|
||||||
|
f"Trying HTML extraction first | url={source_url} | html_length={len(html_content)}"
|
||||||
|
)
|
||||||
|
# Create a timeout for HTML extraction
|
||||||
|
remaining_time = MAX_EXTRACTION_TIME_PER_DOC - elapsed_time
|
||||||
|
try:
|
||||||
|
html_code_blocks = await asyncio.wait_for(
|
||||||
|
self._extract_html_code_blocks(html_content, source_url),
|
||||||
|
timeout=remaining_time
|
||||||
|
)
|
||||||
|
if html_code_blocks:
|
||||||
|
code_blocks = html_code_blocks
|
||||||
|
safe_logfire_info(
|
||||||
|
f"Found {len(code_blocks)} code blocks from HTML | url={source_url}"
|
||||||
|
)
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
safe_logfire_info(
|
||||||
|
f"⏱️ HTML extraction timed out after {remaining_time:.1f}s for {source_url}"
|
||||||
|
)
|
||||||
|
|
||||||
# If still no code blocks, try markdown extraction as fallback
|
# If still no code blocks, try markdown extraction as fallback
|
||||||
if len(code_blocks) == 0 and md and "```" in md:
|
if len(code_blocks) == 0 and md and "```" in md:
|
||||||
@@ -322,6 +344,14 @@ class CodeExtractionService:
|
|||||||
|
|
||||||
# Update progress only after completing document extraction
|
# Update progress only after completing document extraction
|
||||||
completed_docs += 1
|
completed_docs += 1
|
||||||
|
extraction_time = time.time() - doc_start_time
|
||||||
|
if extraction_time > 2.0: # Log slow extractions
|
||||||
|
safe_logfire_info(
|
||||||
|
f"⏱️ Document extraction took {extraction_time:.1f}s | url={source_url} | "
|
||||||
|
f"html_size={len(html_content) if html_content else 0} | "
|
||||||
|
f"blocks_found={len([b for b in all_code_blocks if b['source_url'] == source_url])}"
|
||||||
|
)
|
||||||
|
|
||||||
if progress_callback and total_docs > 0:
|
if progress_callback and total_docs > 0:
|
||||||
# Calculate progress within the specified range
|
# Calculate progress within the specified range
|
||||||
raw_progress = completed_docs / total_docs
|
raw_progress = completed_docs / total_docs
|
||||||
@@ -343,13 +373,14 @@ class CodeExtractionService:
|
|||||||
|
|
||||||
return all_code_blocks
|
return all_code_blocks
|
||||||
|
|
||||||
async def _extract_html_code_blocks(self, content: str) -> list[dict[str, Any]]:
|
async def _extract_html_code_blocks(self, content: str, source_url: str = "") -> list[dict[str, Any]]:
|
||||||
"""
|
"""
|
||||||
Extract code blocks from HTML patterns in content.
|
Extract code blocks from HTML patterns in content.
|
||||||
This is a fallback when markdown conversion didn't preserve code blocks.
|
This is a fallback when markdown conversion didn't preserve code blocks.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
content: The content to search for HTML code patterns
|
content: The content to search for HTML code patterns
|
||||||
|
source_url: The URL of the document being processed
|
||||||
min_length: Minimum length for code blocks
|
min_length: Minimum length for code blocks
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
@@ -359,6 +390,20 @@ class CodeExtractionService:

        # Add detailed logging
        safe_logfire_info(f"Processing HTML of length {len(content)} for code extraction")

+        # PERFORMANCE OPTIMIZATION: Skip extremely large HTML files or chunk them
+        MAX_HTML_SIZE = 1_000_000  # 1MB limit for single-pass processing (increased from 500KB)
+        if len(content) > MAX_HTML_SIZE:
+            safe_logfire_info(
+                f"⚠️ HTML content is very large ({len(content)} bytes). "
+                f"Limiting to first {MAX_HTML_SIZE} bytes to prevent timeout."
+            )
+            # For very large files, focus on the first portion where code examples are likely to be
+            content = content[:MAX_HTML_SIZE]
+            # Try to find a good cutoff point (end of a tag)
+            last_tag_end = content.rfind('>')
+            if last_tag_end > MAX_HTML_SIZE - 1000:
+                content = content[:last_tag_end + 1]

        # Check if we have actual content
        if len(content) < 1000:
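
The truncation above cuts oversized HTML at a tag boundary so the retained portion does not end mid-tag. A standalone sketch of the same idea (the constant mirrors the limit introduced above; names are illustrative):

    MAX_HTML_SIZE = 1_000_000  # 1 MB cap


    def truncate_html(content: str, max_size: int = MAX_HTML_SIZE) -> str:
        """Cap very large HTML, preferring to cut right after the last complete tag."""
        if len(content) <= max_size:
            return content
        truncated = content[:max_size]
        last_tag_end = truncated.rfind(">")
        # Only use the tag boundary if it is close to the cap; otherwise keep the hard cut.
        if last_tag_end > max_size - 1000:
            truncated = truncated[: last_tag_end + 1]
        return truncated
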
@@ -510,9 +555,71 @@ class CodeExtractionService:
            ),
        ]

-        for pattern_tuple in patterns:
+        # PERFORMANCE: Early exit checks to avoid unnecessary regex processing
+        # Check more content (20KB instead of 5KB) and add URL-based exceptions
+        check_size = min(20000, len(content))  # Check first 20KB or entire content if smaller
+        has_code_indicators = any(indicator in content[:check_size] for indicator in
+                                  ['<pre', '<code', 'language-', 'hljs', 'prism', 'shiki', 'highlight'])
+
+        # Never skip certain documentation sites that we know have code
+        is_known_code_site = any(domain in source_url.lower() for domain in
+                                 ['milkdown', 'github.com', 'gitlab', 'docs.', 'dev.', 'api.'])
+
+        if not has_code_indicators and not is_known_code_site:
+            safe_logfire_info(f"No code indicators found in first {check_size} chars and not a known code site, skipping HTML extraction | url={source_url}")
+            return []
+
+        if is_known_code_site and not has_code_indicators:
+            safe_logfire_info(f"Known code site but no indicators in first {check_size} chars, continuing anyway | url={source_url}")
+
+        # PERFORMANCE: Limit number of patterns to check based on detected libraries
+        patterns_to_check = []
+        content_lower = content[:10000].lower()  # Check first 10KB for library detection
+
+        # Selectively add patterns based on what's detected
+        if 'milkdown' in content_lower:
+            patterns_to_check.extend([p for p in patterns if 'milkdown' in p[1]])
+        if 'monaco' in content_lower:
+            patterns_to_check.extend([p for p in patterns if 'monaco' in p[1]])
+        if 'codemirror' in content_lower or 'cm-' in content_lower:
+            patterns_to_check.extend([p for p in patterns if 'codemirror' in p[1]])
+        if 'prism' in content_lower:
+            patterns_to_check.extend([p for p in patterns if 'prism' in p[1]])
+        if 'hljs' in content_lower or 'highlight' in content_lower:
+            patterns_to_check.extend([p for p in patterns if 'hljs' in p[1] or 'highlight' in p[1]])
+        if 'shiki' in content_lower or 'astro' in content_lower:
+            patterns_to_check.extend([p for p in patterns if 'shiki' in p[1] or 'astro' in p[1]])
+
+        # Always include standard patterns as fallback (get ALL standard/generic patterns, not just last 5)
+        standard_patterns = [p for p in patterns if any(tag in p[1] for tag in ['standard', 'generic', 'prism', 'hljs'])]
+        patterns_to_check.extend(standard_patterns)
+
+        # Remove duplicates while preserving order
+        seen = set()
+        unique_patterns = []
+        for p in patterns_to_check:
+            if p[1] not in seen:
+                unique_patterns.append(p)
+                seen.add(p[1])
+        patterns_to_check = unique_patterns
+
+        # If we have very few patterns and it's a known code site, add more generic patterns
+        if len(patterns_to_check) < 5 and is_known_code_site:
+            safe_logfire_info(f"Known code site with few patterns ({len(patterns_to_check)}), adding more generic patterns")
+            patterns_to_check = patterns  # Use all patterns for known code sites
+
+        safe_logfire_info(f"Checking {len(patterns_to_check)} relevant patterns out of {len(patterns)} total")
+
+        for pattern_tuple in patterns_to_check:
            pattern_str, source_type = pattern_tuple
-            matches = list(re.finditer(pattern_str, content, re.DOTALL | re.IGNORECASE))
+
+            # PERFORMANCE: Use re.finditer with smaller chunks for very long content
+            # Only use DOTALL for patterns that really need it (multi-line blocks)
+            flags = re.IGNORECASE
+            if 'monaco' in source_type or 'codemirror' in source_type:
+                flags |= re.DOTALL  # These need DOTALL for multi-line matching
+
+            matches = list(re.finditer(pattern_str, content, flags))

            # Log pattern matches for Milkdown patterns and CodeMirror
            if matches and (
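
The optimization above boils down to two cheap checks before any regex work: a quick scan of the first few kilobytes for highlighter markers, and a library-driven selection of which patterns are worth running at all. A minimal standalone sketch of that selection logic (the pattern-tag naming and indicator list are assumptions for illustration, not taken from this diff):

    CODE_INDICATORS = ("<pre", "<code", "language-", "hljs", "prism", "shiki", "highlight")


    def select_patterns(content: str, patterns: list[tuple[str, str]]) -> list[tuple[str, str]]:
        """Return only the (regex, tag) pairs whose library marker appears in the content."""
        head = content[:20000].lower()
        if not any(marker in head for marker in CODE_INDICATORS):
            return []  # nothing that looks like highlighted code; skip regex entirely

        # Keep patterns whose tag prefix (e.g. "milkdown" in "milkdown_pre") shows up in the page.
        selected = [p for p in patterns if p[1].split("_")[0] in head]
        # Always keep generic fallbacks, then de-duplicate while preserving order.
        selected += [p for p in patterns if "generic" in p[1] or "standard" in p[1]]
        seen: set[str] = set()
        unique = []
        for p in selected:
            if p[1] not in seen:
                unique.append(p)
                seen.add(p[1])
        return unique or patterns
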
@@ -433,6 +433,9 @@ class CrawlingService:
        )

        # Complete - send both the progress update and completion event
+        # CRITICAL: This is the ONLY place that should send status="completed"!
+        # All crawl strategies (batch, recursive, etc.) should use "finished" or other words.
+        # The frontend disconnects when it sees status="completed", so this must be the final step.
        await update_mapped_progress(
            "completed",
            100,
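
The comments added here state an invariant: only the orchestrator may emit status="completed", because the frontend disconnects as soon as it sees it. A hedged sketch of one way a strategy-side guard could enforce that wording (purely illustrative; this helper is not part of the diff):

    def safe_strategy_status(status: str) -> str:
        """Downgrade terminal wording so only the orchestrator ever says 'completed'."""
        return "finished" if status.lower() in {"complete", "completed"} else status
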
@@ -73,7 +73,8 @@ class BatchCrawlStrategy:
        except Exception as e:
            # For non-critical errors (e.g., network issues), use defaults but log prominently
            logger.error(
-                f"Failed to load crawl settings from database: {e}, using defaults", exc_info=True
+                f"Failed to load crawl settings from database: {e}, using defaults",
+                exc_info=True
            )
            batch_size = 50
            if max_concurrent is None:
@@ -98,101 +99,93 @@ class BatchCrawlStrategy:
                wait_for_images=False,  # Skip images for faster crawling
                scan_full_page=True,  # Trigger lazy loading
                exclude_all_images=False,
-                remove_overlay_elements=True,
-                process_iframes=True,
            )
        else:
-            # Configuration for regular batch crawling
+            # Regular sites use standard configuration
            crawl_config = CrawlerRunConfig(
                cache_mode=CacheMode.BYPASS,
-                stream=True,  # Enable streaming
+                stream=True,
                markdown_generator=self.markdown_generator,
                wait_until=settings.get("CRAWL_WAIT_STRATEGY", "domcontentloaded"),
-                page_timeout=int(settings.get("CRAWL_PAGE_TIMEOUT", "45000")),
+                page_timeout=int(settings.get("CRAWL_PAGE_TIMEOUT", "30000")),
                delay_before_return_html=float(settings.get("CRAWL_DELAY_BEFORE_HTML", "0.5")),
-                scan_full_page=True,
+                wait_for_images=False,
+                scan_full_page=False,  # Don't scan full page for non-doc sites
+                exclude_all_images=False,
            )

+        # Transform URLs if needed
+        processed_urls = [transform_url_func(url) for url in urls]
+
+        # Create memory adaptive dispatcher
        dispatcher = MemoryAdaptiveDispatcher(
-            memory_threshold_percent=memory_threshold,
+            max_sessions=max_concurrent,
+            memory_threshold_mb=memory_threshold,
            check_interval=check_interval,
-            max_session_permit=max_concurrent,
        )

-        async def report_progress(percentage: int, message: str, **kwargs):
-            """Helper to report progress if callback is available"""
-            if progress_callback:
-                step_info = {"currentStep": message, "stepMessage": message, **kwargs}
-                await progress_callback("crawling", percentage, message, step_info=step_info)
-
-        total_urls = len(urls)
-        await report_progress(start_progress, f"Starting to crawl {total_urls} URLs...")
-
-        # Use configured batch size
-        successful_results = []
-        processed = 0
-
-        # Transform all URLs at the beginning
-        url_mapping = {}  # Map transformed URLs back to original
-        transformed_urls = []
-        for url in urls:
-            transformed = transform_url_func(url)
-            transformed_urls.append(transformed)
-            url_mapping[transformed] = url
-
-        for i in range(0, total_urls, batch_size):
-            batch_urls = transformed_urls[i : i + batch_size]
-            batch_start = i
-            batch_end = min(i + batch_size, total_urls)
-
-            # Report batch start with smooth progress
-            progress_percentage = start_progress + int(
-                (i / total_urls) * (end_progress - start_progress)
-            )
-            await report_progress(
-                progress_percentage,
-                f"Processing batch {batch_start + 1}-{batch_end} of {total_urls} URLs...",
-            )
-
-            # Crawl this batch using arun_many with streaming
-            logger.info(
-                f"Starting parallel crawl of batch {batch_start + 1}-{batch_end} ({len(batch_urls)} URLs)"
-            )
-            batch_results = await self.crawler.arun_many(
-                urls=batch_urls, config=crawl_config, dispatcher=dispatcher
-            )
-
-            # Handle streaming results
-            async for result in batch_results:
-                processed += 1
-                if result.success and result.markdown:
-                    # Map back to original URL
-                    original_url = url_mapping.get(result.url, result.url)
-                    successful_results.append({
-                        "url": original_url,
-                        "markdown": result.markdown,
-                        "html": result.html,  # Use raw HTML
-                    })
-                else:
-                    logger.warning(
-                        f"Failed to crawl {result.url}: {getattr(result, 'error_message', 'Unknown error')}"
-                    )
-
-                # Report individual URL progress with smooth increments
-                progress_percentage = start_progress + int(
-                    (processed / total_urls) * (end_progress - start_progress)
-                )
-                # Report more frequently for smoother progress
-                if (
-                    processed % 5 == 0 or processed == total_urls
-                ):  # Report every 5 URLs or at the end
-                    await report_progress(
-                        progress_percentage,
-                        f"Crawled {processed}/{total_urls} pages ({len(successful_results)} successful)",
-                    )
-
-        await report_progress(
-            end_progress,
-            f"Batch crawling completed: {len(successful_results)}/{total_urls} pages successful",
-        )
-        return successful_results
+        # Crawl URLs in batches using arun_many
+        results = []
+        total_urls = len(processed_urls)
+
+        for batch_start in range(0, total_urls, batch_size):
+            batch_end = min(batch_start + batch_size, total_urls)
+            batch = processed_urls[batch_start:batch_end]
+
+            # Calculate progress for this batch
+            if progress_callback:
+                batch_progress = start_progress + ((batch_start / total_urls) * (end_progress - start_progress))
+                await progress_callback(
+                    "batch_crawling",
+                    int(batch_progress),
+                    f"Crawling batch {batch_start // batch_size + 1} ({batch_start + 1}-{batch_end}/{total_urls} URLs)"
+                )
+
+            # Run batch crawl
+            try:
+                batch_results = await self.crawler.arun_many(
+                    batch,
+                    config=crawl_config,
+                    dispatcher=dispatcher
+                )
+
+                # Process results
+                for result in batch_results:
+                    if result.success:
+                        results.append({
+                            "url": result.url,
+                            "markdown": result.markdown_v2.raw_markdown if result.markdown_v2 else "",
+                            "success": True,
+                            "metadata": result.extracted_content if hasattr(result, 'extracted_content') else {}
+                        })
+                    else:
+                        logger.warning(f"Failed to crawl {result.url}: {result.error_message}")
+                        results.append({
+                            "url": result.url,
+                            "markdown": "",
+                            "success": False,
+                            "error": result.error_message
+                        })
+
+            except Exception as e:
+                logger.error(f"Batch crawl error: {e}", exc_info=True)
+                # Add failed results for this batch
+                for url in batch:
+                    results.append({
+                        "url": url,
+                        "markdown": "",
+                        "success": False,
+                        "error": str(e)
+                    })
+
+            # Update progress after batch completion
+            # IMPORTANT: Use "finished" not "completed" - only the final orchestrator should send "completed"
+            if progress_callback:
+                batch_progress = start_progress + ((batch_end / total_urls) * (end_progress - start_progress))
+                await progress_callback(
+                    "batch_crawling",
+                    int(batch_progress),
+                    f"Finished batch {batch_start // batch_size + 1}"
+                )
+
+        return results
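
Both the old and new batch loops scale batch position into a caller-supplied progress window with the same arithmetic: start + (done / total) * (end - start). A small standalone sketch of that mapping:

    def map_progress(done: int, total: int, start_progress: int, end_progress: int) -> int:
        """Scale completion of done/total items into the [start_progress, end_progress] window."""
        if total <= 0:
            return end_progress
        fraction = min(max(done / total, 0.0), 1.0)
        return int(start_progress + fraction * (end_progress - start_progress))


    # e.g. with a 10-90 window and 3 of 12 batches finished:
    assert map_progress(3, 12, 10, 90) == 30
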
@@ -60,7 +60,7 @@ class RecursiveCrawlStrategy:
        if not self.crawler:
            logger.error("No crawler instance available for recursive crawling")
            if progress_callback:
-                await progress_callback("error", 0, "Crawler not available")
+                await progress_callback("error", 0, "Crawler not available", step_info={"currentStep": "error", "stepMessage": "Crawler not available"})
            return []

        # Load settings from database - fail fast on configuration errors
@@ -78,7 +78,8 @@ class RecursiveCrawlStrategy:
        except Exception as e:
            # For non-critical errors (e.g., network issues), use defaults but log prominently
            logger.error(
-                f"Failed to load crawl settings from database: {e}, using defaults", exc_info=True
+                f"Failed to load crawl settings from database: {e}, using defaults",
+                exc_info=True
            )
            batch_size = 50
            if max_concurrent is None:
@@ -126,11 +127,19 @@ class RecursiveCrawlStrategy:
        )

        async def report_progress(percentage: int, message: str, **kwargs):
-            """Helper to report progress if callback is available"""
+            """Helper to report progress if callback is available
+
+            IMPORTANT: Never use "complete" or "completed" in messages here!
+            This is just an intermediate step in the overall crawl process.
+            Only the final orchestrator should send "completed" status.
+            """
            if progress_callback:
                # Add step information for multi-progress tracking
-                step_info = {"currentStep": message, "stepMessage": message, **kwargs}
-                await progress_callback("crawling", percentage, message, **step_info)
+                step_info = {
+                    "currentStep": message,
+                    "stepMessage": message
+                }
+                await progress_callback("crawling", percentage, message, step_info=step_info, **kwargs)

        visited = set()

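
The corrected call passes step_info as an explicit keyword argument instead of splatting it into **kwargs. A minimal sketch of a callback with the matching signature (the print body is illustrative, not the project's real handler):

    from typing import Any


    async def progress_callback(
        status: str,
        percentage: int,
        message: str,
        step_info: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> None:
        """Receive crawl progress; step_info carries currentStep/stepMessage for multi-step UIs."""
        step = (step_info or {}).get("currentStep", "")
        print(f"[{status}] {percentage}% {message} ({step})")
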
@@ -169,14 +178,6 @@ class RecursiveCrawlStrategy:
            batch_urls = urls_to_crawl[batch_idx : batch_idx + batch_size]
            batch_end_idx = min(batch_idx + batch_size, len(urls_to_crawl))

-            # Transform URLs and create mapping for this batch
-            url_mapping = {}
-            transformed_batch_urls = []
-            for url in batch_urls:
-                transformed = transform_url_func(url)
-                transformed_batch_urls.append(transformed)
-                url_mapping[transformed] = url
-
            # Calculate progress for this batch within the depth
            batch_progress = depth_start + int(
                (batch_idx / len(urls_to_crawl)) * (depth_end - depth_start)
@@ -191,14 +192,20 @@ class RecursiveCrawlStrategy:
            # Use arun_many for native parallel crawling with streaming
            logger.info(f"Starting parallel crawl of {len(batch_urls)} URLs with arun_many")
            batch_results = await self.crawler.arun_many(
-                urls=transformed_batch_urls, config=run_config, dispatcher=dispatcher
+                urls=batch_urls,
+                config=run_config,
+                dispatcher=dispatcher
            )

            # Handle streaming results from arun_many
            i = 0
            async for result in batch_results:
-                # Map back to original URL using the mapping dict
-                original_url = url_mapping.get(result.url, result.url)
+                # Map back to original URL if transformed
+                original_url = result.url
+                for orig_url in batch_urls:
+                    if transform_url_func(orig_url) == result.url:
+                        original_url = orig_url
+                        break

                norm_url = normalize_url(original_url)
                visited.add(norm_url)
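
The removed code kept a {transformed: original} dict for constant-time reverse lookup, while the replacement re-transforms each candidate URL until one matches. A standalone sketch of the dict-based variant for comparison (the transform function here is a stand-in):

    def build_url_mapping(urls: list[str], transform) -> dict[str, str]:
        """Map each transformed URL back to the original URL it was derived from."""
        return {transform(url): url for url in urls}


    # Usage: constant-time recovery of the original URL for a crawl result.
    mapping = build_url_mapping(["https://example.com/docs"], lambda u: u.rstrip("/"))
    original = mapping.get("https://example.com/docs", "https://example.com/docs")
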
@@ -213,14 +220,14 @@ class RecursiveCrawlStrategy:
                    depth_successful += 1

                    # Find internal links for next depth
-                    links = getattr(result, "links", {}) or {}
-                    for link in links.get("internal", []):
+                    for link in result.links.get("internal", []):
                        next_url = normalize_url(link["href"])
                        # Skip binary files and already visited URLs
-                        is_binary = self.url_handler.is_binary_file(next_url)
-                        if next_url not in visited and not is_binary:
+                        if next_url not in visited and not self.url_handler.is_binary_file(
+                            next_url
+                        ):
                            next_level_urls.add(next_url)
-                        elif is_binary:
+                        elif self.url_handler.is_binary_file(next_url):
                            logger.debug(f"Skipping binary file from crawl queue: {next_url}")
                        else:
                            logger.warning(
@@ -243,14 +250,16 @@ class RecursiveCrawlStrategy:

            current_urls = next_level_urls

-            # Report completion of this depth
+            # Report completion of this depth - IMPORTANT: Use "finished" not "completed"!
            await report_progress(
                depth_end,
-                f"Depth {depth + 1} completed: {depth_successful} pages crawled, {len(next_level_urls)} URLs found for next depth",
+                f"Depth {depth + 1} finished: {depth_successful} pages crawled, {len(next_level_urls)} URLs found for next depth",
            )

+        # IMPORTANT: Use "finished" not "complete" - only the final orchestrator should say "completed"
        await report_progress(
            end_progress,
-            f"Recursive crawling completed: {len(results_all)} total pages crawled across {max_depth} depth levels",
+            f"Recursive crawl finished: {len(results_all)} pages successfully crawled",
        )
+
        return results_all
@@ -1,472 +1,473 @@
"""
Knowledge Item Service

Handles all knowledge item CRUD operations and data transformations.
"""

from typing import Any

from ...config.logfire_config import safe_logfire_error, safe_logfire_info


class KnowledgeItemService:
    """
    Service for managing knowledge items including listing, filtering, updating, and deletion.
    """

    def __init__(self, supabase_client):
        """
        Initialize the knowledge item service.

        Args:
            supabase_client: The Supabase client for database operations
        """
        self.supabase = supabase_client

    async def list_items(
        self,
        page: int = 1,
        per_page: int = 20,
        knowledge_type: str | None = None,
        search: str | None = None,
    ) -> dict[str, Any]:
        """
        List knowledge items with pagination and filtering.

        Args:
            page: Page number (1-based)
            per_page: Items per page
            knowledge_type: Filter by knowledge type
            search: Search term for filtering

        Returns:
            Dict containing items, pagination info, and total count
        """
        try:
            # Build the query with filters at database level for better performance
            query = self.supabase.from_("archon_sources").select("*")

            # Apply knowledge type filter at database level if provided
            if knowledge_type:
                query = query.eq("metadata->>knowledge_type", knowledge_type)

            # Apply search filter at database level if provided
            if search:
                search_pattern = f"%{search}%"
                query = query.or_(
                    f"title.ilike.{search_pattern},summary.ilike.{search_pattern},source_id.ilike.{search_pattern}"
                )

            # Get total count before pagination
            # Clone the query for counting
            count_query = self.supabase.from_("archon_sources").select(
                "*", count="exact", head=True
            )

            # Apply same filters to count query
            if knowledge_type:
                count_query = count_query.eq("metadata->>knowledge_type", knowledge_type)

            if search:
                search_pattern = f"%{search}%"
                count_query = count_query.or_(
                    f"title.ilike.{search_pattern},summary.ilike.{search_pattern},source_id.ilike.{search_pattern}"
                )

            count_result = count_query.execute()
            total = count_result.count if hasattr(count_result, "count") else 0

            # Apply pagination at database level
            start_idx = (page - 1) * per_page
            query = query.range(start_idx, start_idx + per_page - 1)

            # Execute query
            result = query.execute()
            sources = result.data if result.data else []

            # Get source IDs for batch queries
            source_ids = [source["source_id"] for source in sources]

            # Debug log source IDs
            safe_logfire_info(f"Source IDs for batch query: {source_ids}")

            # Batch fetch related data to avoid N+1 queries
            first_urls = {}
            code_example_counts = {}
            chunk_counts = {}

            if source_ids:
                # Batch fetch first URLs
                urls_result = (
                    self.supabase.from_("archon_crawled_pages")
                    .select("source_id, url")
                    .in_("source_id", source_ids)
                    .execute()
                )

                # Group URLs by source_id (take first one for each)
                for item in urls_result.data or []:
                    if item["source_id"] not in first_urls:
                        first_urls[item["source_id"]] = item["url"]

                # Get code example counts per source - NO CONTENT, just counts!
                # Fetch counts individually for each source
                for source_id in source_ids:
                    count_result = (
                        self.supabase.from_("archon_code_examples")
                        .select("id", count="exact", head=True)
                        .eq("source_id", source_id)
                        .execute()
                    )
                    code_example_counts[source_id] = (
                        count_result.count if hasattr(count_result, "count") else 0
                    )

            # Ensure all sources have a count (default to 0)
            for source_id in source_ids:
                if source_id not in code_example_counts:
                    code_example_counts[source_id] = 0
                chunk_counts[source_id] = 0  # Default to 0 to avoid timeout

-            safe_logfire_info(f"Code example counts: {code_example_counts}")
+            safe_logfire_info("Code example counts", code_counts=code_example_counts)

            # Transform sources to items with batched data
            items = []
            for source in sources:
                source_id = source["source_id"]
                source_metadata = source.get("metadata", {})

                # Use batched data instead of individual queries
                first_page_url = first_urls.get(source_id, f"source://{source_id}")
+                # Use original crawl URL instead of first page URL
+                original_url = source_metadata.get("original_url") or first_page_url
                code_examples_count = code_example_counts.get(source_id, 0)
                chunks_count = chunk_counts.get(source_id, 0)

                # Determine source type
-                source_type = self._determine_source_type(source_metadata, first_page_url)
+                source_type = self._determine_source_type(source_metadata, original_url)

                item = {
                    "id": source_id,
                    "title": source.get("title", source.get("summary", "Untitled")),
-                    "url": first_page_url,
+                    "url": original_url,
                    "source_id": source_id,
                    "code_examples": [{"count": code_examples_count}]
                    if code_examples_count > 0
                    else [],  # Minimal array just for count display
                    "metadata": {
                        "knowledge_type": source_metadata.get("knowledge_type", "technical"),
                        "tags": source_metadata.get("tags", []),
                        "source_type": source_type,
                        "status": "active",
                        "description": source_metadata.get(
                            "description", source.get("summary", "")
                        ),
                        "chunks_count": chunks_count,
                        "word_count": source.get("total_word_count", 0),
                        "estimated_pages": round(source.get("total_word_count", 0) / 250, 1),
                        "pages_tooltip": f"{round(source.get('total_word_count', 0) / 250, 1)} pages (≈ {source.get('total_word_count', 0):,} words)",
                        "last_scraped": source.get("updated_at"),
                        "file_name": source_metadata.get("file_name"),
                        "file_type": source_metadata.get("file_type"),
                        "update_frequency": source_metadata.get("update_frequency", 7),
                        "code_examples_count": code_examples_count,
                        **source_metadata,
                    },
                    "created_at": source.get("created_at"),
                    "updated_at": source.get("updated_at"),
                }
                items.append(item)

            safe_logfire_info(
                f"Knowledge items retrieved | total={total} | page={page} | filtered_count={len(items)}"
            )

            return {
                "items": items,
                "total": total,
                "page": page,
                "per_page": per_page,
                "pages": (total + per_page - 1) // per_page,
            }

        except Exception as e:
            safe_logfire_error(f"Failed to list knowledge items | error={str(e)}")
            raise
"""
|
|
||||||
Get a single knowledge item by source ID.
|
async def get_item(self, source_id: str) -> dict[str, Any] | None:
|
||||||
|
"""
|
||||||
Args:
|
Get a single knowledge item by source ID.
|
||||||
source_id: The source ID to retrieve
|
|
||||||
|
Args:
|
||||||
Returns:
|
source_id: The source ID to retrieve
|
||||||
Knowledge item dict or None if not found
|
|
||||||
"""
|
Returns:
|
||||||
try:
|
Knowledge item dict or None if not found
|
||||||
safe_logfire_info(f"Getting knowledge item | source_id={source_id}")
|
"""
|
||||||
|
try:
|
||||||
# Get the source record
|
safe_logfire_info(f"Getting knowledge item | source_id={source_id}")
|
||||||
result = (
|
|
||||||
self.supabase.from_("archon_sources")
|
# Get the source record
|
||||||
.select("*")
|
result = (
|
||||||
.eq("source_id", source_id)
|
self.supabase.from_("archon_sources")
|
||||||
.single()
|
.select("*")
|
||||||
.execute()
|
.eq("source_id", source_id)
|
||||||
)
|
.single()
|
||||||
|
.execute()
|
||||||
if not result.data:
|
)
|
||||||
return None
|
|
||||||
|
if not result.data:
|
||||||
# Transform the source to item format
|
return None
|
||||||
item = await self._transform_source_to_item(result.data)
|
|
||||||
return item
|
# Transform the source to item format
|
||||||
|
item = await self._transform_source_to_item(result.data)
|
||||||
except Exception as e:
|
return item
|
||||||
safe_logfire_error(
|
|
||||||
f"Failed to get knowledge item | error={str(e)} | source_id={source_id}"
|
except Exception as e:
|
||||||
)
|
safe_logfire_error(
|
||||||
return None
|
f"Failed to get knowledge item | error={str(e)} | source_id={source_id}"
|
||||||
|
)
|
||||||
async def update_item(
|
return None
|
||||||
self, source_id: str, updates: dict[str, Any]
|
|
||||||
) -> tuple[bool, dict[str, Any]]:
|
async def update_item(
|
||||||
"""
|
self, source_id: str, updates: dict[str, Any]
|
||||||
Update a knowledge item's metadata.
|
) -> tuple[bool, dict[str, Any]]:
|
||||||
|
"""
|
||||||
Args:
|
Update a knowledge item's metadata.
|
||||||
source_id: The source ID to update
|
|
||||||
updates: Dictionary of fields to update
|
Args:
|
||||||
|
source_id: The source ID to update
|
||||||
Returns:
|
updates: Dictionary of fields to update
|
||||||
Tuple of (success, result)
|
|
||||||
"""
|
Returns:
|
||||||
try:
|
Tuple of (success, result)
|
||||||
safe_logfire_info(
|
"""
|
||||||
f"Updating knowledge item | source_id={source_id} | updates={updates}"
|
try:
|
||||||
)
|
safe_logfire_info(
|
||||||
|
f"Updating knowledge item | source_id={source_id} | updates={updates}"
|
||||||
# Prepare update data
|
)
|
||||||
update_data = {}
|
|
||||||
|
# Prepare update data
|
||||||
# Handle title updates
|
update_data = {}
|
||||||
if "title" in updates:
|
|
||||||
update_data["title"] = updates["title"]
|
# Handle title updates
|
||||||
|
if "title" in updates:
|
||||||
# Handle metadata updates
|
update_data["title"] = updates["title"]
|
||||||
metadata_fields = [
|
|
||||||
"description",
|
# Handle metadata updates
|
||||||
"knowledge_type",
|
metadata_fields = [
|
||||||
"tags",
|
"description",
|
||||||
"status",
|
"knowledge_type",
|
||||||
"update_frequency",
|
"tags",
|
||||||
"group_name",
|
"status",
|
||||||
]
|
"update_frequency",
|
||||||
metadata_updates = {k: v for k, v in updates.items() if k in metadata_fields}
|
"group_name",
|
||||||
|
]
|
||||||
if metadata_updates:
|
metadata_updates = {k: v for k, v in updates.items() if k in metadata_fields}
|
||||||
# Get current metadata
|
|
||||||
current_response = (
|
if metadata_updates:
|
||||||
self.supabase.table("archon_sources")
|
# Get current metadata
|
||||||
.select("metadata")
|
current_response = (
|
||||||
.eq("source_id", source_id)
|
self.supabase.table("archon_sources")
|
||||||
.execute()
|
.select("metadata")
|
||||||
)
|
.eq("source_id", source_id)
|
||||||
if current_response.data:
|
.execute()
|
||||||
current_metadata = current_response.data[0].get("metadata", {})
|
)
|
||||||
current_metadata.update(metadata_updates)
|
if current_response.data:
|
||||||
update_data["metadata"] = current_metadata
|
current_metadata = current_response.data[0].get("metadata", {})
|
||||||
else:
|
current_metadata.update(metadata_updates)
|
||||||
update_data["metadata"] = metadata_updates
|
update_data["metadata"] = current_metadata
|
||||||
|
else:
|
||||||
# Perform the update
|
update_data["metadata"] = metadata_updates
|
||||||
result = (
|
|
||||||
self.supabase.table("archon_sources")
|
# Perform the update
|
||||||
.update(update_data)
|
result = (
|
||||||
.eq("source_id", source_id)
|
self.supabase.table("archon_sources")
|
||||||
.execute()
|
.update(update_data)
|
||||||
)
|
.eq("source_id", source_id)
|
||||||
|
.execute()
|
||||||
if result.data:
|
)
|
||||||
safe_logfire_info(f"Knowledge item updated successfully | source_id={source_id}")
|
|
||||||
return True, {
|
if result.data:
|
||||||
"success": True,
|
safe_logfire_info(f"Knowledge item updated successfully | source_id={source_id}")
|
||||||
"message": f"Successfully updated knowledge item {source_id}",
|
return True, {
|
||||||
"source_id": source_id,
|
"success": True,
|
||||||
}
|
"message": f"Successfully updated knowledge item {source_id}",
|
||||||
else:
|
"source_id": source_id,
|
||||||
safe_logfire_error(f"Knowledge item not found | source_id={source_id}")
|
}
|
||||||
return False, {"error": f"Knowledge item {source_id} not found"}
|
else:
|
||||||
|
safe_logfire_error(f"Knowledge item not found | source_id={source_id}")
|
||||||
except Exception as e:
|
return False, {"error": f"Knowledge item {source_id} not found"}
|
||||||
safe_logfire_error(
|
|
||||||
f"Failed to update knowledge item | error={str(e)} | source_id={source_id}"
|
except Exception as e:
|
||||||
)
|
safe_logfire_error(
|
||||||
return False, {"error": str(e)}
|
f"Failed to update knowledge item | error={str(e)} | source_id={source_id}"
|
||||||
|
)
|
||||||
async def get_available_sources(self) -> dict[str, Any]:
|
return False, {"error": str(e)}
|
||||||
"""
|
|
||||||
Get all available sources with their details.
|
async def get_available_sources(self) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
Returns:
|
Get all available sources with their details.
|
||||||
Dict containing sources list and count
|
|
||||||
"""
|
Returns:
|
||||||
try:
|
Dict containing sources list and count
|
||||||
# Query the sources table
|
"""
|
||||||
result = self.supabase.from_("archon_sources").select("*").order("source_id").execute()
|
try:
|
||||||
|
# Query the sources table
|
||||||
# Format the sources
|
result = self.supabase.from_("archon_sources").select("*").order("source_id").execute()
|
||||||
sources = []
|
|
||||||
if result.data:
|
# Format the sources
|
||||||
for source in result.data:
|
sources = []
|
||||||
sources.append({
|
if result.data:
|
||||||
"source_id": source.get("source_id"),
|
for source in result.data:
|
||||||
"title": source.get("title", source.get("summary", "Untitled")),
|
sources.append({
|
||||||
"summary": source.get("summary"),
|
"source_id": source.get("source_id"),
|
||||||
"metadata": source.get("metadata", {}),
|
"title": source.get("title", source.get("summary", "Untitled")),
|
||||||
"total_words": source.get("total_words", source.get("total_word_count", 0)),
|
"summary": source.get("summary"),
|
||||||
"update_frequency": source.get("update_frequency", 7),
|
"metadata": source.get("metadata", {}),
|
||||||
"created_at": source.get("created_at"),
|
"total_words": source.get("total_words", source.get("total_word_count", 0)),
|
||||||
"updated_at": source.get("updated_at", source.get("created_at")),
|
"update_frequency": source.get("update_frequency", 7),
|
||||||
})
|
"created_at": source.get("created_at"),
|
||||||
|
"updated_at": source.get("updated_at", source.get("created_at")),
|
||||||
return {"success": True, "sources": sources, "count": len(sources)}
|
})
|
||||||
|
|
||||||
except Exception as e:
|
return {"success": True, "sources": sources, "count": len(sources)}
|
||||||
safe_logfire_error(f"Failed to get available sources | error={str(e)}")
|
|
||||||
return {"success": False, "error": str(e), "sources": [], "count": 0}
|
except Exception as e:
|
||||||
|
safe_logfire_error(f"Failed to get available sources | error={str(e)}")
|
||||||
async def _get_all_sources(self) -> list[dict[str, Any]]:
|
return {"success": False, "error": str(e), "sources": [], "count": 0}
|
||||||
"""Get all sources from the database."""
|
|
||||||
result = await self.get_available_sources()
|
async def _get_all_sources(self) -> list[dict[str, Any]]:
|
||||||
return result.get("sources", [])
|
"""Get all sources from the database."""
|
||||||
|
result = await self.get_available_sources()
|
||||||
async def _transform_source_to_item(self, source: dict[str, Any]) -> dict[str, Any]:
|
return result.get("sources", [])
|
||||||
"""
|
|
||||||
Transform a source record into a knowledge item with enriched data.
|
async def _transform_source_to_item(self, source: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
Args:
|
Transform a source record into a knowledge item with enriched data.
|
||||||
source: The source record from database
|
|
||||||
|
Args:
|
||||||
Returns:
|
source: The source record from database
|
||||||
Transformed knowledge item
|
|
||||||
"""
|
Returns:
|
||||||
source_metadata = source.get("metadata", {})
|
Transformed knowledge item
|
||||||
source_id = source["source_id"]
|
"""
|
||||||
|
source_metadata = source.get("metadata", {})
|
||||||
# Get first page URL
|
source_id = source["source_id"]
|
||||||
first_page_url = await self._get_first_page_url(source_id)
|
|
||||||
|
# Get first page URL
|
||||||
# Determine source type
|
first_page_url = await self._get_first_page_url(source_id)
|
||||||
source_type = self._determine_source_type(source_metadata, first_page_url)
|
|
||||||
|
# Determine source type
|
||||||
# Get code examples
|
source_type = self._determine_source_type(source_metadata, first_page_url)
|
||||||
code_examples = await self._get_code_examples(source_id)
|
|
||||||
|
# Get code examples
|
||||||
return {
|
code_examples = await self._get_code_examples(source_id)
|
||||||
"id": source_id,
|
|
||||||
"title": source.get("title", source.get("summary", "Untitled")),
|
return {
|
||||||
"url": first_page_url,
|
"id": source_id,
|
||||||
"source_id": source_id,
|
"title": source.get("title", source.get("summary", "Untitled")),
|
||||||
"code_examples": code_examples,
|
"url": first_page_url,
|
||||||
"metadata": {
|
"source_id": source_id,
|
||||||
# Spread source_metadata first, then override with computed values
|
"code_examples": code_examples,
|
||||||
**source_metadata,
|
"metadata": {
|
||||||
"knowledge_type": source_metadata.get("knowledge_type", "technical"),
|
"knowledge_type": source_metadata.get("knowledge_type", "technical"),
|
||||||
"tags": source_metadata.get("tags", []),
|
"tags": source_metadata.get("tags", []),
|
||||||
"source_type": source_type, # This should be the correctly determined source_type
|
"source_type": source_type,
|
||||||
"status": "active",
|
"status": "active",
|
||||||
"description": source_metadata.get("description", source.get("summary", "")),
|
"description": source_metadata.get("description", source.get("summary", "")),
|
||||||
"chunks_count": await self._get_chunks_count(source_id), # Get actual chunk count
|
"chunks_count": await self._get_chunks_count(source_id), # Get actual chunk count
|
||||||
"word_count": source.get("total_words", 0),
|
"word_count": source.get("total_words", 0),
|
||||||
"estimated_pages": round(
|
"estimated_pages": round(
|
||||||
source.get("total_words", 0) / 250, 1
|
source.get("total_words", 0) / 250, 1
|
||||||
), # Average book page = 250 words
|
), # Average book page = 250 words
|
||||||
"pages_tooltip": f"{round(source.get('total_words', 0) / 250, 1)} pages (≈ {source.get('total_words', 0):,} words)",
|
"pages_tooltip": f"{round(source.get('total_words', 0) / 250, 1)} pages (≈ {source.get('total_words', 0):,} words)",
|
||||||
"last_scraped": source.get("updated_at"),
|
"last_scraped": source.get("updated_at"),
|
||||||
"file_name": source_metadata.get("file_name"),
|
"file_name": source_metadata.get("file_name"),
|
||||||
"file_type": source_metadata.get("file_type"),
|
"file_type": source_metadata.get("file_type"),
|
||||||
"update_frequency": source.get("update_frequency", 7),
|
"update_frequency": source.get("update_frequency", 7),
|
||||||
"code_examples_count": len(code_examples),
|
"code_examples_count": len(code_examples),
|
||||||
},
|
**source_metadata,
|
||||||
"created_at": source.get("created_at"),
|
},
|
||||||
"updated_at": source.get("updated_at"),
|
"created_at": source.get("created_at"),
|
||||||
}
|
"updated_at": source.get("updated_at"),
|
||||||
|
}
|
||||||
async def _get_first_page_url(self, source_id: str) -> str:
|
|
||||||
"""Get the first page URL for a source."""
|
async def _get_first_page_url(self, source_id: str) -> str:
|
||||||
try:
|
"""Get the first page URL for a source."""
|
||||||
pages_response = (
|
try:
|
||||||
self.supabase.from_("archon_crawled_pages")
|
pages_response = (
|
||||||
.select("url")
|
self.supabase.from_("archon_crawled_pages")
|
||||||
.eq("source_id", source_id)
|
.select("url")
|
||||||
.limit(1)
|
.eq("source_id", source_id)
|
||||||
.execute()
|
.limit(1)
|
||||||
)
|
.execute()
|
||||||
|
)
|
||||||
if pages_response.data:
|
|
||||||
return pages_response.data[0].get("url", f"source://{source_id}")
|
if pages_response.data:
|
||||||
|
return pages_response.data[0].get("url", f"source://{source_id}")
|
||||||
except Exception:
|
|
||||||
pass
|
except Exception:
|
||||||
|
pass
|
||||||
return f"source://{source_id}"
|
|
||||||
|
return f"source://{source_id}"
|
||||||
async def _get_code_examples(self, source_id: str) -> list[dict[str, Any]]:
|
|
||||||
"""Get code examples for a source."""
|
async def _get_code_examples(self, source_id: str) -> list[dict[str, Any]]:
|
||||||
try:
|
"""Get code examples for a source."""
|
||||||
code_examples_response = (
|
try:
|
||||||
self.supabase.from_("archon_code_examples")
|
code_examples_response = (
|
||||||
.select("id, content, summary, metadata")
|
self.supabase.from_("archon_code_examples")
|
||||||
.eq("source_id", source_id)
|
.select("id, content, summary, metadata")
|
||||||
.execute()
|
.eq("source_id", source_id)
|
||||||
)
|
.execute()
|
||||||
|
)
|
||||||
return code_examples_response.data if code_examples_response.data else []
|
|
||||||
|
return code_examples_response.data if code_examples_response.data else []
|
||||||
except Exception:
|
|
||||||
return []
|
except Exception:
|
||||||
|
return []
|
||||||
def _determine_source_type(self, metadata: dict[str, Any], url: str) -> str:
|
|
||||||
"""Determine the source type from metadata or URL pattern."""
|
def _determine_source_type(self, metadata: dict[str, Any], url: str) -> str:
|
||||||
stored_source_type = metadata.get("source_type")
|
"""Determine the source type from metadata or URL pattern."""
|
||||||
if stored_source_type:
|
stored_source_type = metadata.get("source_type")
|
||||||
return stored_source_type
|
if stored_source_type:
|
||||||
|
return stored_source_type
|
||||||
# Legacy fallback - check URL pattern
|
|
||||||
return "file" if url.startswith("file://") else "url"
|
# Legacy fallback - check URL pattern
|
||||||
|
return "file" if url.startswith("file://") else "url"
|
||||||
def _filter_by_search(self, items: list[dict[str, Any]], search: str) -> list[dict[str, Any]]:
|
|
||||||
"""Filter items by search term."""
|
def _filter_by_search(self, items: list[dict[str, Any]], search: str) -> list[dict[str, Any]]:
|
||||||
search_lower = search.lower()
|
"""Filter items by search term."""
|
||||||
return [
|
search_lower = search.lower()
|
||||||
item
|
return [
|
||||||
for item in items
|
item
|
||||||
if search_lower in item["title"].lower()
|
for item in items
|
||||||
or search_lower in item["metadata"].get("description", "").lower()
|
if search_lower in item["title"].lower()
|
||||||
or any(search_lower in tag.lower() for tag in item["metadata"].get("tags", []))
|
or search_lower in item["metadata"].get("description", "").lower()
|
||||||
]
|
or any(search_lower in tag.lower() for tag in item["metadata"].get("tags", []))
|
||||||
|
]
|
||||||
def _filter_by_knowledge_type(
|
|
||||||
self, items: list[dict[str, Any]], knowledge_type: str
|
def _filter_by_knowledge_type(
|
||||||
) -> list[dict[str, Any]]:
|
self, items: list[dict[str, Any]], knowledge_type: str
|
||||||
"""Filter items by knowledge type."""
|
) -> list[dict[str, Any]]:
|
||||||
return [item for item in items if item["metadata"].get("knowledge_type") == knowledge_type]
|
"""Filter items by knowledge type."""
|
||||||
|
return [item for item in items if item["metadata"].get("knowledge_type") == knowledge_type]
|
||||||
async def _get_chunks_count(self, source_id: str) -> int:
|
|
||||||
"""Get the actual number of chunks for a source."""
|
async def _get_chunks_count(self, source_id: str) -> int:
|
||||||
try:
|
"""Get the actual number of chunks for a source."""
|
||||||
# Count the actual rows in crawled_pages for this source
|
try:
|
||||||
result = (
|
# Count the actual rows in crawled_pages for this source
|
||||||
self.supabase.table("archon_crawled_pages")
|
result = (
|
||||||
.select("*", count="exact")
|
self.supabase.table("archon_crawled_pages")
|
||||||
.eq("source_id", source_id)
|
.select("*", count="exact")
|
||||||
.execute()
|
.eq("source_id", source_id)
|
||||||
)
|
.execute()
|
||||||
|
)
|
||||||
# Return the count of pages (chunks)
|
|
||||||
return result.count if result.count else 0
|
# Return the count of pages (chunks)
|
||||||
|
return result.count if result.count else 0
|
||||||
except Exception as e:
|
|
||||||
# If we can't get chunk count, return 0
|
except Exception as e:
|
||||||
safe_logfire_info(f"Failed to get chunk count for {source_id}: {e}")
|
# If we can't get chunk count, return 0
|
||||||
return 0
|
safe_logfire_info(f"Failed to get chunk count for {source_id}: {e}")
|
||||||
|
return 0
|
||||||
|
|||||||
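
list_items pushes paging into the database with an inclusive range and derives the page count by ceiling division. A small sketch of those two calculations (values are illustrative):

    def page_bounds(page: int, per_page: int) -> tuple[int, int]:
        """Inclusive start/end offsets for a 1-based page, as used with .range(start, end)."""
        start = (page - 1) * per_page
        return start, start + per_page - 1


    def total_pages(total: int, per_page: int) -> int:
        """Ceiling division without floats: (total + per_page - 1) // per_page."""
        return (total + per_page - 1) // per_page


    assert page_bounds(2, 20) == (20, 39)
    assert total_pages(41, 20) == 3
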
@@ -11,13 +11,10 @@ from datetime import datetime
from typing import Any

from ...config.logfire_config import get_logger
-from ...socketio_app import get_socketio_instance
+from ...socketio_app import sio

logger = get_logger(__name__)

-# Get Socket.IO instance
-sio = get_socketio_instance()
-logger.info(f"🔗 [PROGRESS] Socket.IO instance ID: {id(sio)}")


class ProgressService:
@@ -17,9 +17,7 @@ logger = get_logger(__name__)

# Import Socket.IO instance directly to avoid circular imports
try:
-    from ...socketio_app import get_socketio_instance
-
-    _sio = get_socketio_instance()
+    from ...socketio_app import sio as _sio
    _broadcast_available = True
    logger.info("✅ Socket.IO broadcasting is AVAILABLE - real-time updates enabled")
@@ -870,14 +870,17 @@ async def add_code_examples_to_supabase(

        # Prepare batch data - only for successful embeddings
        batch_data = []
+        used_indices = set()  # Track which indices have been mapped to prevent duplicates

        for j, (embedding, text) in enumerate(
            zip(valid_embeddings, successful_texts, strict=False)
        ):
-            # Find the original index
+            # Find the original index (skip already used indices)
            orig_idx = None
            for k, orig_text in enumerate(batch_texts):
-                if orig_text == text:
+                if orig_text == text and k not in used_indices:
                    orig_idx = k
+                    used_indices.add(k)  # Mark this index as used
                    break

            if orig_idx is None:
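
The used_indices set fixes a subtle bug: when two chunks have identical text, a plain equality scan maps every embedding back to the same original index. A standalone sketch of the duplicate-safe mapping:

    def map_back_to_indices(successful_texts: list[str], original_texts: list[str]) -> list[int | None]:
        """For each successful text, find a distinct matching index in the original list."""
        used: set[int] = set()
        mapped: list[int | None] = []
        for text in successful_texts:
            found = None
            for idx, orig in enumerate(original_texts):
                if orig == text and idx not in used:
                    found = idx
                    used.add(idx)
                    break
            mapped.append(found)
        return mapped


    # Two identical chunks now map to two different rows instead of colliding on index 0.
    assert map_back_to_indices(["a", "a"], ["a", "a", "b"]) == [0, 1]
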
@@ -266,20 +266,23 @@ async def add_documents_to_supabase(
                search_logger.warning(
                    f"Skipping batch {batch_num} - no successful embeddings created"
                )
-                completed_batches += 1
+                # Don't increment completed_batches when skipping - this causes progress to jump
                continue

            # Prepare batch data - only for successful embeddings
            batch_data = []
+            used_indices = set()  # Track which indices have been mapped to prevent duplicates

            # Map successful texts back to their original indices
            for j, (embedding, text) in enumerate(
                zip(batch_embeddings, successful_texts, strict=False)
            ):
-                # Find the original index of this text
+                # Find the original index of this text (skip already used indices)
                orig_idx = None
                for idx, orig_text in enumerate(contextual_contents):
-                    if orig_text == text:
+                    if orig_text == text and idx not in used_indices:
                        orig_idx = idx
+                        used_indices.add(idx)  # Mark this index as used
                        break

                if orig_idx is None:
@@ -370,6 +373,9 @@ async def add_documents_to_supabase(
                search_logger.info(
                    f"Individual inserts: {successful_inserts}/{len(batch_data)} successful"
                )
+                # Even if we had to fall back to individual inserts, count this batch as processed
+                if successful_inserts > 0:
+                    completed_batches += 1

            # Minimal delay between batches to prevent overwhelming
            if i + batch_size < len(contents):
@@ -84,17 +84,10 @@ class RateLimiter:
        self.semaphore = asyncio.Semaphore(config.max_concurrent)
        self._lock = asyncio.Lock()

-    async def acquire(self, estimated_tokens: int = 8000, progress_callback: Callable | None = None) -> bool:
-        """Acquire permission to make API call with token awareness
-
-        Args:
-            estimated_tokens: Estimated number of tokens for the operation
-            progress_callback: Optional async callback for progress updates during wait
-        """
-        while True:  # Loop instead of recursion to avoid stack overflow
-            wait_time_to_sleep = None
-
-            async with self._lock:
+    async def acquire(self, estimated_tokens: int = 8000) -> bool:
+        """Acquire permission to make API call with token awareness"""
+        async with self._lock:
+            while True:  # Use a loop instead of recursion
                now = time.time()

                # Clean old entries
@@ -106,41 +99,30 @@ class RateLimiter:
                     self.request_times.append(now)
                     self.token_usage.append((now, estimated_tokens))
                     return True

-                # Calculate wait time if we can't make the request
+                # Calculate wait time
                 wait_time = self._calculate_wait_time(estimated_tokens)
-                if wait_time > 0:
-                    logfire_logger.info(
-                        f"Rate limiting: waiting {wait_time:.1f}s",
-                        extra={
-                            "tokens": estimated_tokens,
-                            "current_usage": self._get_current_usage(),
-                        }
-                    )
-                    wait_time_to_sleep = wait_time
-                else:
+                if wait_time <= 0:
                     return False

-            # Sleep outside the lock to avoid deadlock
-            if wait_time_to_sleep is not None:
-                # For long waits, break into smaller chunks with progress updates
-                if wait_time_to_sleep > 5 and progress_callback:
-                    chunks = int(wait_time_to_sleep / 5)  # 5 second chunks
-                    for i in range(chunks):
-                        await asyncio.sleep(5)
-                        remaining = wait_time_to_sleep - (i + 1) * 5
-                        if progress_callback:
-                            await progress_callback({
-                                "type": "rate_limit_wait",
-                                "remaining_seconds": max(0, remaining),
-                                "message": f"waiting {max(0, remaining):.1f}s more..."
-                            })
-                    # Sleep any remaining time
-                    if wait_time_to_sleep % 5 > 0:
-                        await asyncio.sleep(wait_time_to_sleep % 5)
-                else:
-                    await asyncio.sleep(wait_time_to_sleep)
-                # Continue the loop to try again
+                logfire_logger.info(
+                    f"Rate limiting: waiting {wait_time:.1f}s",
+                    extra={
+                        "tokens": estimated_tokens,
+                        "current_usage": self._get_current_usage(),
+                    }
+                )
+
+                # Release the lock while sleeping to allow other operations
+                self._lock.release()
+                try:
+                    await asyncio.sleep(wait_time)
+                    logfire_logger.info(f"Rate limiting: resuming after {wait_time:.1f}s wait")
+                finally:
+                    # Re-acquire the lock before continuing
+                    await self._lock.acquire()
+
+                # Loop will continue and re-check conditions

     def _can_make_request(self, estimated_tokens: int) -> bool:
         """Check if request can be made within limits"""
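The two RateLimiter hunks replace the recursive, progress-callback-driven flow with a single loop held under the asyncio.Lock: the lock is released only for the sleep so other coroutines can still record their usage, then re-acquired before the loop re-checks the limits. A condensed, self-contained sketch of that pattern follows; it tracks request counts only, and the class name and limits are illustrative (the real class also tracks token usage).

```python
# Minimal sketch of the lock/loop/sleep pattern, assuming a request-count-only window.
import asyncio
import time


class SimpleRateLimiter:
    def __init__(self, max_requests: int = 60, window_seconds: float = 60.0) -> None:
        self.max_requests = max_requests
        self.window = window_seconds
        self.request_times: list[float] = []
        self._lock = asyncio.Lock()

    async def acquire(self) -> bool:
        async with self._lock:
            while True:  # loop instead of recursion
                now = time.time()
                # Drop entries that have fallen out of the window
                self.request_times = [t for t in self.request_times if now - t < self.window]
                if len(self.request_times) < self.max_requests:
                    self.request_times.append(now)
                    return True

                wait_time = self.window - (now - self.request_times[0])
                # Release the lock while sleeping so other coroutines can record
                # usage, then re-acquire before the loop re-checks the limits.
                self._lock.release()
                try:
                    await asyncio.sleep(max(wait_time, 0.05))
                finally:
                    await self._lock.acquire()


async def main() -> None:
    limiter = SimpleRateLimiter(max_requests=2, window_seconds=2.0)
    for i in range(4):
        await limiter.acquire()
        print(f"request {i} allowed at {time.strftime('%X')}")


asyncio.run(main())
```

Sleeping without the lock is the key point: a long wait no longer blocks every other caller from even recording its own usage.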
@@ -540,15 +522,10 @@ class ThreadingService:
         logfire_logger.info("Threading service stopped")

     @asynccontextmanager
-    async def rate_limited_operation(self, estimated_tokens: int = 8000, progress_callback: Callable | None = None):
-        """Context manager for rate-limited operations
-
-        Args:
-            estimated_tokens: Estimated number of tokens for the operation
-            progress_callback: Optional async callback for progress updates during wait
-        """
+    async def rate_limited_operation(self, estimated_tokens: int = 8000):
+        """Context manager for rate-limited operations"""
         async with self.rate_limiter.semaphore:
-            can_proceed = await self.rate_limiter.acquire(estimated_tokens, progress_callback)
+            can_proceed = await self.rate_limiter.acquire(estimated_tokens)
            if not can_proceed:
                raise Exception("Rate limit exceeded")

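After this change `rate_limited_operation` only takes `estimated_tokens`: it bounds concurrency with the semaphore first, then spends rate-limit budget via `acquire()`, and raises if the budget cannot be obtained. A self-contained sketch of that shape is below; the service and limiter classes are stand-ins, not the real ThreadingService.

```python
# Sketch of the semaphore + acquire context-manager shape (stand-in classes).
import asyncio
from contextlib import asynccontextmanager


class DummyRateLimiter:
    """Stand-in for the real RateLimiter; always grants the request."""

    async def acquire(self, estimated_tokens: int = 8000) -> bool:
        return True


class MiniService:
    """Stand-in showing how concurrency and rate budget are combined."""

    def __init__(self) -> None:
        self.rate_limiter = DummyRateLimiter()
        self.semaphore = asyncio.Semaphore(5)

    @asynccontextmanager
    async def rate_limited_operation(self, estimated_tokens: int = 8000):
        """Bound concurrency first, then spend rate-limit budget."""
        async with self.semaphore:
            can_proceed = await self.rate_limiter.acquire(estimated_tokens)
            if not can_proceed:
                raise Exception("Rate limit exceeded")
            yield


async def main() -> None:
    service = MiniService()
    async with service.rate_limited_operation(estimated_tokens=2000):
        print("running a rate-limited operation")


asyncio.run(main())
```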
@@ -676,4 +653,4 @@ async def stop_threading_service():
     global _threading_service
     if _threading_service:
         await _threading_service.stop()
         _threading_service = None
@@ -26,17 +26,6 @@ sio = socketio.AsyncServer(
     ping_interval=60,  # 1 minute - check connection every minute
 )

-# Global Socket.IO instance for use across modules
-_socketio_instance: socketio.AsyncServer | None = None
-
-
-def get_socketio_instance() -> socketio.AsyncServer:
-    """Get the global Socket.IO server instance."""
-    global _socketio_instance
-    if _socketio_instance is None:
-        _socketio_instance = sio
-    return _socketio_instance
-

 def create_socketio_app(app: FastAPI) -> socketio.ASGIApp:
     """
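With the `_socketio_instance` global and `get_socketio_instance()` removed, other modules would simply import the module-level `sio` object. A hedged sketch of that usage; the module path and event name below are placeholders, not taken from this diff.

```python
# Sketch only: import path and event name are placeholders for wherever the
# Socket.IO server object actually lives in the codebase.
from socketio_app import sio  # hypothetical module name


async def broadcast_progress(progress: dict) -> None:
    """Emit a progress update to all connected clients."""
    await sio.emit("progress_update", progress)
```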
@@ -63,3 +52,24 @@ def create_socketio_app(app: FastAPI) -> socketio.ASGIApp:
     sio.app = app

     return socket_app
+
+# Default Socket.IO event handlers
+@sio.event
+async def connect(sid, environ):
+    """Handle new client connections."""
+    logger.info(f"Client connected: {sid}")
+    safe_logfire_info(f"Client connected: {sid}")
+
+
+@sio.event
+async def disconnect(sid):
+    """Handle client disconnections."""
+    logger.info(f"Client disconnected: {sid}")
+    safe_logfire_info(f"Client disconnected: {sid}")
+
+
+@sio.event
+async def message(sid, data):
+    """Handle incoming messages."""
+    logger.info(f"Received message from {sid}: {data}")
+    await sio.emit("response", {"data": "Message received!"}, to=sid)
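The new default handlers log connects and disconnects and echo a `response` event for any `message`. A small client sketch that exercises them with python-socketio's AsyncClient; the URL and port are placeholders, so point them at wherever the Socket.IO app is actually served.

```python
# Sketch of a client exercising the default handlers above (URL is a placeholder).
import asyncio
import socketio

client = socketio.AsyncClient()


@client.on("response")
async def on_response(data):
    print("server replied:", data)  # expects {"data": "Message received!"}


async def main() -> None:
    await client.connect("http://localhost:8181")  # placeholder address
    await client.emit("message", {"text": "hello from the client"})
    await client.sleep(1)  # give the server time to answer
    await client.disconnect()


asyncio.run(main())
```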