Archon onboarding, README updates, and MCP/global rule expansion for more coding assistants

commit bb64af9e7a
parent 8d189b9946
Author: Cole Medin
Date: 2025-08-13 18:36:36 -05:00
19 changed files with 808 additions and 180 deletions

View File

@@ -554,13 +554,12 @@ async def create_task(request: CreateTaskRequest):
 async def list_tasks(
     status: str | None = None,
     project_id: str | None = None,
-    parent_task_id: str | None = None,
     include_closed: bool = False,
     page: int = 1,
     per_page: int = 50,
     exclude_large_fields: bool = False,
 ):
-    """List tasks with optional filters including status, project, and parent task."""
+    """List tasks with optional filters including status and project."""
     try:
         logfire.info(
             f"Listing tasks | status={status} | project_id={project_id} | include_closed={include_closed} | page={page} | per_page={per_page}"
@@ -570,7 +569,6 @@ async def list_tasks(
         task_service = TaskService()
         success, result = task_service.list_tasks(
             project_id=project_id,
-            parent_task_id=parent_task_id,
             status=status,
             include_closed=include_closed,
         )

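The parent-task filter is dropped from both the route signature and the TaskService call, leaving status, project, pagination, and the include_closed/exclude_large_fields flags. A minimal client-side sketch of querying the endpoint after this change, assuming an httpx client and a /api/tasks route path (the actual path and port are not shown in this hunk):

import httpx

# Hypothetical base URL and route path; only the query parameters are taken
# from the list_tasks signature in the diff above.
BASE_URL = "http://localhost:8181"

async def fetch_open_tasks(project_id: str) -> dict:
    async with httpx.AsyncClient(base_url=BASE_URL) as client:
        resp = await client.get(
            "/api/tasks",
            params={
                "status": "todo",          # example status value
                "project_id": project_id,
                "include_closed": False,   # closed tasks stay hidden by default
                "page": 1,
                "per_page": 50,
            },
        )
        resp.raise_for_status()
        return resp.json()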
View File

@@ -30,9 +30,9 @@ class RAGStrategyConfig:
     """Configuration for RAG strategies."""
 
     use_contextual_embeddings: bool = False
-    use_hybrid_search: bool = False
-    use_agentic_rag: bool = False
-    use_reranking: bool = False
+    use_hybrid_search: bool = True
+    use_agentic_rag: bool = True
+    use_reranking: bool = True
 
 
 def validate_openai_api_key(api_key: str) -> bool:

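With these defaults flipped, a freshly constructed strategy config now opts into hybrid search, agentic RAG, and reranking; only contextual embeddings remain off by default. A small sketch of the resulting behaviour, assuming RAGStrategyConfig is the plain dataclass its field syntax suggests:

from dataclasses import dataclass

@dataclass
class RAGStrategyConfig:
    """Configuration for RAG strategies (defaults as of this commit)."""

    use_contextual_embeddings: bool = False
    use_hybrid_search: bool = True
    use_agentic_rag: bool = True
    use_reranking: bool = True

# Default construction enables the heavier strategies out of the box...
config = RAGStrategyConfig()
assert config.use_hybrid_search and config.use_agentic_rag and config.use_reranking

# ...while callers can still opt out field by field.
lightweight = RAGStrategyConfig(use_agentic_rag=False, use_reranking=False)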
View File

@@ -837,7 +837,7 @@ async def add_code_examples_to_supabase(
             full_documents.append(full_doc)
 
         # Generate contextual embeddings
-        contextual_results = generate_contextual_embeddings_batch(
+        contextual_results = await generate_contextual_embeddings_batch(
             full_documents, combined_texts
         )

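The call site now awaits the batch helper, which implies generate_contextual_embeddings_batch has become a coroutine rather than a blocking call. A hedged sketch of the shape such an async helper could take; the real signature, return type, and model call are not visible in this hunk and are stand-ins here:

import asyncio

async def generate_contextual_embeddings_batch(
    full_documents: list[str],
    chunks: list[str],
) -> list[tuple[str, bool]]:
    """Illustrative stand-in for the real helper.

    The production version presumably calls an LLM or embeddings API; here the
    per-chunk work is faked so only the async batching pattern is shown.
    """
    async def contextualize(doc: str, chunk: str) -> tuple[str, bool]:
        await asyncio.sleep(0)  # placeholder for a non-blocking API call
        return f"{doc[:200]}\n---\n{chunk}", True

    return await asyncio.gather(
        *(contextualize(doc, chunk) for doc, chunk in zip(full_documents, chunks))
    )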
View File

@@ -205,9 +205,9 @@ async def add_documents_to_supabase(
             sub_batch_contents = batch_contents[ctx_i:ctx_end]
             sub_batch_docs = full_documents[ctx_i:ctx_end]
 
-            # Process sub-batch with a single API call using asyncio.to_thread
-            sub_results = await asyncio.to_thread(
-                generate_contextual_embeddings_batch, sub_batch_docs, sub_batch_contents
+            # Process sub-batch with a single API call
+            sub_results = await generate_contextual_embeddings_batch(
+                sub_batch_docs, sub_batch_contents
             )
 
             # Extract results from this sub-batch
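asyncio.to_thread exists to push a blocking, synchronous callable onto a worker thread; once generate_contextual_embeddings_batch is a native coroutine it has to be awaited directly, since awaiting to_thread on a coroutine function would only hand back an un-awaited coroutine object. A small self-contained sketch of the two patterns, using hypothetical blocking_embed/async_embed helpers rather than the real function:

import asyncio
import time

def blocking_embed(texts: list[str]) -> list[str]:
    time.sleep(0.01)           # stands in for a synchronous SDK call
    return [t.upper() for t in texts]

async def async_embed(texts: list[str]) -> list[str]:
    await asyncio.sleep(0.01)  # stands in for a non-blocking HTTP call
    return [t.upper() for t in texts]

async def main() -> None:
    # Synchronous callable: hop to a thread so the event loop stays responsive.
    a = await asyncio.to_thread(blocking_embed, ["doc one"])
    # Native coroutine: await it directly, no thread needed.
    b = await async_embed(["doc two"])
    print(a, b)

asyncio.run(main())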