mirror of
https://github.com/coleam00/Archon.git
synced 2025-12-30 21:49:30 -05:00
Fix CodeRabbit review issues: credential casing, client cleanup, and TypeScript interfaces

- Fix credential key casing mismatch in credential_service.py (LLM_PROVIDER vs llm_provider)
- Add proper OpenAI client cleanup to prevent resource leaks in llm_provider_service.py
- Add missing LLM_INSTANCE_NAME and OLLAMA_EMBEDDING_INSTANCE_NAME fields to the RAGSettings interface

These fixes address critical CodeRabbit review comments and resolve TypeScript compilation errors.
This commit is contained in:
@@ -472,7 +472,7 @@ class CredentialService:
         try:
             # For now, we'll update the RAG strategy settings
             return await self.set_credential(
-                "llm_provider",
+                "LLM_PROVIDER",
                 provider,
                 category="rag_strategy",
                 description=f"Active {service_type} provider",
@@ -143,7 +143,8 @@ async def get_llm_client(provider: str | None = None, use_embedding_provider: bo
         raise
     finally:
-        # Cleanup if needed
-        pass
+        if client and hasattr(client, "close"):
+            await client.close()


 async def _get_optimal_ollama_instance(instance_type: str | None = None,
||||
Reference in New Issue
Block a user