Mirror of https://github.com/coleam00/Archon.git (synced 2025-12-24 02:39:17 -05:00)
The New Archon (Beta) - The Operating System for AI Coding Assistants!
.env.example (55 changed lines)
@@ -1,23 +1,6 @@
# Base URL for the OpenAI instance (default is https://api.openai.com/v1)
# OpenAI: https://api.openai.com/v1
# Ollama (example): http://localhost:11434/v1
# OpenRouter: https://openrouter.ai/api/v1
# Anthropic: https://api.anthropic.com/v1
BASE_URL=
# Minimal startup configuration - only Supabase connection required
# All other settings (API keys, model choices, RAG flags) are managed via the Settings page

# For OpenAI: https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# For Anthropic: https://console.anthropic.com/account/keys
# For OpenRouter: https://openrouter.ai/keys
# For Ollama, no need to set this unless you specifically configured an API key
LLM_API_KEY=
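
As a sketch only, using the values already given in the comments above rather than a tested setup, a local Ollama configuration would pair the base URL with an empty key:

    BASE_URL=http://localhost:11434/v1
    # Ollama normally needs no API key, so this stays empty
    LLM_API_KEY=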

# Get your OpenAI API key by following these instructions -
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# Even if using Anthropic or OpenRouter, you still need to set this for the embedding model.
# No need to set this if using Ollama.
OPENAI_API_KEY=

# For the Supabase version (sample_supabase_agent.py), set your Supabase URL and Service Key.
# Get your SUPABASE_URL from the API section of your Supabase project settings -
# https://supabase.com/dashboard/project/<your project ID>/settings/api
SUPABASE_URL=
@@ -27,17 +10,27 @@ SUPABASE_URL=
# On this page it is called the service_role secret.
SUPABASE_SERVICE_KEY=
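
For orientation, a filled-in Supabase section typically looks like the sketch below. The project ref and key are placeholders, and it assumes the standard https://<project-ref>.supabase.co URL form applies to your project; the service_role secret is copied from the same API settings page:

    SUPABASE_URL=https://abcdefghijklmnop.supabase.co   # placeholder project ref
    SUPABASE_SERVICE_KEY=<service_role secret from the API settings page>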

# The LLM you want to use for the reasoner (o3-mini, R1, QwQ, etc.).
# Example: o3-mini
# Example: deepseek-r1:7b-8k
REASONER_MODEL=
# Optional: Set log level for debugging
LOGFIRE_TOKEN=
LOG_LEVEL=INFO
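
If you need more verbose output while debugging, raising the level is a one-line change; this assumes the usual DEBUG/INFO/WARNING/ERROR level names, and LOGFIRE_TOKEN can stay empty unless you actually use Logfire:

    LOG_LEVEL=DEBUG   # more verbose than the INFO default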

# The LLM you want to use for the primary agent/coder.
# Example: gpt-4o-mini
# Example: qwen2.5:14b-instruct-8k
PRIMARY_MODEL=
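
Putting the model examples above together, an all-local Ollama sketch (untested, with values copied from the comments in this file) might be:

    BASE_URL=http://localhost:11434/v1
    REASONER_MODEL=deepseek-r1:7b-8k
    PRIMARY_MODEL=qwen2.5:14b-instruct-8k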
# Service Ports Configuration
# These ports are used for external access to the services
HOST=localhost
ARCHON_SERVER_PORT=8181
ARCHON_MCP_PORT=8051
ARCHON_AGENTS_PORT=8052
ARCHON_UI_PORT=3737
ARCHON_DOCS_PORT=3838
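
Assuming each variable maps to the service its name suggests, the defaults above would make the pieces reachable at addresses like these (illustrative only):

    # UI:          http://localhost:3737
    # API server:  http://localhost:8181
    # MCP server:  http://localhost:8051
    # Agents:      http://localhost:8052
    # Docs:        http://localhost:3838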

# Embedding model you want to use
# Example for Ollama: nomic-embed-text
# Example for OpenAI: text-embedding-3-small
EMBEDDING_MODEL=
# Embedding Configuration
# Dimensions for embedding vectors (1536 for OpenAI text-embedding-3-small)
EMBEDDING_DIMENSIONS=1536
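
The model and its vector size have to agree. A hedged sketch of the two pairings mentioned above follows; the 768 figure for nomic-embed-text is the commonly cited dimension and should be verified for your install:

    # OpenAI pairing (from the comments above)
    EMBEDDING_MODEL=text-embedding-3-small
    EMBEDDING_DIMENSIONS=1536
    # Ollama pairing (assumption: nomic-embed-text produces 768-dimensional vectors)
    # EMBEDDING_MODEL=nomic-embed-text
    # EMBEDDING_DIMENSIONS=768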

# NOTE: All other configuration has been moved to database management!
# Run the credentials_setup.sql file in your Supabase SQL editor to set up the credentials table.
# Then use the Settings page in the web UI to manage:
# - OPENAI_API_KEY (encrypted)
# - MODEL_CHOICE
# - TRANSPORT settings
# - RAG strategy flags (USE_CONTEXTUAL_EMBEDDINGS, USE_HYBRID_SEARCH, etc.)
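
The documented route is to paste credentials_setup.sql into the Supabase SQL editor. As an alternative sketch, the same file can be run with psql against the project's database connection string; SUPABASE_DB_URL here is a placeholder for whatever connection string your project exposes, not a variable from this file:

    psql "$SUPABASE_DB_URL" -f credentials_setup.sql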