# Base URL for the OpenAI instance (default is https://api.openai.com/v1)
# OpenAI: https://api.openai.com/v1
# Ollama (example): http://localhost:11434/v1
# OpenRouter: https://openrouter.ai/api/v1
# Anthropic: https://api.anthropic.com/v1
BASE_URL=

# For OpenAI: https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# For Anthropic: https://console.anthropic.com/account/keys
# For OpenRouter: https://openrouter.ai/keys
# For Ollama, no need to set this unless you specifically configured an API key
LLM_API_KEY=

# Get your OpenAI API key by following these instructions -
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# Even if using Anthropic or OpenRouter, you still need to set this for the embedding model.
# No need to set this if using Ollama.
OPENAI_API_KEY=

# For the Supabase version (sample_supabase_agent.py), set your Supabase URL and Service Key.
# Get your SUPABASE_URL from the API section of your Supabase project settings -
# https://supabase.com/dashboard/project/<your project ID>/settings/api
SUPABASE_URL=

# Get your SUPABASE_SERVICE_KEY from the API section of your Supabase project settings -
# https://supabase.com/dashboard/project/<your project ID>/settings/api
# On this page it is called the service_role secret.
SUPABASE_SERVICE_KEY=

# The LLM you want to use for the reasoner (o3-mini, R1, QwQ, etc.).
# Example: o3-mini
# Example: deepseek-r1:7b-8k
REASONER_MODEL=

# The LLM you want to use for the primary agent/coder.
# Example: gpt-4o-mini
# Example: qwen2.5:14b-instruct-8k
PRIMARY_MODEL=

# Embedding model you want to use
# Example for Ollama: nomic-embed-text
# Example for OpenAI: text-embedding-3-small
EMBEDDING_MODEL=