mirror of
https://github.com/coleam00/Archon.git
synced 2025-12-24 02:39:17 -05:00
Starting the split of streamlit_ui.py, better error handling
This commit is contained in:
@@ -101,6 +101,12 @@ After installation, follow the guided setup process in the Intro section of the
|
||||
The Streamlit interface will guide you through each step with clear instructions and interactive elements.
|
||||
There are a good number of steps for the setup, but it goes quickly!
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
If you encounter any errors when using Archon, please first check the logs in the "Agent Service" tab.
|
||||
Logs specifically for MCP are also logged to `workbench/logs.txt` (file is automatically created) so please
|
||||
check there. The goal is for you to have a clear error message before creating a bug report here in the GitHub repo.
|
||||
|
||||
## Project Evolution
|
||||
|
||||
### V1: Single-Agent Foundation
|
||||
|
||||
@@ -61,6 +61,7 @@ async def invoke_agent(request: InvokeRequest):
|
||||
return {"response": response}
|
||||
|
||||
except Exception as e:
|
||||
print(f"Exception invoking Archon for thread {request.thread_id}: {str(e)}")
|
||||
write_to_log(f"Error processing message for thread {request.thread_id}: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
1
streamlit_pages/__init__.py
Normal file
1
streamlit_pages/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# This file makes the streamlit_ui directory a Python package
|
||||
77
streamlit_pages/chat.py
Normal file
77
streamlit_pages/chat.py
Normal file
@@ -0,0 +1,77 @@
|
||||
import streamlit as st
|
||||
import uuid
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add the current directory to Python path
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
from archon.archon_graph import agentic_flow
|
||||
|
||||
@st.cache_resource
def get_thread_id():
    """Create one conversation thread id and reuse it for the whole session.

    Streamlit's resource cache guarantees the UUID is generated exactly once
    per server process, so script reruns keep talking on the same thread.
    """
    return f"{uuid.uuid4()}"


thread_id = get_thread_id()
|
||||
|
||||
async def run_agent_with_streaming(user_input: str):
    """
    Run the agent with streaming text for the user_input prompt,
    while maintaining the entire conversation in `st.session_state.messages`.

    Yields each text chunk streamed back by the LangGraph workflow.
    """
    # BUG FIX: `Command` is used below but was never imported into this new
    # module (it previously came from the top of streamlit_ui.py), so resuming
    # a conversation raised NameError. Import it here.
    from langgraph.types import Command

    config = {
        "configurable": {
            "thread_id": thread_id
        }
    }

    # First message from user: start a fresh graph run.
    if len(st.session_state.messages) == 1:
        async for msg in agentic_flow.astream(
            {"latest_user_message": user_input}, config, stream_mode="custom"
        ):
            yield msg
    # Continue the conversation: resume the interrupted graph run.
    else:
        async for msg in agentic_flow.astream(
            Command(resume=user_input), config, stream_mode="custom"
        ):
            yield msg
|
||||
|
||||
async def chat_tab():
    """Display the chat interface for talking to Archon."""
    st.write("Describe to me an AI agent you want to build and I'll code it for you with Pydantic AI.")
    st.write("Example: Build me an AI agent that can search the web with the Brave API.")

    # Make sure the conversation history exists before anything reads it.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the stored conversation on every rerun of the script.
    for entry in st.session_state.messages:
        kind = entry["type"]
        if kind in ("human", "ai", "system"):
            with st.chat_message(kind):
                st.markdown(entry["content"])

    # Chat input for the user; nothing else to do until they submit.
    prompt = st.chat_input("What do you want to build today?")
    if not prompt:
        return

    # Record and echo the user's new request.
    st.session_state.messages.append({"type": "human", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Stream the assistant's reply into a single placeholder, growing it
    # chunk by chunk as tokens arrive.
    reply = ""
    with st.chat_message("assistant"):
        placeholder = st.empty()
        async for chunk in run_agent_with_streaming(prompt):
            reply += chunk
            placeholder.markdown(reply)

    st.session_state.messages.append({"type": "ai", "content": reply})
|
||||
140
streamlit_pages/intro.py
Normal file
140
streamlit_pages/intro.py
Normal file
@@ -0,0 +1,140 @@
|
||||
import streamlit as st
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add the parent directory to sys.path to allow importing from the parent directory
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
from utils.utils import create_new_tab_button
|
||||
|
||||
def intro_tab():
    """Display the introduction and setup guide for Archon.

    Renders the banner image, a welcome blurb, six expandable setup steps
    (environment, database, doc crawling, agent service, MCP, usage) — each
    with a shortcut button that opens the matching tab in a new browser
    tab — and a list of external resource links.
    """
    # Display the banner image
    st.image("public/Archon.png", use_container_width=True)

    # Welcome message
    st.markdown("""
    # Welcome to Archon!

    Archon is an AI meta-agent designed to autonomously build, refine, and optimize other AI agents.

    It serves both as a practical tool for developers and as an educational framework demonstrating the evolution of agentic systems.
    Archon is developed in iterations, starting with a simple Pydantic AI agent that can build other Pydantic AI agents,
    all the way to a full agentic workflow using LangGraph that can build other AI agents with any framework.

    Through its iterative development, Archon showcases the power of planning, feedback loops, and domain-specific knowledge in creating robust AI agents.
    """)

    # Setup guide with expandable sections
    st.markdown("## Setup Guide")
    st.markdown("Follow these concise steps to get Archon up and running (IMPORTANT: come back here after each step):")

    # Step 1: Environment Configuration (expanded by default as the entry point)
    with st.expander("Step 1: Environment Configuration", expanded=True):
        st.markdown("""
        ### Environment Configuration

        First, you need to set up your environment variables:

        1. Go to the **Environment** tab
        2. Configure the following essential variables:
           - `BASE_URL`: API endpoint (OpenAI, OpenRouter, or Ollama)
           - `LLM_API_KEY`: Your API key for the LLM service
           - `OPENAI_API_KEY`: Required for embeddings
           - `SUPABASE_URL`: Your Supabase project URL
           - `SUPABASE_SERVICE_KEY`: Your Supabase service key
           - `PRIMARY_MODEL`: Main agent model (e.g., gpt-4o-mini)
           - `REASONER_MODEL`: Planning model (e.g., o3-mini)

        These settings determine how Archon connects to external services and which models it uses.
        """)
        # Add a button to navigate to the Environment tab
        create_new_tab_button("Go to Environment Section (New Tab)", "Environment", key="goto_env", use_container_width=True)

    # Step 2: Database Setup
    with st.expander("Step 2: Database Setup", expanded=False):
        st.markdown("""
        ### Database Setup

        Archon uses Supabase for vector storage and retrieval:

        1. Go to the **Database** tab
        2. Select your embedding dimensions (1536 for OpenAI, 768 for nomic-embed-text)
        3. Follow the instructions to create the `site_pages` table

        This creates the necessary tables, indexes, and functions for vector similarity search.
        """)
        # Add a button to navigate to the Database tab
        create_new_tab_button("Go to Database Section (New Tab)", "Database", key="goto_db", use_container_width=True)

    # Step 3: Documentation Crawling
    with st.expander("Step 3: Documentation Crawling", expanded=False):
        st.markdown("""
        ### Documentation Crawling

        Populate the database with framework documentation:

        1. Go to the **Documentation** tab
        2. Click on "Crawl Pydantic AI Docs"
        3. Wait for the crawling process to complete

        This step downloads and processes documentation, creating embeddings for semantic search.
        """)
        # Add a button to navigate to the Documentation tab
        create_new_tab_button("Go to the Documentation Section (New Tab)", "Documentation", key="goto_docs", use_container_width=True)

    # Step 4: Agent Service
    with st.expander("Step 4: Agent Service Setup (for MCP)", expanded=False):
        st.markdown("""
        ### MCP Agent Service Setup

        Start the graph service for agent generation:

        1. Go to the **Agent Service** tab
        2. Click on "Start Agent Service"
        3. Verify the service is running

        The agent service powers the LangGraph workflow for agent creation.
        """)
        # Add a button to navigate to the Agent Service tab
        create_new_tab_button("Go to Agent Service Section (New Tab)", "Agent Service", key="goto_service", use_container_width=True)

    # Step 5: MCP Configuration (Optional)
    with st.expander("Step 5: MCP Configuration (Optional)", expanded=False):
        st.markdown("""
        ### MCP Configuration

        For integration with AI IDEs:

        1. Go to the **MCP** tab
        2. Select your IDE (Windsurf, Cursor, or Cline/Roo Code)
        3. Follow the instructions to configure your IDE

        This enables you to use Archon directly from your AI-powered IDE.
        """)
        # Add a button to navigate to the MCP tab
        create_new_tab_button("Go to MCP Section (New Tab)", "MCP", key="goto_mcp", use_container_width=True)

    # Step 6: Using Archon
    with st.expander("Step 6: Using Archon", expanded=False):
        st.markdown("""
        ### Using Archon

        Once everything is set up:

        1. Go to the **Chat** tab
        2. Describe the agent you want to build
        3. Archon will plan and generate the necessary code

        You can also use Archon directly from your AI IDE if you've configured MCP.
        """)
        # Add a button to navigate to the Chat tab
        create_new_tab_button("Go to Chat Section (New Tab)", "Chat", key="goto_chat", use_container_width=True)

    # Resources
    st.markdown("""
    ## Additional Resources

    - [GitHub Repository](https://github.com/coleam00/archon)
    - [Archon Community Forum](https://thinktank.ottomator.ai/c/archon/30)
    - [GitHub Kanban Board](https://github.com/users/coleam00/projects/1)
    """)
|
||||
145
streamlit_pages/mcp.py
Normal file
145
streamlit_pages/mcp.py
Normal file
@@ -0,0 +1,145 @@
|
||||
import streamlit as st
|
||||
import platform
|
||||
import json
|
||||
import os
|
||||
|
||||
def generate_mcp_config(ide_type):
    """
    Generate MCP configuration for the selected IDE type.

    Args:
        ide_type: One of "Windsurf", "Cursor", or "Cline/Roo Code".

    Returns:
        A (python_config, docker_config) pair of strings: JSON blobs for
        Windsurf and Cline/Roo Code, plain command lines for Cursor, or a
        pair of "Unknown IDE type selected" sentinels for anything else.
    """
    # BUG FIX: this module now lives in streamlit_pages/, so the directory of
    # __file__ is one level below the project root. The old copy of this code
    # lived in streamlit_ui.py at the root, where `venv/` and `mcp/mcp_server.py`
    # resolved correctly; use the parent directory so the generated paths stay
    # valid. (NOTE(review): assumes venv/ and mcp/ sit in the repo root, as
    # they did before the split — confirm against the repo layout.)
    base_path = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))

    # Determine the correct python path based on the OS
    if platform.system() == "Windows":
        python_path = os.path.join(base_path, 'venv', 'Scripts', 'python.exe')
    else:  # macOS or Linux
        python_path = os.path.join(base_path, 'venv', 'bin', 'python')

    server_script_path = os.path.join(base_path, 'mcp', 'mcp_server.py')

    # Create the config dictionary for Python (run the server from the venv)
    python_config = {
        "mcpServers": {
            "archon": {
                "command": python_path,
                "args": [server_script_path]
            }
        }
    }

    # Create the config dictionary for Docker (run the pre-built image)
    docker_config = {
        "mcpServers": {
            "archon": {
                "command": "docker",
                "args": [
                    "run",
                    "-i",
                    "--rm",
                    "-e",
                    "GRAPH_SERVICE_URL",
                    "archon-mcp:latest"
                ],
                "env": {
                    "GRAPH_SERVICE_URL": "http://host.docker.internal:8100"
                }
            }
        }
    }

    # Return appropriate configuration based on IDE type
    if ide_type == "Cursor":
        # Cursor takes raw command strings rather than a JSON config.
        return f"{python_path} {server_script_path}", f"docker run -i --rm -e GRAPH_SERVICE_URL=http://host.docker.internal:8100 archon-mcp:latest"
    if ide_type in ("Windsurf", "Cline/Roo Code"):
        # Windsurf and Cline/Roo Code share the same JSON config format.
        return json.dumps(python_config, indent=2), json.dumps(docker_config, indent=2)
    return "Unknown IDE type selected", "Unknown IDE type selected"
|
||||
|
||||
def mcp_tab():
    """Display the MCP configuration interface.

    Lets the user pick their AI IDE (Windsurf, Cursor, or Cline/Roo Code),
    shows the matching Docker and Python MCP configurations produced by
    `generate_mcp_config`, and prints per-IDE setup instructions. The chosen
    IDE is remembered in `st.session_state.selected_ide` across reruns.
    """
    st.header("MCP Configuration")
    st.write("Select your AI IDE to get the appropriate MCP configuration:")

    # IDE selection with side-by-side buttons
    col1, col2, col3 = st.columns(3)

    with col1:
        windsurf_button = st.button("Windsurf", use_container_width=True, key="windsurf_button")
    with col2:
        cursor_button = st.button("Cursor", use_container_width=True, key="cursor_button")
    with col3:
        cline_button = st.button("Cline/Roo Code", use_container_width=True, key="cline_button")

    # Initialize session state for selected IDE if not present
    if "selected_ide" not in st.session_state:
        st.session_state.selected_ide = None

    # Update selected IDE based on button clicks
    if windsurf_button:
        st.session_state.selected_ide = "Windsurf"
    elif cursor_button:
        st.session_state.selected_ide = "Cursor"
    elif cline_button:
        st.session_state.selected_ide = "Cline/Roo Code"

    # Display configuration if an IDE is selected
    if st.session_state.selected_ide:
        selected_ide = st.session_state.selected_ide
        st.subheader(f"MCP Configuration for {selected_ide}")
        python_config, docker_config = generate_mcp_config(selected_ide)

        # Configuration type tabs
        config_tab1, config_tab2 = st.tabs(["Docker Configuration", "Python Configuration"])

        with config_tab1:
            st.markdown("### Docker Configuration")
            # Cursor configs are plain command strings, not JSON, so skip highlighting.
            st.code(docker_config, language="json" if selected_ide != "Cursor" else None)

            st.markdown("#### Requirements:")
            st.markdown("- Docker installed")
            st.markdown("- Run the setup script to build and start both containers:")
            st.code("python run_docker.py", language="bash")

        with config_tab2:
            st.markdown("### Python Configuration")
            st.code(python_config, language="json" if selected_ide != "Cursor" else None)

            st.markdown("#### Requirements:")
            st.markdown("- Python 3.11+ installed")
            st.markdown("- Virtual environment created and activated")
            st.markdown("- All dependencies installed via `pip install -r requirements.txt`")
            st.markdown("- Must be running Archon not within a container")

        # Instructions based on IDE type
        st.markdown("---")
        st.markdown("### Setup Instructions")

        if selected_ide == "Windsurf":
            st.markdown("""
            #### How to use in Windsurf:
            1. Click on the hammer icon above the chat input
            2. Click on "Configure"
            3. Paste the JSON from your preferred configuration tab above
            4. Click "Refresh" next to "Configure"
            """)
        elif selected_ide == "Cursor":
            st.markdown("""
            #### How to use in Cursor:
            1. Go to Cursor Settings > Features > MCP
            2. Click on "+ Add New MCP Server"
            3. Name: Archon
            4. Type: command (equivalent to stdio)
            5. Command: Paste the command from your preferred configuration tab above
            """)
        elif selected_ide == "Cline/Roo Code":
            st.markdown("""
            #### How to use in Cline or Roo Code:
            1. From the Cline/Roo Code extension, click the "MCP Server" tab
            2. Click the "Edit MCP Settings" button
            3. The MCP settings file should be displayed in a tab in VS Code
            4. Paste the JSON from your preferred configuration tab above
            5. Cline/Roo Code will automatically detect and start the MCP server
            """)
|
||||
94
streamlit_pages/styles.py
Normal file
94
streamlit_pages/styles.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""
|
||||
This module contains the CSS styles for the Streamlit UI.
|
||||
"""
|
||||
|
||||
import streamlit as st
|
||||
|
||||
def load_css():
    """
    Load the custom CSS styles for the Archon UI.

    Injects a single <style> block via st.markdown(unsafe_allow_html=True)
    that themes buttons, headers, links, code blocks, and the chat widgets
    with the Archon brand colors (green primary, pink secondary) and
    suppresses Streamlit's default red focus outlines.
    """
    st.markdown("""
        <style>
        /* Brand palette shared by all rules below */
        :root {
            --primary-color: #00CC99;  /* Green */
            --secondary-color: #EB2D8C;  /* Pink */
            --text-color: #262730;
        }

        /* Style the buttons */
        .stButton > button {
            color: white;
            border: 2px solid var(--primary-color);
            padding: 0.5rem 1rem;
            font-weight: bold;
            transition: all 0.3s ease;
        }

        .stButton > button:hover {
            color: white;
            border: 2px solid var(--secondary-color);
        }

        /* Override Streamlit's default focus styles that make buttons red */
        .stButton > button:focus,
        .stButton > button:focus:hover,
        .stButton > button:active,
        .stButton > button:active:hover {
            color: white !important;
            border: 2px solid var(--secondary-color) !important;
            box-shadow: none !important;
            outline: none !important;
        }

        /* Style headers */
        h1, h2, h3 {
            color: var(--primary-color);
        }

        /* Hide spans within h3 elements */
        h1 span, h2 span, h3 span {
            display: none !important;
            visibility: hidden;
            width: 0;
            height: 0;
            opacity: 0;
            position: absolute;
            overflow: hidden;
        }

        /* Style code blocks */
        pre {
            border-left: 4px solid var(--primary-color);
        }

        /* Style links */
        a {
            color: var(--secondary-color);
        }

        /* Style the chat messages */
        .stChatMessage {
            border-left: 4px solid var(--secondary-color);
        }

        /* Style the chat input */
        .stChatInput > div {
            border: 2px solid var(--primary-color) !important;
        }

        /* Remove red outline on focus */
        .stChatInput > div:focus-within {
            box-shadow: none !important;
            border: 2px solid var(--secondary-color) !important;
            outline: none !important;
        }

        /* Remove red outline on all inputs when focused */
        input:focus, textarea:focus, [contenteditable]:focus {
            box-shadow: none !important;
            border-color: var(--secondary-color) !important;
            outline: none !important;
        }
        </style>
    """, unsafe_allow_html=True)
|
||||
516
streamlit_ui.py
516
streamlit_ui.py
@@ -1,45 +1,36 @@
|
||||
from __future__ import annotations
|
||||
from supabase import Client, create_client
|
||||
from typing import Literal, TypedDict
|
||||
from langgraph.types import Command
|
||||
import os
|
||||
|
||||
import streamlit as st
|
||||
import logfire
|
||||
import asyncio
|
||||
import time
|
||||
import json
|
||||
import uuid
|
||||
import sys
|
||||
import platform
|
||||
import subprocess
|
||||
import threading
|
||||
import queue
|
||||
import webbrowser
|
||||
import importlib
|
||||
from urllib.parse import urlparse
|
||||
from openai import AsyncOpenAI
|
||||
from supabase import Client, create_client
|
||||
from dotenv import load_dotenv
|
||||
from utils.utils import get_env_var, save_env_var, write_to_log
|
||||
from future_enhancements import future_enhancements_tab
|
||||
import streamlit as st
|
||||
import subprocess
|
||||
import importlib
|
||||
import threading
|
||||
import platform
|
||||
import logfire
|
||||
import asyncio
|
||||
import queue
|
||||
import time
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Import all the message part classes
|
||||
from pydantic_ai.messages import (
|
||||
ModelMessage,
|
||||
ModelRequest,
|
||||
ModelResponse,
|
||||
SystemPromptPart,
|
||||
UserPromptPart,
|
||||
TextPart,
|
||||
ToolCallPart,
|
||||
ToolReturnPart,
|
||||
RetryPromptPart,
|
||||
ModelMessagesTypeAdapter
|
||||
# Set page config - must be the first Streamlit command
|
||||
st.set_page_config(
|
||||
page_title="Archon - Agent Builder",
|
||||
page_icon="🤖",
|
||||
layout="wide",
|
||||
)
|
||||
|
||||
# Add the current directory to Python path
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from archon.archon_graph import agentic_flow
|
||||
from utils.utils import get_env_var, save_env_var, write_to_log, create_new_tab_button
|
||||
from streamlit_pages.styles import load_css
|
||||
from streamlit_pages.intro import intro_tab
|
||||
from streamlit_pages.chat import chat_tab
|
||||
from streamlit_pages.mcp import mcp_tab
|
||||
from streamlit_pages.future_enhancements import future_enhancements_tab
|
||||
|
||||
# Load environment variables from .env file
|
||||
load_dotenv()
|
||||
@@ -65,120 +56,8 @@ if get_env_var("SUPABASE_URL"):
|
||||
else:
|
||||
supabase = None
|
||||
|
||||
# Set page config - must be the first Streamlit command
|
||||
st.set_page_config(
|
||||
page_title="Archon - Agent Builder",
|
||||
page_icon="🤖",
|
||||
layout="wide",
|
||||
)
|
||||
|
||||
# Set custom theme colors to match Archon logo (green and pink)
|
||||
# Primary color (green) and secondary color (pink)
|
||||
st.markdown("""
|
||||
<style>
|
||||
:root {
|
||||
--primary-color: #00CC99; /* Green */
|
||||
--secondary-color: #EB2D8C; /* Pink */
|
||||
--text-color: #262730;
|
||||
}
|
||||
|
||||
/* Style the buttons */
|
||||
.stButton > button {
|
||||
color: white;
|
||||
border: 2px solid var(--primary-color);
|
||||
padding: 0.5rem 1rem;
|
||||
font-weight: bold;
|
||||
transition: all 0.3s ease;
|
||||
}
|
||||
|
||||
.stButton > button:hover {
|
||||
color: white;
|
||||
border: 2px solid var(--secondary-color);
|
||||
}
|
||||
|
||||
/* Override Streamlit's default focus styles that make buttons red */
|
||||
.stButton > button:focus,
|
||||
.stButton > button:focus:hover,
|
||||
.stButton > button:active,
|
||||
.stButton > button:active:hover {
|
||||
color: white !important;
|
||||
border: 2px solid var(--secondary-color) !important;
|
||||
box-shadow: none !important;
|
||||
outline: none !important;
|
||||
}
|
||||
|
||||
/* Style headers */
|
||||
h1, h2, h3 {
|
||||
color: var(--primary-color);
|
||||
}
|
||||
|
||||
/* Hide spans within h3 elements */
|
||||
h1 span, h2 span, h3 span {
|
||||
display: none !important;
|
||||
visibility: hidden;
|
||||
width: 0;
|
||||
height: 0;
|
||||
opacity: 0;
|
||||
position: absolute;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
/* Style code blocks */
|
||||
pre {
|
||||
border-left: 4px solid var(--primary-color);
|
||||
}
|
||||
|
||||
/* Style links */
|
||||
a {
|
||||
color: var(--secondary-color);
|
||||
}
|
||||
|
||||
/* Style the chat messages */
|
||||
.stChatMessage {
|
||||
border-left: 4px solid var(--secondary-color);
|
||||
}
|
||||
|
||||
/* Style the chat input */
|
||||
.stChatInput > div {
|
||||
border: 2px solid var(--primary-color) !important;
|
||||
}
|
||||
|
||||
/* Remove red outline on focus */
|
||||
.stChatInput > div:focus-within {
|
||||
box-shadow: none !important;
|
||||
border: 2px solid var(--secondary-color) !important;
|
||||
outline: none !important;
|
||||
}
|
||||
|
||||
/* Remove red outline on all inputs when focused */
|
||||
input:focus, textarea:focus, [contenteditable]:focus {
|
||||
box-shadow: none !important;
|
||||
border-color: var(--secondary-color) !important;
|
||||
outline: none !important;
|
||||
}
|
||||
|
||||
</style>
|
||||
""", unsafe_allow_html=True)
|
||||
|
||||
# Helper function to create a button that opens a tab in a new window
|
||||
def create_new_tab_button(label, tab_name, key=None, use_container_width=False):
    """Create a button that opens a specified tab in a new browser window."""
    # Derive a widget key from the tab name when the caller supplies none.
    button_key = key if key is not None else f"new_tab_{tab_name.lower().replace(' ', '_')}"

    # Resolve the app's base URL, falling back to the default localhost address
    # when it is absent from the query parameters.
    base_url = st.query_params.get("base_url", "") or "http://localhost:8501"

    # URL that selects the requested tab via a query parameter.
    target_url = f"{base_url}/?tab={tab_name}"

    # NOTE(review): webbrowser opens the tab on the machine running the
    # Streamlit server, so this only works when server and client are the
    # same machine — confirm intended deployment.
    if st.button(label, key=button_key, use_container_width=use_container_width):
        webbrowser.open_new_tab(target_url)
|
||||
# Load custom CSS styles
|
||||
load_css()
|
||||
|
||||
# Function to reload the archon_graph module
|
||||
def reload_archon_graph():
|
||||
@@ -201,349 +80,6 @@ def reload_archon_graph():
|
||||
# Configure logfire to suppress warnings (optional)
|
||||
logfire.configure(send_to_logfire='never')
|
||||
|
||||
@st.cache_resource
|
||||
def get_thread_id():
|
||||
return str(uuid.uuid4())
|
||||
|
||||
thread_id = get_thread_id()
|
||||
|
||||
async def run_agent_with_streaming(user_input: str):
    """
    Run the agent with streaming text for the user_input prompt,
    while maintaining the entire conversation in `st.session_state.messages`.
    """
    run_config = {"configurable": {"thread_id": thread_id}}

    # A history of exactly one message means this is the opening user turn,
    # so we start a fresh graph run; otherwise we resume the interrupted one.
    if len(st.session_state.messages) == 1:
        graph_input = {"latest_user_message": user_input}
    else:
        graph_input = Command(resume=user_input)

    async for msg in agentic_flow.astream(graph_input, run_config, stream_mode="custom"):
        yield msg
|
||||
|
||||
def generate_mcp_config(ide_type):
    """
    Generate MCP configuration for the selected IDE type.

    Args:
        ide_type: One of "Windsurf", "Cursor", or "Cline/Roo Code".

    Returns:
        A (python_config, docker_config) pair of strings: JSON blobs for
        Windsurf and Cline/Roo Code, plain command lines for Cursor, or a
        pair of "Unknown IDE type selected" sentinels for anything else.
    """
    # Get the absolute path to the current directory
    base_path = os.path.abspath(os.path.dirname(__file__))

    # Determine the correct python path based on the OS
    if platform.system() == "Windows":
        python_path = os.path.join(base_path, 'venv', 'Scripts', 'python.exe')
    else:  # macOS or Linux
        python_path = os.path.join(base_path, 'venv', 'bin', 'python')

    server_script_path = os.path.join(base_path, 'mcp', 'mcp_server.py')

    # Create the config dictionary for Python
    python_config = {
        "mcpServers": {
            "archon": {
                "command": python_path,
                "args": [server_script_path]
            }
        }
    }

    # Create the config dictionary for Docker
    docker_config = {
        "mcpServers": {
            "archon": {
                "command": "docker",
                "args": [
                    "run",
                    "-i",
                    "--rm",
                    "-e",
                    "GRAPH_SERVICE_URL",
                    "archon-mcp:latest"
                ],
                "env": {
                    "GRAPH_SERVICE_URL": "http://host.docker.internal:8100"
                }
            }
        }
    }

    # Return appropriate configuration based on IDE type
    if ide_type == "Windsurf":
        return json.dumps(python_config, indent=2), json.dumps(docker_config, indent=2)
    elif ide_type == "Cursor":
        # Cursor takes raw command strings rather than a JSON config.
        return f"{python_path} {server_script_path}", f"docker run -i --rm -e GRAPH_SERVICE_URL=http://host.docker.internal:8100 archon-mcp:latest"
    # BUG FIX: mcp_tab() stores the selection as "Cline/Roo Code", but this
    # branch previously tested for "Cline", so the Cline/Roo Code path always
    # fell through to the "Unknown IDE type selected" sentinel.
    elif ide_type == "Cline/Roo Code":
        return json.dumps(python_config, indent=2), json.dumps(docker_config, indent=2)  # Same format as Windsurf
    else:
        return "Unknown IDE type selected", "Unknown IDE type selected"
|
||||
|
||||
def mcp_tab():
    """Display the MCP configuration interface.

    Lets the user pick their AI IDE (Windsurf, Cursor, or Cline/Roo Code),
    shows the matching Docker and Python MCP configurations produced by
    `generate_mcp_config`, and prints per-IDE setup instructions. The chosen
    IDE is remembered in `st.session_state.selected_ide` across reruns.
    """
    st.header("MCP Configuration")
    st.write("Select your AI IDE to get the appropriate MCP configuration:")

    # IDE selection with side-by-side buttons
    col1, col2, col3 = st.columns(3)

    with col1:
        windsurf_button = st.button("Windsurf", use_container_width=True, key="windsurf_button")
    with col2:
        cursor_button = st.button("Cursor", use_container_width=True, key="cursor_button")
    with col3:
        cline_button = st.button("Cline/Roo Code", use_container_width=True, key="cline_button")

    # Initialize session state for selected IDE if not present
    if "selected_ide" not in st.session_state:
        st.session_state.selected_ide = None

    # Update selected IDE based on button clicks
    if windsurf_button:
        st.session_state.selected_ide = "Windsurf"
    elif cursor_button:
        st.session_state.selected_ide = "Cursor"
    elif cline_button:
        st.session_state.selected_ide = "Cline/Roo Code"

    # Display configuration if an IDE is selected
    if st.session_state.selected_ide:
        selected_ide = st.session_state.selected_ide
        st.subheader(f"MCP Configuration for {selected_ide}")
        python_config, docker_config = generate_mcp_config(selected_ide)

        # Configuration type tabs
        config_tab1, config_tab2 = st.tabs(["Docker Configuration", "Python Configuration"])

        with config_tab1:
            st.markdown("### Docker Configuration")
            # Cursor configs are plain command strings, not JSON, so skip highlighting.
            st.code(docker_config, language="json" if selected_ide != "Cursor" else None)

            st.markdown("#### Requirements:")
            st.markdown("- Docker installed")
            st.markdown("- Run the setup script to build and start both containers:")
            st.code("python run_docker.py", language="bash")

        with config_tab2:
            st.markdown("### Python Configuration")
            st.code(python_config, language="json" if selected_ide != "Cursor" else None)

            st.markdown("#### Requirements:")
            st.markdown("- Python 3.11+ installed")
            st.markdown("- Virtual environment created and activated")
            st.markdown("- All dependencies installed via `pip install -r requirements.txt`")
            st.markdown("- Must be running Archon not within a container")

        # Instructions based on IDE type
        st.markdown("---")
        st.markdown("### Setup Instructions")

        if selected_ide == "Windsurf":
            st.markdown("""
            #### How to use in Windsurf:
            1. Click on the hammer icon above the chat input
            2. Click on "Configure"
            3. Paste the JSON from your preferred configuration tab above
            4. Click "Refresh" next to "Configure"
            """)
        elif selected_ide == "Cursor":
            st.markdown("""
            #### How to use in Cursor:
            1. Go to Cursor Settings > Features > MCP
            2. Click on "+ Add New MCP Server"
            3. Name: Archon
            4. Type: command (equivalent to stdio)
            5. Command: Paste the command from your preferred configuration tab above
            """)
        elif selected_ide == "Cline/Roo Code":
            st.markdown("""
            #### How to use in Cline or Roo Code:
            1. From the Cline/Roo Code extension, click the "MCP Server" tab
            2. Click the "Edit MCP Settings" button
            3. The MCP settings file should be displayed in a tab in VS Code
            4. Paste the JSON from your preferred configuration tab above
            5. Cline/Roo Code will automatically detect and start the MCP server
            """)
|
||||
|
||||
async def chat_tab():
    """Render the chat interface for talking to Archon.

    Replays the stored conversation from session state, accepts a new
    prompt, streams the assistant's answer into the UI chunk by chunk,
    and appends both sides of the exchange to the history.
    """
    st.write("Describe to me an AI agent you want to build and I'll code it for you with Pydantic AI.")
    st.write("Example: Build me an AI agent that can search the web with the Brave API.")

    # Make sure the conversation history exists before touching it.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the stored conversation on every Streamlit rerun.
    for entry in st.session_state.messages:
        role = entry["type"]
        if role in ("human", "ai", "system"):
            with st.chat_message(role):
                st.markdown(entry["content"])

    # Prompt box at the bottom of the chat; nothing else to do until the
    # user submits something.
    prompt = st.chat_input("What do you want to build today?")
    if not prompt:
        return

    # Record and echo the new user request.
    st.session_state.messages.append({"type": "human", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Stream the assistant's reply into a single placeholder as chunks
    # arrive, so the user sees the response grow in real time.
    streamed_reply = ""
    with st.chat_message("assistant"):
        live_slot = st.empty()
        async for piece in run_agent_with_streaming(prompt):
            streamed_reply += piece
            live_slot.markdown(streamed_reply)

    st.session_state.messages.append({"type": "ai", "content": streamed_reply})
|
||||
|
||||
def intro_tab():
    """Display the introduction and setup guide for Archon.

    Renders the banner image, a welcome blurb, and a six-step expandable
    setup guide (environment, database, documentation crawling, agent
    service, MCP, usage). Each step includes a button that opens the
    corresponding section in a new browser tab, and the page closes with
    links to additional resources.
    """
    # Display the banner image
    st.image("public/Archon.png", use_container_width=True)

    # Welcome message
    st.markdown("""
    # Welcome to Archon!

    Archon is an AI meta-agent designed to autonomously build, refine, and optimize other AI agents.

    It serves both as a practical tool for developers and as an educational framework demonstrating the evolution of agentic systems.
    Archon is developed in iterations, starting with a simple Pydantic AI agent that can build other Pydantic AI agents,
    all the way to a full agentic workflow using LangGraph that can build other AI agents with any framework.

    Through its iterative development, Archon showcases the power of planning, feedback loops, and domain-specific knowledge in creating robust AI agents.
    """)

    # Setup guide with expandable sections
    st.markdown("## Setup Guide")
    st.markdown("Follow these concise steps to get Archon up and running (IMPORTANT: come back here after each step):")

    # Step 1: Environment Configuration (expanded by default as the entry point)
    with st.expander("Step 1: Environment Configuration", expanded=True):
        st.markdown("""
        ### Environment Configuration

        First, you need to set up your environment variables:

        1. Go to the **Environment** tab
        2. Configure the following essential variables:
           - `BASE_URL`: API endpoint (OpenAI, OpenRouter, or Ollama)
           - `LLM_API_KEY`: Your API key for the LLM service
           - `OPENAI_API_KEY`: Required for embeddings
           - `SUPABASE_URL`: Your Supabase project URL
           - `SUPABASE_SERVICE_KEY`: Your Supabase service key
           - `PRIMARY_MODEL`: Main agent model (e.g., gpt-4o-mini)
           - `REASONER_MODEL`: Planning model (e.g., o3-mini)

        These settings determine how Archon connects to external services and which models it uses.
        """)
        # Add a button to navigate to the Environment tab
        create_new_tab_button("Go to Environment Section (New Tab)", "Environment", key="goto_env", use_container_width=True)

    # Step 2: Database Setup
    with st.expander("Step 2: Database Setup", expanded=False):
        st.markdown("""
        ### Database Setup

        Archon uses Supabase for vector storage and retrieval:

        1. Go to the **Database** tab
        2. Select your embedding dimensions (1536 for OpenAI, 768 for nomic-embed-text)
        3. Follow the instructions to create the `site_pages` table

        This creates the necessary tables, indexes, and functions for vector similarity search.
        """)
        # Add a button to navigate to the Database tab
        create_new_tab_button("Go to Database Section (New Tab)", "Database", key="goto_db", use_container_width=True)

    # Step 3: Documentation Crawling
    with st.expander("Step 3: Documentation Crawling", expanded=False):
        st.markdown("""
        ### Documentation Crawling

        Populate the database with framework documentation:

        1. Go to the **Documentation** tab
        2. Click on "Crawl Pydantic AI Docs"
        3. Wait for the crawling process to complete

        This step downloads and processes documentation, creating embeddings for semantic search.
        """)
        # Add a button to navigate to the Documentation tab
        create_new_tab_button("Go to the Documentation Section (New Tab)", "Documentation", key="goto_docs", use_container_width=True)

    # Step 4: Agent Service
    with st.expander("Step 4: Agent Service Setup (for MCP)", expanded=False):
        st.markdown("""
        ### MCP Agent Service Setup

        Start the graph service for agent generation:

        1. Go to the **Agent Service** tab
        2. Click on "Start Agent Service"
        3. Verify the service is running

        The agent service powers the LangGraph workflow for agent creation.
        """)
        # Add a button to navigate to the Agent Service tab
        create_new_tab_button("Go to Agent Service Section (New Tab)", "Agent Service", key="goto_service", use_container_width=True)

    # Step 5: MCP Configuration (Optional)
    with st.expander("Step 5: MCP Configuration (Optional)", expanded=False):
        st.markdown("""
        ### MCP Configuration

        For integration with AI IDEs:

        1. Go to the **MCP** tab
        2. Select your IDE (Windsurf, Cursor, or Cline/Roo Code)
        3. Follow the instructions to configure your IDE

        This enables you to use Archon directly from your AI-powered IDE.
        """)
        # Add a button to navigate to the MCP tab
        create_new_tab_button("Go to MCP Section (New Tab)", "MCP", key="goto_mcp", use_container_width=True)

    # Step 6: Using Archon
    with st.expander("Step 6: Using Archon", expanded=False):
        st.markdown("""
        ### Using Archon

        Once everything is set up:

        1. Go to the **Chat** tab
        2. Describe the agent you want to build
        3. Archon will plan and generate the necessary code

        You can also use Archon directly from your AI IDE if you've configured MCP.
        """)
        # Add a button to navigate to the Chat tab
        create_new_tab_button("Go to Chat Section (New Tab)", "Chat", key="goto_chat", use_container_width=True)

    # Resources
    st.markdown("""
    ## Additional Resources

    - [GitHub Repository](https://github.com/coleam00/archon)
    - [Archon Community Forum](https://thinktank.ottomator.ai/c/archon/30)
    - [GitHub Kanban Board](https://github.com/users/coleam00/projects/1)
    """)
|
||||
|
||||
def documentation_tab():
|
||||
"""Display the documentation interface"""
|
||||
st.header("Documentation")
|
||||
|
||||
@@ -5,6 +5,8 @@ import inspect
|
||||
import json
|
||||
from typing import Optional
|
||||
from dotenv import load_dotenv
|
||||
import streamlit as st
|
||||
import webbrowser
|
||||
|
||||
# Load environment variables from .env file
|
||||
load_dotenv()
|
||||
@@ -109,3 +111,23 @@ def log_node_execution(func):
|
||||
write_to_log(f"Error in node {func_name}: {str(e)}")
|
||||
raise
|
||||
return wrapper
|
||||
|
||||
# Helper function to create a button that opens a tab in a new window
|
||||
def create_new_tab_button(label, tab_name, key=None, use_container_width=False):
    """Create a button that opens a specified tab in a new browser window.

    Args:
        label: Text shown on the button.
        tab_name: Name of the tab to open; used both for the default widget
            key and as the ``tab`` query parameter of the opened URL.
        key: Optional unique Streamlit widget key. Defaults to a key derived
            from ``tab_name``.
        use_container_width: Whether the button stretches to fill its
            container's width.
    """
    from urllib.parse import quote

    # Derive a stable, unique widget key from the tab name if none was given.
    if key is None:
        key = f"new_tab_{tab_name.lower().replace(' ', '_')}"

    # Get the base URL; prefer an explicit base_url query parameter,
    # falling back to the default localhost URL when it is absent.
    base_url = st.query_params.get("base_url", "")
    if not base_url:
        base_url = "http://localhost:8501"

    # URL-encode the tab name so names containing spaces (e.g. "Agent
    # Service") produce a valid URL instead of one with a raw space; the
    # reader of the query parameter receives the decoded name unchanged.
    new_tab_url = f"{base_url}/?tab={quote(tab_name)}"

    # NOTE(review): webbrowser opens the tab on the machine running the
    # Streamlit process, so this only works when the app runs locally —
    # confirm this is acceptable for remote/hosted deployments.
    if st.button(label, key=key, use_container_width=use_container_width):
        webbrowser.open_new_tab(new_tab_url)
|
||||
|
||||
Reference in New Issue
Block a user