From fc3c77e943b4220250283837c0d2ae4820ec9cc3 Mon Sep 17 00:00:00 2001
From: cannuri <91494156+cannuri@users.noreply.github.com>
Date: Sun, 9 Mar 2025 13:06:49 +0100
Subject: [PATCH] Mention Roo Code alongside Cline (#48)

---
 README.md                                     |  2 +-
 future_enhancements.py                        |  2 +-
 iterations/v4-streamlit-ui-overhaul/README.md |  4 ++--
 .../v4-streamlit-ui-overhaul/streamlit_ui.py  |  8 ++++----
 streamlit_ui.py                               | 14 +++++++-------
 5 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/README.md b/README.md
index 9f414fe6..f8d009d7 100644
--- a/README.md
+++ b/README.md
@@ -194,7 +194,7 @@ The Docker implementation consists of two containers:
    - Implements the Model Context Protocol for AI IDE integration
    - Built from the mcp/Dockerfile
    - Communicates with the main container's Graph Service
-   - Provides a standardized interface for AI IDEs like Windsurf, Cursor, and Cline
+   - Provides a standardized interface for AI IDEs like Windsurf, Cursor, Cline, and Roo Code
 
 When running with Docker, the `run_docker.py` script automates building and starting both containers with the proper configuration.
 
diff --git a/future_enhancements.py b/future_enhancements.py
index 27a86d13..8f18f179 100644
--- a/future_enhancements.py
+++ b/future_enhancements.py
@@ -759,7 +759,7 @@ def future_enhancements_tab():
         - Integrate seamlessly with existing development workflows
         """)
 
-        st.warning("The Model Context Protocol (MCP) is an emerging standard for AI assistant integration with IDEs like Windsurf, Cursor, and Cline.")
+        st.warning("The Model Context Protocol (MCP) is an emerging standard for AI assistant integration with IDEs like Windsurf, Cursor, Cline, and Roo Code.")
 
         # Other Frameworks
         with st.expander("Other Frameworks besides Pydantic AI"):
diff --git a/iterations/v4-streamlit-ui-overhaul/README.md b/iterations/v4-streamlit-ui-overhaul/README.md
index 7378329a..82f0a3aa 100644
--- a/iterations/v4-streamlit-ui-overhaul/README.md
+++ b/iterations/v4-streamlit-ui-overhaul/README.md
@@ -129,7 +129,7 @@ The Agent Service tab allows you to manage the agent service:
 
 The MCP tab simplifies the process of configuring MCP for AI IDEs:
 
-- Select your IDE (Windsurf, Cursor, or Cline)
+- Select your IDE (Windsurf, Cursor, Cline, or Roo Code)
 - Generate configuration commands or JSON
 - Copy configuration to clipboard
 - Get step-by-step instructions for your specific IDE
@@ -177,7 +177,7 @@ The Docker implementation consists of two containers:
    - Implements the Model Context Protocol for AI IDE integration
    - Built from the mcp/Dockerfile
    - Communicates with the main container's Graph Service
-   - Provides a standardized interface for AI IDEs like Windsurf, Cursor, and Cline
+   - Provides a standardized interface for AI IDEs like Windsurf, Cursor, Cline, and Roo Code
 
 When running with Docker, the `run_docker.py` script automates building and starting both containers with the proper configuration.
 
diff --git a/iterations/v4-streamlit-ui-overhaul/streamlit_ui.py b/iterations/v4-streamlit-ui-overhaul/streamlit_ui.py
index 1e13a260..3e8aa05c 100644
--- a/iterations/v4-streamlit-ui-overhaul/streamlit_ui.py
+++ b/iterations/v4-streamlit-ui-overhaul/streamlit_ui.py
@@ -364,12 +364,12 @@ def mcp_tab():
             """)
         elif selected_ide == "Cline":
             st.markdown("""
-            #### How to use in Cline:
-            1. From the Cline extension, click the "MCP Server" tab
+            #### How to use in Cline or Roo Code:
+            1. From the Cline/Roo Code extension, click the "MCP Server" tab
             2. Click the "Edit MCP Settings" button
             3. The MCP settings file should be displayed in a tab in VS Code
             4. Paste the JSON from your preferred configuration tab above
-            5. Cline will automatically detect and start the MCP server
+            5. Cline/Roo Code will automatically detect and start the MCP server
             """)
 
 async def chat_tab():
@@ -511,7 +511,7 @@ def intro_tab():
 
     For integration with AI IDEs:
     1. Go to the **MCP** tab
-    2. Select your IDE (Windsurf, Cursor, or Cline)
+    2. Select your IDE (Windsurf, Cursor, Cline, or Roo Code)
     3. Follow the instructions to configure your IDE
 
     This enables you to use Archon directly from your AI-powered IDE.
diff --git a/streamlit_ui.py b/streamlit_ui.py
index 3623ec30..37c4f92a 100644
--- a/streamlit_ui.py
+++ b/streamlit_ui.py
@@ -299,7 +299,7 @@ def mcp_tab():
     with col2:
         cursor_button = st.button("Cursor", use_container_width=True, key="cursor_button")
     with col3:
-        cline_button = st.button("Cline", use_container_width=True, key="cline_button")
+        cline_button = st.button("Cline/Roo Code", use_container_width=True, key="cline_button")
 
     # Initialize session state for selected IDE if not present
     if "selected_ide" not in st.session_state:
@@ -311,7 +311,7 @@ def mcp_tab():
     elif cursor_button:
         st.session_state.selected_ide = "Cursor"
     elif cline_button:
-        st.session_state.selected_ide = "Cline"
+        st.session_state.selected_ide = "Cline/Roo Code"
 
     # Display configuration if an IDE is selected
     if st.session_state.selected_ide:
@@ -362,14 +362,14 @@ def mcp_tab():
             4. Type: command (equivalent to stdio)
             5. Command: Paste the command from your preferred configuration tab above
             """)
-        elif selected_ide == "Cline":
+        elif selected_ide == "Cline/Roo Code":
             st.markdown("""
-            #### How to use in Cline:
-            1. From the Cline extension, click the "MCP Server" tab
+            #### How to use in Cline or Roo Code:
+            1. From the Cline/Roo Code extension, click the "MCP Server" tab
             2. Click the "Edit MCP Settings" button
             3. The MCP settings file should be displayed in a tab in VS Code
             4. Paste the JSON from your preferred configuration tab above
-            5. Cline will automatically detect and start the MCP server
+            5. Cline/Roo Code will automatically detect and start the MCP server
             """)
 
 async def chat_tab():
@@ -511,7 +511,7 @@ def intro_tab():
 
     For integration with AI IDEs:
     1. Go to the **MCP** tab
-    2. Select your IDE (Windsurf, Cursor, or Cline)
+    2. Select your IDE (Windsurf, Cursor, or Cline/Roo Code)
     3. Follow the instructions to configure your IDE
 
     This enables you to use Archon directly from your AI-powered IDE.
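
The JSON that step 4 of the Cline/Roo Code instructions asks the user to paste follows the MCP
server-registration shape those extensions read from their MCP settings file: an "mcpServers"
object keyed by server name, each entry giving a command and its arguments. The sketch below is
illustrative only; the server name, command, script path, and the build_mcp_settings helper are
assumptions rather than the exact values Archon's MCP tab generates, so copy the JSON from the
tab itself when configuring a real IDE.

    import json

    # Minimal sketch of the settings blob pasted in step 4 of the Cline/Roo Code
    # instructions. The server name, command, and script path below are assumed
    # for illustration; use the values generated by Archon's MCP tab in practice.
    def build_mcp_settings(command: str, args: list[str]) -> str:
        settings = {
            "mcpServers": {
                "archon": {              # display name shown in the IDE (assumed)
                    "command": command,  # e.g. "python" (assumed)
                    "args": args,        # e.g. ["mcp/mcp_server.py"] (assumed)
                }
            }
        }
        return json.dumps(settings, indent=2)

    print(build_mcp_settings("python", ["mcp/mcp_server.py"]))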