Compare commits

...

30 Commits

Author SHA1 Message Date
samanhappy
b279a1a62c chore: update mcp sdk dependencies to latest versions (#546) 2026-01-01 22:41:46 +08:00
dependabot[bot]
760cc462b9 chore(deps-dev): bump tsx from 4.20.5 to 4.21.0 (#541)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-01 22:01:13 +08:00
dependabot[bot]
431bc8f6f8 chore(deps-dev): bump next from 15.5.9 to 16.1.1 (#543)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-01 22:00:51 +08:00
dependabot[bot]
fb6af75f5b chore(deps-dev): bump ts-jest from 29.4.1 to 29.4.6 (#540)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-01 22:00:17 +08:00
dependabot[bot]
33b440973f chore(deps): bump axios from 1.13.1 to 1.13.2 (#544)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-01 21:59:57 +08:00
dependabot[bot]
2d248e953e chore(deps): bump dotenv from 16.6.1 to 17.2.3 (#542)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-01 21:59:38 +08:00
samanhappy
d36c6ac5ad fix: rename DATABASE_URL to DB_URL for consistency across configurations (#545) 2026-01-01 21:58:11 +08:00
Zhyim
0be6c36e12 feat: implement pagination for server list with customizable items pe… (#534) 2026-01-01 13:36:09 +08:00
samanhappy
7f2fca9636 feat: add proxy configuration support for STDIO servers on Linux and macOS (#537) 2026-01-01 12:45:50 +08:00
samanhappy
8ae542bdab Add server renaming functionality (#533) 2025-12-30 18:45:33 +08:00
samanhappy
88ce94b988 docs: update and expand MCPHub development guide and agent instructions (#532) 2025-12-28 14:25:28 +08:00
samanhappy
7cc330e721 fix: ensure database is initialized before saving tool embeddings (#531) 2025-12-28 12:19:21 +08:00
Copilot
ab338e80a7 Add custom access type for bearer keys to support combined group and server scoping (#530)
Co-authored-by: samanhappy <samanhappy@gmail.com>
2025-12-27 16:16:50 +08:00
cheezmil
b00e1c81fc fix: Found 1 error in src/services/keepAliveService.ts:51 #521 (#522)
Co-authored-by: cheestard <134115886+cheestard@users.noreply.github.com>
2025-12-25 09:15:07 +08:00
samanhappy
33eae50bd3 Refactor smart routing configuration and async database handling (#519) 2025-12-20 12:16:09 +08:00
samanhappy
eb1a965e45 feat: add authentication status listener to refresh settings on user login (#518) 2025-12-17 18:34:07 +08:00
samanhappy
97114dcabb feat: implement batch saving for smart routing configuration (#517) 2025-12-17 15:26:53 +08:00
samanhappy
350a022ea3 feat: enhance login error handling and add server unavailable message (#516) 2025-12-17 13:24:07 +08:00
samanhappy
292876a991 feat: update PostgreSQL images to pgvector/pgvector:pg17 across configurations (#513) 2025-12-16 15:40:06 +08:00
samanhappy
d6a9146e27 feat: enhance OAuth token logging and add authentication error handling in tool calls (#512) 2025-12-16 15:16:43 +08:00
samanhappy
1f3a6794ea feat: enhance BearerKeyDaoImpl to handle migration and caching behavior for bearer keys (#507) 2025-12-14 20:40:57 +08:00
samanhappy
c673afb97e Add HTTP/HTTPS proxy configuration and environment variable support (#506) 2025-12-14 15:44:44 +08:00
samanhappy
01855ca2ca feat: add bearer authentication key management with migration support (#503) 2025-12-13 16:46:58 +08:00
dependabot[bot]
88efad9d60 chore(deps-dev): bump next from 15.5.7 to 15.5.9 (#501)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-12 14:50:18 +08:00
samanhappy
2028233b53 Add OpenAPI support and enhance settings aggregation (#500) 2025-12-11 17:42:50 +08:00
samanhappy
1dfa0a990b Add batch server and group creation functionality (#499) 2025-12-11 14:21:58 +08:00
Alptekin Gülcan
ab7c210281 Optimizing API Operations: Simplified operationId Values and Large String Parameter Management (#488) 2025-12-07 13:11:35 +08:00
Copilot
6bd28ec89b Upgrade react and react-dom to 19.2.1 (#489)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: samanhappy <2755122+samanhappy@users.noreply.github.com>
2025-12-06 15:58:14 +08:00
Copilot
41a42f82d0 Upgrade js-yaml to 4.1.1 (#486)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: samanhappy <2755122+samanhappy@users.noreply.github.com>
2025-12-05 18:11:26 +08:00
Copilot
7aa3ff3bb1 Upgrade glob to version 10.5.0 (#485)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: samanhappy <2755122+samanhappy@users.noreply.github.com>
2025-12-05 17:52:46 +08:00
85 changed files with 5854 additions and 1384 deletions

View File

@@ -1,272 +0,0 @@
# MCPHub Coding Instructions
**ALWAYS follow these instructions first and only fall back to additional search and context gathering if the information here is incomplete or found to be in error.**
## Project Overview
MCPHub is a TypeScript/Node.js MCP (Model Context Protocol) server management hub that provides unified access through HTTP endpoints. It serves as a centralized dashboard for managing multiple MCP servers with real-time monitoring, authentication, and flexible routing.
**Core Components:**
- **Backend**: Express.js + TypeScript + ESM (`src/server.ts`)
- **Frontend**: React/Vite + Tailwind CSS (`frontend/`)
- **MCP Integration**: Connects multiple MCP servers (`src/services/mcpService.ts`)
- **Authentication**: JWT-based with bcrypt password hashing
- **Configuration**: JSON-based MCP server definitions (`mcp_settings.json`)
- **Documentation**: API docs and usage instructions (`docs/`)
## Working Effectively
### Bootstrap and Setup (CRITICAL - Follow Exact Steps)
```bash
# Install pnpm if not available
npm install -g pnpm
# Install dependencies - takes ~30 seconds
pnpm install
# Setup environment (optional)
cp .env.example .env
# Build and test to verify setup
pnpm lint # ~3 seconds - NEVER CANCEL
pnpm backend:build # ~5 seconds - NEVER CANCEL
pnpm test:ci # ~16 seconds - NEVER CANCEL. Set timeout to 60+ seconds
pnpm frontend:build # ~5 seconds - NEVER CANCEL
pnpm build # ~10 seconds total - NEVER CANCEL. Set timeout to 60+ seconds
```
**CRITICAL TIMING**: These commands are fast, but NEVER CANCEL them. Always wait for completion.
### Development Environment
```bash
# Start both backend and frontend (recommended for most development)
pnpm dev # Backend on :3001, Frontend on :5173
# OR start separately (required on Windows, optional on Linux/macOS)
# Terminal 1: Backend only
pnpm backend:dev # Runs on port 3000 (or PORT env var)
# Terminal 2: Frontend only
pnpm frontend:dev # Runs on port 5173, proxies API to backend
```
**NEVER CANCEL**: Development servers may take 10-15 seconds to fully initialize all MCP servers.
### Build Commands (Production)
```bash
# Full production build - takes ~10 seconds total
pnpm build # NEVER CANCEL - Set timeout to 60+ seconds
# Individual builds
pnpm backend:build # TypeScript compilation - ~5 seconds
pnpm frontend:build # Vite build - ~5 seconds
# Start production server
pnpm start # Requires dist/ and frontend/dist/ to exist
```
### Testing and Validation
```bash
# Run all tests - takes ~16 seconds with 73 tests
pnpm test:ci # NEVER CANCEL - Set timeout to 60+ seconds
# Development testing
pnpm test # Interactive mode
pnpm test:watch # Watch mode for development
pnpm test:coverage # With coverage report
# Code quality
pnpm lint # ESLint - ~3 seconds
pnpm format # Prettier formatting - ~3 seconds
```
**CRITICAL**: All tests MUST pass before committing. Do not modify tests to make them pass unless specifically required for your changes.
## Manual Validation Requirements
**ALWAYS perform these validation steps after making changes:**
### 1. Basic Application Functionality
```bash
# Start the application
pnpm dev
# Verify backend responds (in another terminal)
curl http://localhost:3000/api/health
# Expected: Should return health status
# Verify frontend serves
curl -I http://localhost:3000/
# Expected: HTTP 200 OK with HTML content
```
### 2. MCP Server Integration Test
```bash
# Check MCP servers are loading (look for log messages)
# Expected log output should include:
# - "Successfully connected client for server: [name]"
# - "Successfully listed [N] tools for server: [name]"
# - Some servers may fail due to missing API keys (normal in dev)
```
### 3. Build Verification
```bash
# Verify production build works
pnpm build
node scripts/verify-dist.js
# Expected: "✅ Verification passed! Frontend and backend dist files are present."
```
**NEVER skip these validation steps**. If any fail, debug and fix before proceeding.
## Project Structure and Key Files
### Critical Backend Files
- `src/index.ts` - Application entry point
- `src/server.ts` - Express server setup and middleware
- `src/services/mcpService.ts` - **Core MCP server management logic**
- `src/config/index.ts` - Configuration management
- `src/routes/` - HTTP route definitions
- `src/controllers/` - HTTP request handlers
- `src/dao/` - Data access layer (supports JSON file & PostgreSQL)
- `src/db/` - TypeORM entities & repositories (for PostgreSQL mode)
- `src/types/index.ts` - TypeScript type definitions
### DAO Layer (Dual Data Source)
MCPHub supports **JSON file** (default) and **PostgreSQL** storage:
- Set `USE_DB=true` + `DB_URL=postgresql://...` to use database
- When modifying data structures, update: `src/types/`, `src/dao/`, `src/db/entities/`, `src/db/repositories/`, `src/utils/migration.ts`
- See `AGENTS.md` for detailed DAO modification checklist
### Critical Frontend Files
- `frontend/src/` - React application source
- `frontend/src/pages/` - Page components (development entry point)
- `frontend/src/components/` - Reusable UI components
- `frontend/src/utils/fetchInterceptor.js` - Backend API interaction
### Configuration Files
- `mcp_settings.json` - **MCP server definitions and user accounts**
- `package.json` - Dependencies and scripts
- `tsconfig.json` - TypeScript configuration
- `jest.config.cjs` - Test configuration
- `.eslintrc.json` - Linting rules
### Docker and Deployment
- `Dockerfile` - Multi-stage build with Python base + Node.js
- `entrypoint.sh` - Docker startup script
- `bin/cli.js` - NPM package CLI entry point
## Development Process and Conventions
### Code Style Requirements
- **ESM modules**: Always use `.js` extensions in imports, not `.ts`
- **English only**: All code comments must be written in English
- **TypeScript strict**: Follow strict type checking rules
- **Import style**: `import { something } from './file.js'` (note .js extension)
### Key Configuration Notes
- **MCP servers**: Defined in `mcp_settings.json` with command/args
- **Endpoints**: `/mcp/{group|server}` and `/mcp/$smart` for routing
- **i18n**: Frontend uses react-i18next with files in `locales/` folder
- **Authentication**: JWT tokens with bcrypt password hashing
- **Default credentials**: admin/admin123 (configured in mcp_settings.json)
### Development Entry Points
- **Add MCP server**: Modify `mcp_settings.json` and restart
- **New API endpoint**: Add route in `src/routes/`, controller in `src/controllers/`
- **Frontend feature**: Start from `frontend/src/pages/` or `frontend/src/components/`
- **Add tests**: Follow patterns in `tests/` directory
### Common Development Tasks
#### Adding a new MCP server:
1. Add server definition to `mcp_settings.json`
2. Restart backend to load new server
3. Check logs for successful connection
4. Test via dashboard or API endpoints
#### API development:
1. Define route in `src/routes/`
2. Implement controller in `src/controllers/`
3. Add types in `src/types/index.ts` if needed
4. Write tests in `tests/controllers/`
#### Frontend development:
1. Create/modify components in `frontend/src/components/`
2. Add pages in `frontend/src/pages/`
3. Update routing if needed
4. Test in development mode with `pnpm frontend:dev`
#### Documentation:
1. Update or add docs in `docs/` folder
2. Ensure README.md reflects any major changes
## Validation and CI Requirements
### Before Committing - ALWAYS Run:
```bash
pnpm lint # Must pass - ~3 seconds
pnpm backend:build # Must compile - ~5 seconds
pnpm test:ci # All tests must pass - ~16 seconds
pnpm build # Full build must work - ~10 seconds
```
**CRITICAL**: CI will fail if any of these commands fail. Fix issues locally first.
### CI Pipeline (.github/workflows/ci.yml)
- Runs on Node.js 20.x
- Tests: linting, type checking, unit tests with coverage
- **NEVER CANCEL**: CI builds may take 2-3 minutes total
## Troubleshooting
### Common Issues
- **"uvx command not found"**: Some MCP servers require `uvx` (the tool runner that ships with the `uv` Python package manager); this is expected in development
- **Port already in use**: Change PORT environment variable or kill existing processes
- **Frontend not loading**: Ensure frontend was built with `pnpm frontend:build`
- **MCP server connection failed**: Check server command/args in `mcp_settings.json`
### Build Failures
- **TypeScript errors**: Run `pnpm backend:build` to see compilation errors
- **Test failures**: Run `pnpm test:verbose` for detailed test output
- **Lint errors**: Run `pnpm lint` and fix reported issues
### Development Issues
- **Backend not starting**: Check for port conflicts, verify `mcp_settings.json` syntax
- **Frontend proxy errors**: Ensure backend is running before starting frontend
- **Hot reload not working**: Restart development server
## Performance Notes
- **Install time**: pnpm install takes ~30 seconds
- **Build time**: Full build takes ~10 seconds
- **Test time**: Complete test suite takes ~16 seconds
- **Startup time**: Backend initialization takes 10-15 seconds (MCP server connections)
**Remember**: NEVER CANCEL any build or test commands. Always wait for completion even if they seem slow.

View File

@@ -76,7 +76,7 @@ jobs:
# services:
# postgres:
# image: postgres:15
# image: pgvector/pgvector:pg17
# env:
# POSTGRES_PASSWORD: postgres
# POSTGRES_DB: mcphub_test
@@ -106,7 +106,7 @@ jobs:
# - name: Run integration tests
# run: |
# export DATABASE_URL="postgresql://postgres:postgres@localhost:5432/mcphub_test"
# export DB_URL="postgresql://postgres:postgres@localhost:5432/mcphub_test"
# node test-integration.ts
# env:
# NODE_ENV: test

AGENTS.md
View File

@@ -1,26 +1,214 @@
# Repository Guidelines
# MCPHub Development Guide & Agent Instructions
These notes align current contributors around the code layout, daily commands, and collaboration habits that keep `@samanhappy/mcphub` moving quickly.
**ALWAYS follow these instructions first and only fall back to additional search and context gathering if the information here is incomplete or found to be in error.**
This document serves as the primary reference for all contributors and AI agents working on `@samanhappy/mcphub`. It provides comprehensive guidance on code organization, development workflow, and project conventions.
## Project Overview
MCPHub is a TypeScript/Node.js MCP (Model Context Protocol) server management hub that provides unified access through HTTP endpoints. It serves as a centralized dashboard for managing multiple MCP servers with real-time monitoring, authentication, and flexible routing.
**Core Components:**
- **Backend**: Express.js + TypeScript + ESM (`src/server.ts`)
- **Frontend**: React/Vite + Tailwind CSS (`frontend/`)
- **MCP Integration**: Connects multiple MCP servers (`src/services/mcpService.ts`)
- **Authentication**: JWT-based with bcrypt password hashing
- **Configuration**: JSON-based MCP server definitions (`mcp_settings.json`)
- **Documentation**: API docs and usage instructions (`docs/`)
## Bootstrap and Setup (CRITICAL - Follow Exact Steps)
```bash
# Install pnpm if not available
npm install -g pnpm
# Install dependencies - takes ~30 seconds
pnpm install
# Setup environment (optional)
cp .env.example .env
# Build and test to verify setup
pnpm lint # ~3 seconds - NEVER CANCEL
pnpm backend:build # ~5 seconds - NEVER CANCEL
pnpm test:ci # ~16 seconds - NEVER CANCEL. Set timeout to 60+ seconds
pnpm frontend:build # ~5 seconds - NEVER CANCEL
pnpm build # ~10 seconds total - NEVER CANCEL. Set timeout to 60+ seconds
```
**CRITICAL TIMING**: These commands are fast, but NEVER CANCEL them. Always wait for completion.
## Manual Validation Requirements
**ALWAYS perform these validation steps after making changes:**
### 1. Basic Application Functionality
```bash
# Start the application
pnpm dev
# Verify backend responds (in another terminal)
curl http://localhost:3000/api/health
# Expected: Should return health status
# Verify frontend serves
curl -I http://localhost:3000/
# Expected: HTTP 200 OK with HTML content
```
### 2. MCP Server Integration Test
```bash
# Check MCP servers are loading (look for log messages)
# Expected log output should include:
# - "Successfully connected client for server: [name]"
# - "Successfully listed [N] tools for server: [name]"
# - Some servers may fail due to missing API keys (normal in dev)
```
### 3. Build Verification
```bash
# Verify production build works
pnpm build
node scripts/verify-dist.js
# Expected: "✅ Verification passed! Frontend and backend dist files are present."
```
**NEVER skip these validation steps**. If any fail, debug and fix before proceeding.
## Project Structure & Module Organization
- Backend services live in `src`, grouped by responsibility (`controllers/`, `services/`, `dao/`, `routes/`, `utils/`), with `server.ts` orchestrating HTTP bootstrap.
- `frontend/src` contains the Vite + React dashboard; `frontend/public` hosts static assets and translations sit in `locales/`.
- Jest-aware test code is split between colocated specs (`src/**/*.{test,spec}.ts`) and higher-level suites in `tests/`; use `tests/utils/` helpers when exercising the CLI or SSE flows.
- Build artifacts and bundles are generated into `dist/`, `frontend/dist/`, and `coverage/`; never edit these manually.
### Critical Backend Files
- `src/index.ts` - Application entry point
- `src/server.ts` - Express server setup and middleware (orchestrating HTTP bootstrap)
- `src/services/mcpService.ts` - **Core MCP server management logic**
- `src/config/index.ts` - Configuration management
- `src/routes/` - HTTP route definitions
- `src/controllers/` - HTTP request handlers
- `src/dao/` - Data access layer (supports JSON file & PostgreSQL)
- `src/db/` - TypeORM entities & repositories (for PostgreSQL mode)
- `src/types/index.ts` - TypeScript type definitions and shared DTOs
- `src/utils/` - Utility functions and helpers
### Critical Frontend Files
- `frontend/src/` - React application source (Vite + React dashboard)
- `frontend/src/pages/` - Page components (development entry point)
- `frontend/src/components/` - Reusable UI components
- `frontend/src/utils/fetchInterceptor.js` - Backend API interaction
- `frontend/public/` - Static assets
### Configuration Files
- `mcp_settings.json` - **MCP server definitions and user accounts**
- `package.json` - Dependencies and scripts
- `tsconfig.json` - TypeScript configuration
- `jest.config.cjs` - Test configuration
- `.eslintrc.json` - Linting rules
### Test Organization
- Jest-aware test code is split between colocated specs (`src/**/*.{test,spec}.ts`) and higher-level suites in `tests/`
- Use `tests/utils/` helpers when exercising the CLI or SSE flows
- Mirror production directory names when adding new suites
- End filenames with `.test.ts` or `.spec.ts` for automatic discovery
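A minimal Jest spec following these conventions might look like the sketch below; the file path and the logic under test are illustrative, not taken from the repository:
```typescript
// tests/utils/groupServers.test.ts -- hypothetical suite mirroring a production module
describe('group server normalization', () => {
  it('treats plain string entries as full-tool access', () => {
    type GroupServer = string | { name: string; tools: string[] | 'all' };

    // Illustrative normalization: a bare string entry means "all tools enabled".
    const normalize = (entry: GroupServer) =>
      typeof entry === 'string' ? { name: entry, tools: 'all' as const } : entry;

    expect(normalize('github-server')).toEqual({ name: 'github-server', tools: 'all' });
    expect(normalize({ name: 'gitlab-server', tools: ['list_repos'] })).toEqual({
      name: 'gitlab-server',
      tools: ['list_repos'],
    });
  });
});
```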
### Build Artifacts
- `dist/` - Backend build output (TypeScript compilation)
- `frontend/dist/` - Frontend build output (Vite bundle)
- `coverage/` - Test coverage reports
- **Never edit these manually**
### Localization
- Translations sit in `locales/` (en.json, fr.json, tr.json, zh.json)
- Frontend uses react-i18next
### Docker and Deployment
- `Dockerfile` - Multi-stage build with Python base + Node.js
- `entrypoint.sh` - Docker startup script
- `bin/cli.js` - NPM package CLI entry point
## Build, Test, and Development Commands
- `pnpm dev` runs backend (`tsx watch src/index.ts`) and frontend (`vite`) together for local iteration.
- `pnpm backend:dev`, `pnpm frontend:dev`, and `pnpm frontend:preview` target each surface independently; prefer them when debugging one stack.
- `pnpm build` executes `pnpm backend:build` (TypeScript to `dist/`) and `pnpm frontend:build`; run before release or publishing.
- `pnpm test`, `pnpm test:watch`, and `pnpm test:coverage` drive Jest; `pnpm lint` and `pnpm format` enforce style via ESLint and Prettier.
### Development Environment
```bash
# Start both backend and frontend (recommended for most development)
pnpm dev # Backend on :3001, Frontend on :5173
# OR start separately (required on Windows, optional on Linux/macOS)
# Terminal 1: Backend only
pnpm backend:dev # Runs on port 3000 (or PORT env var)
# Terminal 2: Frontend only
pnpm frontend:dev # Runs on port 5173, proxies API to backend
# Frontend preview (production build)
pnpm frontend:preview # Preview production build
```
**NEVER CANCEL**: Development servers may take 10-15 seconds to fully initialize all MCP servers.
### Production Build
```bash
# Full production build - takes ~10 seconds total
pnpm build # NEVER CANCEL - Set timeout to 60+ seconds
# Individual builds
pnpm backend:build # TypeScript compilation to dist/ - ~5 seconds
pnpm frontend:build # Vite build to frontend/dist/ - ~5 seconds
# Start production server
pnpm start # Requires dist/ and frontend/dist/ to exist
```
Run `pnpm build` before release or publishing.
### Testing and Validation
```bash
# Run all tests - takes ~16 seconds with 73 tests
pnpm test:ci # NEVER CANCEL - Set timeout to 60+ seconds
# Development testing
pnpm test # Interactive mode
pnpm test:watch # Watch mode for development
pnpm test:coverage # With coverage report
# Code quality
pnpm lint # ESLint - ~3 seconds
pnpm format # Prettier formatting - ~3 seconds
```
**CRITICAL**: All tests MUST pass before committing. Do not modify tests to make them pass unless specifically required for your changes.
### Performance Notes
- **Install time**: pnpm install takes ~30 seconds
- **Build time**: Full build takes ~10 seconds
- **Test time**: Complete test suite takes ~16 seconds
- **Startup time**: Backend initialization takes 10-15 seconds (MCP server connections)
## Coding Style & Naming Conventions
- TypeScript everywhere; default to 2-space indentation and single quotes, letting Prettier settle formatting. ESLint configuration assumes ES modules.
- Name services and data access layers with suffixes (`UserService`, `AuthDao`), React components and files in `PascalCase`, and utility modules in `camelCase`.
- Keep DTOs and shared types in `src/types` to avoid duplication; re-export through index files only when it clarifies imports.
- **TypeScript everywhere**: Default to 2-space indentation and single quotes, letting Prettier settle formatting
- **ESM modules**: Always use `.js` extensions in imports, not `.ts` (e.g., `import { something } from './file.js'`); see the sketch after this list
- **English only**: All code comments must be written in English
- **TypeScript strict**: Follow strict type checking rules
- **Naming conventions**:
- Services and data access layers: Use suffixes (`UserService`, `AuthDao`)
- React components and files: `PascalCase`
- Utility modules: `camelCase`
- **Types and DTOs**: Keep in `src/types` to avoid duplication; re-export through index files only when it clarifies imports
- **ESLint configuration**: Assumes ES modules
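A short sketch of how these conventions combine; the module below is hypothetical, and the use of `command` assumes that field exists on `ServerConfig`:
```typescript
// src/services/ExampleService.ts -- hypothetical module, not part of the repository.
// ESM imports keep the .js extension, services use the *Service suffix,
// and shared types come from src/types.
import { ServerConfig } from '../types/index.js';

export class ExampleService {
  // English-only comments and strict typing throughout.
  describeServer(name: string, config: ServerConfig): string {
    return `${name}: ${config.command ?? 'remote server'}`;
  }
}
```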
## Testing Guidelines
@@ -28,12 +216,86 @@ These notes align current contributors around the code layout, daily commands, a
- Mirror production directory names when adding new suites and end filenames with `.test.ts` or `.spec.ts` for automatic discovery.
- Aim to maintain or raise coverage when touching critical flows (auth, OAuth, SSE); add integration tests under `tests/integration/` when touching cross-service logic.
## Key Configuration Notes
- **MCP servers**: Defined in `mcp_settings.json` with command/args
- **Endpoints**: `/mcp/{group|server}` and `/mcp/$smart` for routing (client sketch after this list)
- **i18n**: Frontend uses react-i18next with files in `locales/` folder
- **Authentication**: JWT tokens with bcrypt password hashing
- **Default credentials**: admin/admin123 (configured in mcp_settings.json)
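A minimal client sketch for the group endpoint, assuming the `@modelcontextprotocol/sdk` Streamable HTTP client; the base URL, group id, and token are placeholders:
```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';

const client = new Client({ name: 'mcphub-example-client', version: '1.0.0' });
const transport = new StreamableHTTPClientTransport(
  new URL('http://localhost:3000/mcp/my-group'), // /mcp/{group|server}, or /mcp/$smart
  { requestInit: { headers: { Authorization: 'Bearer <your-access-token>' } } },
);

await client.connect(transport);
console.log(await client.listTools());
```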
## Development Entry Points
### Adding a new MCP server
1. Add server definition to `mcp_settings.json`
2. Restart backend to load new server
3. Check logs for successful connection
4. Test via dashboard or API endpoints
### API development
1. Define route in `src/routes/`
2. Implement controller in `src/controllers/`
3. Add types in `src/types/index.ts` if needed
4. Write tests in `tests/controllers/`
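A condensed sketch of steps 1 and 2; the route path, file names, and handler are illustrative:
```typescript
// src/routes/exampleRoutes.ts (hypothetical file)
import { Router } from 'express';
import { getExampleStatus } from '../controllers/exampleController.js';

const router = Router();
router.get('/example/status', getExampleStatus);
export default router;

// src/controllers/exampleController.ts (hypothetical file)
import { Request, Response } from 'express';

export const getExampleStatus = (_req: Request, res: Response): void => {
  res.json({ success: true, data: { status: 'ok' } });
};
```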
### Frontend development
1. Create/modify components in `frontend/src/components/`
2. Add pages in `frontend/src/pages/`
3. Update routing if needed
4. Test in development mode with `pnpm frontend:dev`
### Documentation
1. Update or add docs in `docs/` folder
2. Ensure README.md reflects any major changes
## Commit & Pull Request Guidelines
- Follow the existing Conventional Commit pattern (`feat:`, `fix:`, `chore:`, etc.) with imperative, present-tense summaries and optional multi-line context.
- Each PR should describe the behavior change, list testing performed, and link issues; include before/after screenshots or GIFs for frontend tweaks.
- Re-run `pnpm build` and `pnpm test` before requesting review, and ensure generated artifacts stay out of the diff.
### Before Committing - ALWAYS Run
```bash
pnpm lint # Must pass - ~3 seconds
pnpm backend:build # Must compile - ~5 seconds
pnpm test:ci # All tests must pass - ~16 seconds
pnpm build # Full build must work - ~10 seconds
```
**CRITICAL**: CI will fail if any of these commands fail. Fix issues locally first.
### CI Pipeline (.github/workflows/ci.yml)
- Runs on Node.js 20.x
- Tests: linting, type checking, unit tests with coverage
- **NEVER CANCEL**: CI builds may take 2-3 minutes total
## Troubleshooting
### Common Issues
- **"uvx command not found"**: Some MCP servers require `uvx` (the tool runner that ships with the `uv` Python package manager); this is expected in development
- **Port already in use**: Change PORT environment variable or kill existing processes
- **Frontend not loading**: Ensure frontend was built with `pnpm frontend:build`
- **MCP server connection failed**: Check server command/args in `mcp_settings.json`
### Build Failures
- **TypeScript errors**: Run `pnpm backend:build` to see compilation errors
- **Test failures**: Run `pnpm test:verbose` for detailed test output
- **Lint errors**: Run `pnpm lint` and fix reported issues
### Development Issues
- **Backend not starting**: Check for port conflicts, verify `mcp_settings.json` syntax
- **Frontend proxy errors**: Ensure backend is running before starting frontend
- **Hot reload not working**: Restart development server
## DAO Layer & Dual Data Source
MCPHub supports **JSON file** (default) and **PostgreSQL** storage. Set `USE_DB=true` + `DB_URL` to switch.
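A minimal sketch of what the switch can look like; the factory and interface below are illustrative, not the actual DAO implementation:
```typescript
// Hypothetical selection between the JSON-file DAO and the PostgreSQL DAO.
export interface ExampleDao {
  findAll(): Promise<unknown[]>;
}

export function createExampleDao(jsonImpl: ExampleDao, dbImpl: ExampleDao): ExampleDao {
  // PostgreSQL when USE_DB=true and DB_URL is set, JSON file storage otherwise.
  const useDb = process.env.USE_DB === 'true' && Boolean(process.env.DB_URL);
  return useDb ? dbImpl : jsonImpl;
}
```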
@@ -63,16 +325,100 @@ When adding/changing fields, update **ALL** these files:
### Data Type Mapping
| Model | DAO | DB Entity | JSON Path |
| -------------- | ----------------- | -------------- | ------------------------ |
| `IUser` | `UserDao` | `User` | `settings.users[]` |
| `ServerConfig` | `ServerDao` | `Server` | `settings.mcpServers{}` |
| `IGroup` | `GroupDao` | `Group` | `settings.groups[]` |
| `SystemConfig` | `SystemConfigDao` | `SystemConfig` | `settings.systemConfig` |
| `UserConfig` | `UserConfigDao` | `UserConfig` | `settings.userConfigs{}` |
| Model | DAO | DB Entity | JSON Path |
| -------------- | ----------------- | -------------- | ------------------------- |
| `IUser` | `UserDao` | `User` | `settings.users[]` |
| `ServerConfig` | `ServerDao` | `Server` | `settings.mcpServers{}` |
| `IGroup` | `GroupDao` | `Group` | `settings.groups[]` |
| `SystemConfig` | `SystemConfigDao` | `SystemConfig` | `settings.systemConfig` |
| `UserConfig` | `UserConfigDao` | `UserConfig` | `settings.userConfigs{}` |
| `BearerKey` | `BearerKeyDao` | `BearerKey` | `settings.bearerKeys[]` |
| `IOAuthClient` | `OAuthClientDao` | `OAuthClient` | `settings.oauthClients[]` |
| `IOAuthToken` | `OAuthTokenDao` | `OAuthToken` | `settings.oauthTokens[]` |
### Common Pitfalls
- Forgetting migration script → fields won't migrate to DB
- Optional fields need `nullable: true` in entity
- Complex objects need `simple-json` column type
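For example, a TypeORM entity that avoids the last two pitfalls could look like this (the entity and columns are illustrative):
```typescript
import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';

@Entity()
export class ExampleRecord {
  @PrimaryGeneratedColumn('uuid')
  id!: string;

  // Optional field: mark it nullable in the entity as well.
  @Column({ nullable: true })
  description?: string;

  // Complex object: store it with the simple-json column type.
  @Column({ type: 'simple-json', nullable: true })
  options?: Record<string, unknown>;
}
```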
## Auto-Evolution Guidelines for AI Agents
**This section provides guidelines for AI agents to automatically maintain and improve this document.**
### When to Update AGENTS.md
AI agents MUST update this document in the following situations:
#### 1. Code-Documentation Mismatch Detected
When executing tasks, if you discover that:
- The actual code structure differs from descriptions in this document
- File paths, imports, or module organization has changed
- New critical files or directories exist that aren't documented
- Documented files or patterns no longer exist
**Action**: Immediately update the relevant section to reflect the current codebase state.
**Example scenarios**:
- A controller is now in `src/api/controllers/` instead of `src/controllers/`
- New middleware files exist that should be documented
- The DAO implementation has been refactored with a different structure
- Build output directories have changed
#### 2. User Preferences and Requirements
During conversation, if the user expresses:
- **Coding preferences**: Indentation style, naming conventions, code organization patterns
- **Workflow requirements**: Required validation steps, commit procedures, testing expectations
- **Tool preferences**: Preferred libraries, frameworks, or development tools
- **Quality standards**: Code review criteria, documentation requirements, error handling patterns
- **Development principles**: Architecture decisions, design patterns, best practices
**Action**: Add or update the relevant section to capture these preferences for future reference.
**Example scenarios**:
- User prefers async/await over promises → Update coding style section
- User requires specific test coverage thresholds → Update testing guidelines
- User has strong opinions about error handling → Add to development process section
- User establishes new deployment procedures → Update deployment section
### How to Update AGENTS.md
1. **Identify the Section**: Determine which section needs updating based on the type of change
2. **Make Precise Changes**: Update only the relevant content, maintaining the document structure
3. **Preserve Format**: Keep the existing markdown formatting and organization
4. **Add Context**: If adding new content, ensure it fits logically within existing sections
5. **Verify Accuracy**: After updating, ensure the new information is accurate and complete
### Update Principles
- **Accuracy First**: Documentation must reflect the actual current state
- **Clarity**: Use clear, concise language; avoid ambiguity
- **Completeness**: Include sufficient detail for agents to work effectively
- **Consistency**: Maintain consistent terminology and formatting throughout
- **Actionability**: Focus on concrete, actionable guidance rather than vague descriptions
### Self-Correction Process
Before completing any task:
1. Review relevant sections of AGENTS.md
2. During execution, note any discrepancies between documentation and reality
3. Update AGENTS.md to correct discrepancies
4. Verify the update doesn't conflict with other sections
5. Proceed with the original task using the updated information
### Meta-Update Rule
If this auto-evolution section itself needs improvement based on experience:
- Update it to better serve future agents
- Add new scenarios or principles as they emerge
- Refine the update process based on what works well
**Remember**: This document is a living guide. Keeping it accurate and current is as important as following it.

View File

@@ -3,7 +3,7 @@ version: "3.8"
services:
# PostgreSQL database for MCPHub configuration
postgres:
image: postgres:16-alpine
image: pgvector/pgvector:pg17-alpine
container_name: mcphub-postgres
environment:
POSTGRES_DB: mcphub

View File

@@ -59,7 +59,7 @@ version: '3.8'
services:
postgres:
image: postgres:16
image: pgvector/pgvector:pg17
environment:
POSTGRES_DB: mcphub
POSTGRES_USER: mcphub

View File

@@ -106,7 +106,7 @@ services:
- NODE_ENV=production
- PORT=3000
- JWT_SECRET=${JWT_SECRET:-your-jwt-secret}
- DATABASE_URL=postgresql://mcphub:password@postgres:5432/mcphub
- DB_URL=postgresql://mcphub:password@postgres:5432/mcphub
volumes:
- ./mcp_settings.json:/app/mcp_settings.json:ro
- ./servers.json:/app/servers.json:ro
@@ -119,7 +119,7 @@ services:
- mcphub-network
postgres:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-postgres
environment:
- POSTGRES_DB=mcphub
@@ -180,7 +180,7 @@ services:
- PORT=3000
- JWT_SECRET=${JWT_SECRET}
- JWT_EXPIRES_IN=${JWT_EXPIRES_IN:-24h}
- DATABASE_URL=postgresql://mcphub:${POSTGRES_PASSWORD}@postgres:5432/mcphub
- DB_URL=postgresql://mcphub:${POSTGRES_PASSWORD}@postgres:5432/mcphub
- OPENAI_API_KEY=${OPENAI_API_KEY}
- REDIS_URL=redis://redis:6379
volumes:
@@ -203,7 +203,7 @@ services:
retries: 3
postgres:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-postgres
environment:
- POSTGRES_DB=mcphub
@@ -293,7 +293,7 @@ services:
environment:
- NODE_ENV=development
- PORT=3000
- DATABASE_URL=postgresql://mcphub:password@postgres:5432/mcphub
- DB_URL=postgresql://mcphub:password@postgres:5432/mcphub
volumes:
- .:/app
- /app/node_modules
@@ -305,7 +305,7 @@ services:
- mcphub-dev
postgres:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-postgres-dev
environment:
- POSTGRES_DB=mcphub
@@ -445,7 +445,7 @@ Add backup service to your `docker-compose.yml`:
```yaml
services:
backup:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-backup
environment:
- PGPASSWORD=${POSTGRES_PASSWORD}

View File

@@ -259,6 +259,92 @@ MCPHub supports environment variable substitution using `${VAR_NAME}` syntax:
}
```
### Proxy Configuration (proxychains4)
MCPHub supports routing STDIO server network traffic through a proxy using **proxychains4**. This feature is available on **Linux and macOS only** (Windows is not supported).
<Note>
To use this feature, you must have `proxychains4` installed on your system:
- **Debian/Ubuntu**: `apt install proxychains4`
- **macOS**: `brew install proxychains-ng`
- **Arch Linux**: `pacman -S proxychains-ng`
</Note>
#### Basic Proxy Configuration
```json
{
"mcpServers": {
"fetch-via-proxy": {
"command": "uvx",
"args": ["mcp-server-fetch"],
"proxy": {
"enabled": true,
"type": "socks5",
"host": "127.0.0.1",
"port": 1080
}
}
}
}
```
#### Proxy Configuration Options
| Field | Type | Default | Description |
| ------------ | ------- | --------- | ------------------------------------------------ |
| `enabled` | boolean | `false` | Enable/disable proxy routing |
| `type` | string | `socks5` | Proxy protocol: `socks4`, `socks5`, or `http` |
| `host` | string | - | Proxy server hostname or IP address |
| `port` | number | - | Proxy server port |
| `username` | string | - | Proxy authentication username (optional) |
| `password` | string | - | Proxy authentication password (optional) |
| `configPath` | string | - | Path to custom proxychains4 config file |
#### Proxy with Authentication
```json
{
"mcpServers": {
"secure-server": {
"command": "npx",
"args": ["-y", "@example/mcp-server"],
"proxy": {
"enabled": true,
"type": "http",
"host": "proxy.example.com",
"port": 8080,
"username": "${PROXY_USER}",
"password": "${PROXY_PASSWORD}"
}
}
}
}
```
#### Using Custom proxychains4 Configuration
For advanced use cases, you can provide your own proxychains4 configuration file:
```json
{
"mcpServers": {
"custom-proxy-server": {
"command": "python",
"args": ["-m", "custom_mcp_server"],
"proxy": {
"enabled": true,
"configPath": "/etc/proxychains4/custom.conf"
}
}
}
}
```
<Tip>
When `configPath` is specified, all other proxy settings (`type`, `host`, `port`, etc.) are ignored, and the custom configuration file is used directly.
</Tip>
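Conceptually, enabling the proxy wraps the server's launch command with `proxychains4`; the sketch below shows the rough equivalent (it is not MCPHub's actual spawning code, and the config path is the one from the example above):
```typescript
import { spawn } from 'node:child_process';

// Rough equivalent of the custom-config example: proxychains4 -f <config> <command> <args>.
const configPath = '/etc/proxychains4/custom.conf';
const child = spawn('proxychains4', ['-f', configPath, 'python', '-m', 'custom_mcp_server'], {
  stdio: ['pipe', 'pipe', 'inherit'], // STDIO transport uses the child's stdin/stdout
});
child.on('error', (err) => console.error('Failed to start proxied server:', err));
```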
{/* ### Custom Server Scripts
#### Local Python Server

View File

@@ -47,7 +47,7 @@ PORT=3000
NODE_ENV=development
# Database Configuration
DATABASE_URL=postgresql://username:password@localhost:5432/mcphub
DB_URL=postgresql://username:password@localhost:5432/mcphub
# JWT Configuration
JWT_SECRET=your-secret-key

View File

@@ -69,7 +69,7 @@ Smart Routing requires additional setup compared to basic MCPHub usage:
ports:
- "3000:3000"
environment:
- DATABASE_URL=postgresql://mcphub:password@postgres:5432/mcphub
- DB_URL=postgresql://mcphub:password@postgres:5432/mcphub
- OPENAI_API_KEY=your_openai_api_key
- ENABLE_SMART_ROUTING=true
depends_on:
@@ -78,7 +78,7 @@ Smart Routing requires additional setup compared to basic MCPHub usage:
- ./mcp_settings.json:/app/mcp_settings.json
postgres:
image: pgvector/pgvector:pg16
image: pgvector/pgvector:pg17
environment:
- POSTGRES_DB=mcphub
- POSTGRES_USER=mcphub
@@ -114,7 +114,7 @@ Smart Routing requires additional setup compared to basic MCPHub usage:
2. **Set Environment Variables**:
```bash
export DATABASE_URL="postgresql://mcphub:your_password@localhost:5432/mcphub"
export DB_URL="postgresql://mcphub:your_password@localhost:5432/mcphub"
export OPENAI_API_KEY="your_openai_api_key"
export ENABLE_SMART_ROUTING="true"
```
@@ -146,7 +146,7 @@ Smart Routing requires additional setup compared to basic MCPHub usage:
spec:
containers:
- name: postgres
image: pgvector/pgvector:pg16
image: pgvector/pgvector:pg17
env:
- name: POSTGRES_DB
value: mcphub
@@ -178,7 +178,7 @@ Smart Routing requires additional setup compared to basic MCPHub usage:
- name: mcphub
image: samanhappy/mcphub:latest
env:
- name: DATABASE_URL
- name: DB_URL
value: "postgresql://mcphub:password@postgres:5432/mcphub"
- name: OPENAI_API_KEY
valueFrom:
@@ -202,7 +202,7 @@ Configure Smart Routing with these environment variables:
```bash
# Required
DATABASE_URL=postgresql://user:password@host:5432/database
DB_URL=postgresql://user:password@host:5432/database
OPENAI_API_KEY=your_openai_api_key
# Optional
@@ -219,10 +219,10 @@ EMBEDDING_BATCH_SIZE=100
<Accordion title="Database Configuration">
```bash
# Full PostgreSQL connection string
DATABASE_URL=postgresql://username:password@host:port/database?schema=public
DB_URL=postgresql://username:password@host:port/database?schema=public
# SSL configuration for cloud databases
DATABASE_URL=postgresql://user:pass@host:5432/db?sslmode=require
DB_URL=postgresql://user:pass@host:5432/db?sslmode=require
# Connection pool settings
DATABASE_POOL_SIZE=20
@@ -673,11 +673,11 @@ curl -X POST http://localhost:3000/api/smart-routing/feedback \
**Solutions:**
1. Verify PostgreSQL is running
2. Check DATABASE_URL format
2. Check DB_URL format
3. Ensure pgvector extension is installed
4. Test connection manually:
```bash
psql $DATABASE_URL -c "SELECT 1;"
psql $DB_URL -c "SELECT 1;"
```
</Accordion>

View File

@@ -96,7 +96,7 @@ Optional for Smart Routing:
# Optional: PostgreSQL for Smart Routing
postgres:
image: pgvector/pgvector:pg16
image: pgvector/pgvector:pg17
environment:
POSTGRES_DB: mcphub
POSTGRES_USER: mcphub
@@ -445,7 +445,7 @@ Set the following environment variables:
```bash
# Database connection
DATABASE_URL=postgresql://mcphub:your_password@localhost:5432/mcphub
DB_URL=postgresql://mcphub:your_password@localhost:5432/mcphub
# OpenAI API for embeddings
OPENAI_API_KEY=your_openai_api_key
@@ -563,10 +563,10 @@ curl -X POST http://localhost:3000/mcp \
**Database connection failed:**
```bash
# Test database connection
psql $DATABASE_URL -c "SELECT 1;"
psql $DB_URL -c "SELECT 1;"
# Check if pgvector is installed
psql $DATABASE_URL -c "CREATE EXTENSION IF NOT EXISTS vector;"
psql $DB_URL -c "CREATE EXTENSION IF NOT EXISTS vector;"
```
**Embedding service errors:**

View File

@@ -59,7 +59,7 @@ version: '3.8'
services:
postgres:
image: postgres:16
image: pgvector/pgvector:pg17
environment:
POSTGRES_DB: mcphub
POSTGRES_USER: mcphub

View File

@@ -106,7 +106,7 @@ services:
- NODE_ENV=production
- PORT=3000
- JWT_SECRET=${JWT_SECRET:-your-jwt-secret}
- DATABASE_URL=postgresql://mcphub:password@postgres:5432/mcphub
- DB_URL=postgresql://mcphub:password@postgres:5432/mcphub
volumes:
- ./mcp_settings.json:/app/mcp_settings.json:ro
- ./servers.json:/app/servers.json:ro
@@ -119,7 +119,7 @@ services:
- mcphub-network
postgres:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-postgres
environment:
- POSTGRES_DB=mcphub
@@ -180,7 +180,7 @@ services:
- PORT=3000
- JWT_SECRET=${JWT_SECRET}
- JWT_EXPIRES_IN=${JWT_EXPIRES_IN:-24h}
- DATABASE_URL=postgresql://mcphub:${POSTGRES_PASSWORD}@postgres:5432/mcphub
- DB_URL=postgresql://mcphub:${POSTGRES_PASSWORD}@postgres:5432/mcphub
- OPENAI_API_KEY=${OPENAI_API_KEY}
- REDIS_URL=redis://redis:6379
volumes:
@@ -203,7 +203,7 @@ services:
retries: 3
postgres:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-postgres
environment:
- POSTGRES_DB=mcphub
@@ -293,7 +293,7 @@ services:
environment:
- NODE_ENV=development
- PORT=3000
- DATABASE_URL=postgresql://mcphub:password@postgres:5432/mcphub
- DB_URL=postgresql://mcphub:password@postgres:5432/mcphub
volumes:
- .:/app
- /app/node_modules
@@ -305,7 +305,7 @@ services:
- mcphub-dev
postgres:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-postgres-dev
environment:
- POSTGRES_DB=mcphub
@@ -445,7 +445,7 @@ secrets:
```yaml
services:
backup:
image: postgres:15-alpine
image: pgvector/pgvector:pg17
container_name: mcphub-backup
environment:
- PGPASSWORD=${POSTGRES_PASSWORD}

View File

@@ -290,7 +290,7 @@ MCPHub 支持使用 `${VAR_NAME}` 语法进行环境变量替换:
"command": "python",
"args": ["-m", "db_server"],
"env": {
"DB_URL": "${NODE_ENV:development == 'production' ? DATABASE_URL : DEV_DATABASE_URL}"
"DB_URL": "${NODE_ENV:development == 'production' ? DB_URL : DEV_DB_URL}"
}
}
}

View File

@@ -47,7 +47,7 @@ PORT=3000
NODE_ENV=development
# Database Configuration
DATABASE_URL=postgresql://username:password@localhost:5432/mcphub
DB_URL=postgresql://username:password@localhost:5432/mcphub
# JWT Configuration
JWT_SECRET=your-secret-key

View File

@@ -480,7 +480,7 @@ docker run -d \
--name mcphub \
-p 3000:3000 \
-e NODE_ENV=production \
-e DATABASE_URL=postgresql://user:pass@host:5432/mcphub \
-e DB_URL=postgresql://user:pass@host:5432/mcphub \
-e JWT_SECRET=your-secret-key \
mcphub/server:latest
@@ -504,7 +504,7 @@ docker run -d \
--name mcphub \
-p 3000:3000 \
-e NODE_ENV=production \
-e DATABASE_URL=postgresql://user:pass@host:5432/mcphub \
-e DB_URL=postgresql://user:pass@host:5432/mcphub \
-e JWT_SECRET=your-secret-key \
mcphub/server:latest

View File

@@ -159,7 +159,7 @@ services:
- '3000:3000'
environment:
- NODE_ENV=production
- DATABASE_URL=postgresql://user:pass@db:5432/mcphub
- DB_URL=postgresql://user:pass@db:5432/mcphub
```
````
@@ -172,7 +172,7 @@ services:
- '3000:3000'
environment:
- NODE_ENV=production
- DATABASE_URL=postgresql://user:pass@db:5432/mcphub
- DB_URL=postgresql://user:pass@db:5432/mcphub
```
### Terminal Commands

View File

@@ -245,11 +245,11 @@ curl -X POST http://localhost:3000/api/smart-routing/feedback \
**Solutions:**
1. Verify PostgreSQL is running
2. Check DATABASE_URL format
2. Check DB_URL format
3. Ensure pgvector extension is installed
4. Test connection manually:
```bash
psql $DATABASE_URL -c "SELECT 1;"
psql $DB_URL -c "SELECT 1;"
```
</Accordion>

View File

@@ -96,7 +96,7 @@ description: '各种平台的详细安装说明'
# Optional: PostgreSQL for Smart Routing
postgres:
image: pgvector/pgvector:pg16
image: pgvector/pgvector:pg17
environment:
POSTGRES_DB: mcphub
POSTGRES_USER: mcphub
@@ -420,7 +420,7 @@ description: '各种平台的详细安装说明'
```bash
# Database connection
DATABASE_URL=postgresql://mcphub:your_password@localhost:5432/mcphub
DB_URL=postgresql://mcphub:your_password@localhost:5432/mcphub
# OpenAI API for embeddings
OPENAI_API_KEY=your_openai_api_key
@@ -538,10 +538,10 @@ curl -X POST http://localhost:3000/mcp \
**Database connection failed:**
```bash
# Test database connection
psql $DATABASE_URL -c "SELECT 1;"
psql $DB_URL -c "SELECT 1;"
# Check if pgvector is installed
psql $DATABASE_URL -c "CREATE EXTENSION IF NOT EXISTS vector;"
psql $DB_URL -c "CREATE EXTENSION IF NOT EXISTS vector;"
```
**Embedding service errors:**

View File

@@ -28,7 +28,48 @@
"env": {
"API_KEY": "${MY_API_KEY}",
"DEBUG": "${DEBUG_MODE}",
"DATABASE_URL": "${DATABASE_URL}"
"DB_URL": "${DB_URL}"
}
},
"example-stdio-with-proxy": {
"type": "stdio",
"command": "uvx",
"args": [
"mcp-server-fetch"
],
"proxy": {
"enabled": true,
"type": "socks5",
"host": "${PROXY_HOST}",
"port": 1080
}
},
"example-stdio-with-auth-proxy": {
"type": "stdio",
"command": "npx",
"args": [
"-y",
"@example/mcp-server"
],
"proxy": {
"enabled": true,
"type": "http",
"host": "${HTTP_PROXY_HOST}",
"port": 8080,
"username": "${PROXY_USERNAME}",
"password": "${PROXY_PASSWORD}"
}
},
"example-stdio-with-custom-proxy-config": {
"type": "stdio",
"command": "python",
"args": [
"-m",
"custom_mcp_server"
],
"proxy": {
"enabled": true,
"configPath": "/etc/proxychains4/custom.conf"
}
},
"example-openapi-server": {
@@ -55,7 +96,10 @@
"clientId": "${OAUTH_CLIENT_ID}",
"clientSecret": "${OAUTH_CLIENT_SECRET}",
"accessToken": "${OAUTH_ACCESS_TOKEN}",
"scopes": ["read", "write"]
"scopes": [
"read",
"write"
]
}
}
},
@@ -77,4 +121,4 @@
"baseUrl": "${MCPROUTER_BASE_URL}"
}
}
}
}

View File

@@ -18,7 +18,17 @@ const EditServerForm = ({ server, onEdit, onCancel }: EditServerFormProps) => {
try {
setError(null);
const encodedServerName = encodeURIComponent(server.name);
const result = await apiPut(`/servers/${encodedServerName}`, payload);
// Check if name is being changed
const isRenaming = payload.name && payload.name !== server.name;
// Build the request body
const requestBody = {
config: payload.config,
...(isRenaming ? { newName: payload.name } : {}),
};
const result = await apiPut(`/servers/${encodedServerName}`, requestBody);
if (!result.success) {
// Use specific error message from the response if available

View File

@@ -1,99 +1,103 @@
import { useState, useRef, useEffect } from 'react'
import { useTranslation } from 'react-i18next'
import { Group, Server, IGroupServerConfig } from '@/types'
import { Edit, Trash, Copy, Check, Link, FileCode, DropdownIcon, Wrench } from '@/components/icons/LucideIcons'
import DeleteDialog from '@/components/ui/DeleteDialog'
import { useToast } from '@/contexts/ToastContext'
import { useSettingsData } from '@/hooks/useSettingsData'
import { useState, useRef, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Group, Server, IGroupServerConfig } from '@/types';
import {
Edit,
Trash,
Copy,
Check,
Link,
FileCode,
DropdownIcon,
Wrench,
} from '@/components/icons/LucideIcons';
import DeleteDialog from '@/components/ui/DeleteDialog';
import { useToast } from '@/contexts/ToastContext';
import { useSettingsData } from '@/hooks/useSettingsData';
interface GroupCardProps {
group: Group
servers: Server[]
onEdit: (group: Group) => void
onDelete: (groupId: string) => void
group: Group;
servers: Server[];
onEdit: (group: Group) => void;
onDelete: (groupId: string) => void;
}
const GroupCard = ({
group,
servers,
onEdit,
onDelete
}: GroupCardProps) => {
const { t } = useTranslation()
const { showToast } = useToast()
const { installConfig } = useSettingsData()
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [copied, setCopied] = useState(false)
const [showCopyDropdown, setShowCopyDropdown] = useState(false)
const [expandedServer, setExpandedServer] = useState<string | null>(null)
const dropdownRef = useRef<HTMLDivElement>(null)
const GroupCard = ({ group, servers, onEdit, onDelete }: GroupCardProps) => {
const { t } = useTranslation();
const { showToast } = useToast();
const { installConfig } = useSettingsData();
const [showDeleteDialog, setShowDeleteDialog] = useState(false);
const [copied, setCopied] = useState(false);
const [showCopyDropdown, setShowCopyDropdown] = useState(false);
const [expandedServer, setExpandedServer] = useState<string | null>(null);
const dropdownRef = useRef<HTMLDivElement>(null);
// Close dropdown when clicking outside
useEffect(() => {
const handleClickOutside = (event: MouseEvent) => {
if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
setShowCopyDropdown(false)
setShowCopyDropdown(false);
}
}
};
document.addEventListener('mousedown', handleClickOutside)
document.addEventListener('mousedown', handleClickOutside);
return () => {
document.removeEventListener('mousedown', handleClickOutside)
}
}, [])
document.removeEventListener('mousedown', handleClickOutside);
};
}, []);
const handleEdit = () => {
onEdit(group)
}
onEdit(group);
};
const handleDelete = () => {
setShowDeleteDialog(true)
}
setShowDeleteDialog(true);
};
const handleConfirmDelete = () => {
onDelete(group.id)
setShowDeleteDialog(false)
}
onDelete(group.id);
setShowDeleteDialog(false);
};
const copyToClipboard = (text: string) => {
if (navigator.clipboard && window.isSecureContext) {
navigator.clipboard.writeText(text).then(() => {
setCopied(true)
setShowCopyDropdown(false)
showToast(t('common.copySuccess'), 'success')
setTimeout(() => setCopied(false), 2000)
})
setCopied(true);
setShowCopyDropdown(false);
showToast(t('common.copySuccess'), 'success');
setTimeout(() => setCopied(false), 2000);
});
} else {
// Fallback for HTTP or unsupported clipboard API
const textArea = document.createElement('textarea')
textArea.value = text
const textArea = document.createElement('textarea');
textArea.value = text;
// Avoid scrolling to bottom
textArea.style.position = 'fixed'
textArea.style.left = '-9999px'
document.body.appendChild(textArea)
textArea.focus()
textArea.select()
textArea.style.position = 'fixed';
textArea.style.left = '-9999px';
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
try {
document.execCommand('copy')
setCopied(true)
setShowCopyDropdown(false)
showToast(t('common.copySuccess'), 'success')
setTimeout(() => setCopied(false), 2000)
document.execCommand('copy');
setCopied(true);
setShowCopyDropdown(false);
showToast(t('common.copySuccess'), 'success');
setTimeout(() => setCopied(false), 2000);
} catch (err) {
showToast(t('common.copyFailed') || 'Copy failed', 'error')
console.error('Copy to clipboard failed:', err)
showToast(t('common.copyFailed') || 'Copy failed', 'error');
console.error('Copy to clipboard failed:', err);
}
document.body.removeChild(textArea)
document.body.removeChild(textArea);
}
}
};
const handleCopyId = () => {
copyToClipboard(group.id)
}
copyToClipboard(group.id);
};
const handleCopyUrl = () => {
copyToClipboard(`${installConfig.baseUrl}/mcp/${group.id}`)
}
copyToClipboard(`${installConfig.baseUrl}/mcp/${group.id}`);
};
const handleCopyJson = () => {
const jsonConfig = {
@@ -101,23 +105,23 @@ const GroupCard = ({
mcphub: {
url: `${installConfig.baseUrl}/mcp/${group.id}`,
headers: {
Authorization: "Bearer <your-access-token>"
}
}
}
}
copyToClipboard(JSON.stringify(jsonConfig, null, 2))
}
Authorization: 'Bearer <your-access-token>',
},
},
},
};
copyToClipboard(JSON.stringify(jsonConfig, null, 2));
};
// Helper function to normalize group servers to get server names
const getServerNames = (servers: string[] | IGroupServerConfig[]): string[] => {
return servers.map(server => typeof server === 'string' ? server : server.name);
return servers.map((server) => (typeof server === 'string' ? server : server.name));
};
// Helper function to get server configuration
const getServerConfig = (serverName: string): IGroupServerConfig | undefined => {
const server = group.servers.find(s =>
typeof s === 'string' ? s === serverName : s.name === serverName
const server = group.servers.find((s) =>
typeof s === 'string' ? s === serverName : s.name === serverName,
);
if (typeof server === 'string') {
return { name: server, tools: 'all' };
@@ -127,11 +131,11 @@ const GroupCard = ({
// Get servers that belong to this group
const serverNames = getServerNames(group.servers);
const groupServers = servers.filter(server => serverNames.includes(server.name));
const groupServers = servers.filter((server) => serverNames.includes(server.name));
return (
<div className="bg-white shadow rounded-lg p-6 ">
<div className="flex justify-between items-center mb-4">
<div className="bg-white shadow rounded-lg p-4">
<div className="flex justify-between items-center">
<div>
<div className="flex items-center">
<h2 className="text-xl font-semibold text-gray-800">{group.name}</h2>
@@ -175,9 +179,7 @@ const GroupCard = ({
</div>
</div>
</div>
{group.description && (
<p className="text-gray-600 text-sm mt-1">{group.description}</p>
)}
{group.description && <p className="text-gray-600 text-sm mt-1">{group.description}</p>}
</div>
<div className="flex items-center space-x-3">
<div className="bg-blue-50 text-blue-700 px-3 py-1 rounded-full text-sm btn-secondary">
@@ -200,17 +202,19 @@ const GroupCard = ({
</div>
</div>
<div className="mt-4">
<div className="">
{groupServers.length === 0 ? (
<p className="text-gray-500 italic">{t('groups.noServers')}</p>
) : (
<div className="flex flex-wrap gap-2">
{groupServers.map(server => {
{groupServers.map((server) => {
const serverConfig = getServerConfig(server.name);
const hasToolRestrictions = serverConfig && serverConfig.tools !== 'all' && Array.isArray(serverConfig.tools);
const toolCount = hasToolRestrictions && Array.isArray(serverConfig?.tools)
? serverConfig.tools.length
: (server.tools?.length || 0); // Show total tool count when all tools are selected
const hasToolRestrictions =
serverConfig && serverConfig.tools !== 'all' && Array.isArray(serverConfig.tools);
const toolCount =
hasToolRestrictions && Array.isArray(serverConfig?.tools)
? serverConfig.tools.length
: server.tools?.length || 0; // Show total tool count when all tools are selected
const isExpanded = expandedServer === server.name;
@@ -219,7 +223,7 @@ const GroupCard = ({
if (hasToolRestrictions && Array.isArray(serverConfig?.tools)) {
return serverConfig.tools;
} else if (server.tools && server.tools.length > 0) {
return server.tools.map(tool => tool.name);
return server.tools.map((tool) => tool.name);
}
return [];
};
@@ -235,9 +239,15 @@ const GroupCard = ({
onClick={handleServerClick}
>
<span className="font-medium text-gray-700 text-sm">{server.name}</span>
<span className={`inline-block h-2 w-2 rounded-full ${server.status === 'connected' ? 'bg-green-500' :
server.status === 'connecting' ? 'bg-yellow-500' : 'bg-red-500'
}`}></span>
<span
className={`inline-block h-2 w-2 rounded-full ${
server.status === 'connected'
? 'bg-green-500'
: server.status === 'connecting'
? 'bg-yellow-500'
: 'bg-red-500'
}`}
></span>
{toolCount > 0 && (
<span className="text-xs text-blue-600 bg-blue-100 px-2 py-0.5 rounded flex items-center gap-1">
<Wrench size={12} />
@@ -278,7 +288,7 @@ const GroupCard = ({
isGroup={true}
/>
</div>
)
}
);
};
export default GroupCard
export default GroupCard;

View File

@@ -0,0 +1,284 @@
import React, { useState } from 'react';
import { useTranslation } from 'react-i18next';
import { apiPost } from '@/utils/fetchInterceptor';
interface GroupImportFormProps {
onSuccess: () => void;
onCancel: () => void;
}
interface ImportGroupConfig {
name: string;
description?: string;
servers?: string[] | Array<{ name: string; tools?: string[] | 'all' }>;
}
interface ImportJsonFormat {
groups: ImportGroupConfig[];
}
const GroupImportForm: React.FC<GroupImportFormProps> = ({ onSuccess, onCancel }) => {
const { t } = useTranslation();
const [jsonInput, setJsonInput] = useState('');
const [error, setError] = useState<string | null>(null);
const [isImporting, setIsImporting] = useState(false);
const [previewGroups, setPreviewGroups] = useState<ImportGroupConfig[] | null>(null);
const examplePlaceholder = `{
"groups": [
{
"name": "AI Assistants",
"servers": ["openai-server", "anthropic-server"]
},
{
"name": "Development Tools",
"servers": [
{
"name": "github-server",
"tools": ["create_issue", "list_repos"]
},
{
"name": "gitlab-server",
"tools": "all"
}
]
}
]
}
Supports:
- Simple server list: ["server1", "server2"]
- Advanced server config: [{"name": "server1", "tools": ["tool1", "tool2"]}]
- All groups will be imported in a single efficient batch operation.`;
const parseAndValidateJson = (input: string): ImportJsonFormat | null => {
try {
const parsed = JSON.parse(input.trim());
// Validate structure
if (!parsed.groups || !Array.isArray(parsed.groups)) {
setError(t('groupImport.invalidFormat'));
return null;
}
// Validate each group
for (const group of parsed.groups) {
if (!group.name || typeof group.name !== 'string') {
setError(t('groupImport.missingName'));
return null;
}
}
return parsed as ImportJsonFormat;
} catch (e) {
setError(t('groupImport.parseError'));
return null;
}
};
const handlePreview = () => {
setError(null);
const parsed = parseAndValidateJson(jsonInput);
if (!parsed) return;
setPreviewGroups(parsed.groups);
};
const handleImport = async () => {
if (!previewGroups) return;
setIsImporting(true);
setError(null);
try {
// Use batch import API for better performance
const result = await apiPost('/groups/batch', {
groups: previewGroups,
});
if (result.success) {
const { successCount, failureCount, results } = result;
if (failureCount > 0) {
const errors = results
.filter((r: any) => !r.success)
.map((r: any) => `${r.name}: ${r.message || t('groupImport.addFailed')}`);
setError(
t('groupImport.partialSuccess', { count: successCount, total: previewGroups.length }) +
'\n' +
errors.join('\n'),
);
}
if (successCount > 0) {
onSuccess();
}
} else {
setError(result.message || t('groupImport.importFailed'));
}
} catch (err) {
console.error('Import error:', err);
setError(t('groupImport.importFailed'));
} finally {
setIsImporting(false);
}
};
const renderServerList = (
servers?: string[] | Array<{ name: string; tools?: string[] | 'all' }>,
) => {
if (!servers || servers.length === 0) {
return <span className="text-gray-500">{t('groups.noServers')}</span>;
}
return (
<div className="space-y-1">
{servers.map((server, idx) => {
if (typeof server === 'string') {
return (
<div key={idx} className="text-sm">
{server}
</div>
);
} else {
return (
<div key={idx} className="text-sm">
{server.name}
{server.tools && server.tools !== 'all' && (
<span className="text-gray-500 ml-2">
({Array.isArray(server.tools) ? server.tools.join(', ') : server.tools})
</span>
)}
{server.tools === 'all' && <span className="text-gray-500 ml-2">(all tools)</span>}
</div>
);
}
})}
</div>
);
};
return (
<div className="fixed inset-0 bg-black/50 z-50 flex items-center justify-center p-4">
<div className="bg-white shadow rounded-lg p-6 w-full max-w-4xl max-h-[90vh] overflow-y-auto">
<div className="flex justify-between items-center mb-6">
<h2 className="text-xl font-semibold text-gray-900">{t('groupImport.title')}</h2>
<button onClick={onCancel} className="text-gray-500 hover:text-gray-700">
</button>
</div>
{error && (
<div className="mb-4 bg-red-50 border-l-4 border-red-500 p-4 rounded">
<p className="text-red-700 whitespace-pre-wrap">{error}</p>
</div>
)}
{!previewGroups ? (
<div>
<div className="mb-4">
<label className="block text-sm font-medium text-gray-700 mb-2">
{t('groupImport.inputLabel')}
</label>
<textarea
value={jsonInput}
onChange={(e) => setJsonInput(e.target.value)}
className="w-full h-96 px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 font-mono text-sm"
placeholder={examplePlaceholder}
/>
<p className="text-xs text-gray-500 mt-2">{t('groupImport.inputHelp')}</p>
</div>
<div className="flex justify-end space-x-4">
<button
onClick={onCancel}
className="px-4 py-2 text-gray-700 bg-gray-200 rounded hover:bg-gray-300 btn-secondary"
>
{t('common.cancel')}
</button>
<button
onClick={handlePreview}
disabled={!jsonInput.trim()}
className="px-4 py-2 bg-blue-600 text-white rounded hover:bg-blue-700 disabled:opacity-50 btn-primary"
>
{t('groupImport.preview')}
</button>
</div>
</div>
) : (
<div>
<div className="mb-4">
<h3 className="text-lg font-medium text-gray-900 mb-3">
{t('groupImport.previewTitle')}
</h3>
<div className="space-y-3">
{previewGroups.map((group, index) => (
<div key={index} className="bg-gray-50 p-4 rounded-lg border border-gray-200">
<div className="flex items-start justify-between">
<div className="flex-1">
<h4 className="font-medium text-gray-900">{group.name}</h4>
{group.description && (
<p className="text-sm text-gray-600 mt-1">{group.description}</p>
)}
<div className="mt-2 text-sm text-gray-600">
<strong>{t('groups.servers')}:</strong>
<div className="mt-1">{renderServerList(group.servers)}</div>
</div>
</div>
</div>
</div>
))}
</div>
</div>
<div className="flex justify-end space-x-4">
<button
onClick={() => setPreviewGroups(null)}
disabled={isImporting}
className="px-4 py-2 text-gray-700 bg-gray-200 rounded hover:bg-gray-300 disabled:opacity-50 btn-secondary"
>
{t('common.back')}
</button>
<button
onClick={handleImport}
disabled={isImporting}
className="px-4 py-2 bg-blue-600 text-white rounded hover:bg-blue-700 disabled:opacity-50 flex items-center btn-primary"
>
{isImporting ? (
<>
<svg
className="animate-spin h-4 w-4 mr-2"
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
>
<circle
className="opacity-25"
cx="12"
cy="12"
r="10"
stroke="currentColor"
strokeWidth="4"
></circle>
<path
className="opacity-75"
fill="currentColor"
d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
></path>
</svg>
{t('groupImport.importing')}
</>
) : (
t('groupImport.import')
)}
</button>
</div>
</div>
)}
</div>
</div>
);
};
export default GroupImportForm;
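The import handler above destructures `successCount`, `failureCount`, and `results` directly from the `/groups/batch` response. A hedged sketch of the response contract this implies (field names inferred from the destructuring, not confirmed against the backend):

// Assumed response shape for POST /groups/batch, inferred from handleImport above.
interface BatchGroupResult {
  name: string;
  success: boolean;
  message?: string;
}

interface BatchGroupResponse {
  success: boolean;
  message?: string;
  successCount: number;
  failureCount: number;
  results: BatchGroupResult[];
}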

View File

@@ -14,6 +14,10 @@ interface McpServerConfig {
type?: string;
url?: string;
headers?: Record<string, string>;
openapi?: {
version: string;
url: string;
};
}
interface ImportJsonFormat {
@@ -29,29 +33,16 @@ const JSONImportForm: React.FC<JSONImportFormProps> = ({ onSuccess, onCancel })
null,
);
const examplePlaceholder = `{
"mcpServers": {
"stdio-server-example": {
"command": "npx",
"args": ["-y", "mcp-server-example"]
},
"sse-server-example": {
"type": "sse",
"url": "http://localhost:3000"
},
"http-server-example": {
"type": "streamable-http",
"url": "http://localhost:3001",
@@ -59,9 +50,18 @@ HTTP example:
"Content-Type": "application/json",
"Authorization": "Bearer your-token"
}
},
"openapi-server-example": {
"type": "openapi",
"openapi": {
"url": "https://petstore.swagger.io/v2/swagger.json"
}
}
}
}
Supports: STDIO, SSE, HTTP (streamable-http), OpenAPI
All servers will be imported in a single efficient batch operation.`;
const parseAndValidateJson = (input: string): ImportJsonFormat | null => {
try {
@@ -95,6 +95,9 @@ HTTP example:
if (config.headers) {
normalizedConfig.headers = config.headers;
}
} else if (config.type === 'openapi') {
normalizedConfig.type = 'openapi';
normalizedConfig.openapi = config.openapi;
} else {
// Default to stdio
normalizedConfig.type = 'stdio';
@@ -118,38 +121,31 @@ HTTP example:
setError(null);
try {
// Use batch import API for better performance
const result = await apiPost('/servers/batch', {
servers: previewServers,
});
if (result.success && result.data) {
const { successCount, failureCount, results } = result.data;
if (failureCount > 0) {
const errors = results
.filter((r: any) => !r.success)
.map((r: any) => `${r.name}: ${r.message || t('jsonImport.addFailed')}`);
setError(
t('jsonImport.partialSuccess', { count: successCount, total: previewServers.length }) +
'\n' +
errors.join('\n'),
);
}
if (successCount > 0) {
onSuccess();
}
} else {
setError(result.message || t('jsonImport.importFailed'));
}
} catch (err) {
console.error('Import error:', err);
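Note that, unlike the group import above, the server import reads the counts from `result.data` rather than from the top level of the response. A hedged sketch of the wrapper this implies (inferred from the destructuring, not from the server code):

// Assumed wrapper for POST /servers/batch, based on the `result.data` access above.
interface BatchServerResponse {
  success: boolean;
  message?: string;
  data?: {
    successCount: number;
    failureCount: number;
    results: { name: string; success: boolean; message?: string }[];
  };
}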

View File

@@ -18,7 +18,14 @@ interface ServerCardProps {
onReload?: (server: Server) => Promise<boolean>;
}
const ServerCard = ({
server,
onRemove,
onEdit,
onToggle,
onRefresh,
onReload,
}: ServerCardProps) => {
const { t } = useTranslation();
const { showToast } = useToast();
const [isExpanded, setIsExpanded] = useState(false);
@@ -232,10 +239,10 @@ const ServerCard = ({ server, onRemove, onEdit, onToggle, onRefresh, onReload }:
return (
<>
<div
className={`bg-white shadow rounded-lg mb-6 page-card transition-all duration-200 ${server.enabled === false ? 'opacity-60' : ''}`}
>
<div
className="flex justify-between items-center cursor-pointer"
className="flex justify-between items-center cursor-pointer p-4"
onClick={() => setIsExpanded(!isExpanded)}
>
<div className="flex items-center space-x-3">
@@ -385,9 +392,9 @@ const ServerCard = ({ server, onRemove, onEdit, onToggle, onRefresh, onReload }:
{isExpanded && (
<>
{server.tools && (
<div className="mt-6">
<div className="px-4">
<h6
className={`font-medium ${server.enabled === false ? 'text-gray-600' : 'text-gray-900'} mb-2`}
>
{t('server.tools')}
</h6>
@@ -405,9 +412,9 @@ const ServerCard = ({ server, onRemove, onEdit, onToggle, onRefresh, onReload }:
)}
{server.prompts && (
<div className="mt-6">
<div className="px-4 pb-2">
<h6
className={`font-medium ${server.enabled === false ? 'text-gray-600' : 'text-gray-900'}`}
>
{t('server.prompts')}
</h6>

View File

@@ -375,6 +375,7 @@ const ServerForm = ({
? {
url: formData.url,
...(Object.keys(headers).length > 0 ? { headers } : {}),
...(Object.keys(env).length > 0 ? { env } : {}),
...(oauthConfig ? { oauth: oauthConfig } : {}),
}
: {
@@ -428,7 +429,6 @@ const ServerForm = ({
className="shadow appearance-none border rounded w-full py-2 px-3 text-gray-700 leading-tight focus:outline-none focus:shadow-outline form-input"
placeholder="e.g.: time-mcp"
required
disabled={isEdit}
/>
</div>
@@ -978,6 +978,49 @@ const ServerForm = ({
))}
</div>
<div className="mb-4">
<div className="flex justify-between items-center mb-2">
<label className="block text-gray-700 text-sm font-bold">
{t('server.envVars')}
</label>
<button
type="button"
onClick={addEnvVar}
className="bg-gray-200 hover:bg-gray-300 text-gray-700 font-medium py-1 px-2 rounded text-sm flex items-center justify-center min-w-[30px] min-h-[30px] btn-primary"
>
+
</button>
</div>
{envVars.map((envVar, index) => (
<div key={index} className="flex items-center mb-2">
<div className="flex items-center space-x-2 flex-grow">
<input
type="text"
value={envVar.key}
onChange={(e) => handleEnvVarChange(index, 'key', e.target.value)}
className="shadow appearance-none border rounded py-2 px-3 text-gray-700 leading-tight focus:outline-none focus:shadow-outline w-1/2 form-input"
placeholder={t('server.key')}
/>
<span className="flex items-center">:</span>
<input
type="text"
value={envVar.value}
onChange={(e) => handleEnvVarChange(index, 'value', e.target.value)}
className="shadow appearance-none border rounded py-2 px-3 text-gray-700 leading-tight focus:outline-none focus:shadow-outline w-1/2 form-input"
placeholder={t('server.value')}
/>
</div>
<button
type="button"
onClick={() => removeEnvVar(index)}
className="bg-gray-200 hover:bg-gray-300 text-gray-700 font-medium py-1 px-2 rounded text-sm flex items-center justify-center min-w-[30px] min-h-[30px] ml-2 btn-danger"
>
-
</button>
</div>
))}
</div>
<div className="mb-4">
<div
className="flex items-center justify-between cursor-pointer bg-gray-50 hover:bg-gray-100 p-3 rounded border border-gray-200"

View File

@@ -21,7 +21,14 @@ interface DynamicFormProps {
title?: string; // Optional title to display instead of default parameters title
}
const DynamicForm: React.FC<DynamicFormProps> = ({
schema,
onSubmit,
onCancel,
loading = false,
storageKey,
title,
}) => {
const { t } = useTranslation();
const [formValues, setFormValues] = useState<Record<string, any>>({});
const [errors, setErrors] = useState<Record<string, string>>({});
@@ -40,9 +47,14 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
description: obj.description,
enum: obj.enum,
default: obj.default,
properties: obj.properties
? Object.fromEntries(
Object.entries(obj.properties).map(([key, value]) => [
key,
convertProperty(value),
]),
)
: undefined,
required: obj.required,
items: obj.items ? convertProperty(obj.items) : undefined,
};
@@ -52,9 +64,14 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
return {
type: schema.type,
properties: schema.properties
? Object.fromEntries(
Object.entries(schema.properties).map(([key, value]) => [
key,
convertProperty(value),
]),
)
: undefined,
required: schema.required,
};
};
@@ -167,7 +184,7 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
};
const handleInputChange = (path: string, value: any) => {
setFormValues((prev) => {
const newValues = { ...prev };
const keys = path.split('.');
let current = newValues;
@@ -195,7 +212,7 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
// Clear error for this field
if (errors[path]) {
setErrors((prev) => {
const newErrors = { ...prev };
delete newErrors[path];
return newErrors;
@@ -209,10 +226,16 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
if (schema.type === 'object' && schema.properties) {
Object.entries(schema.properties).forEach(([key, propSchema]) => {
const fullPath = path ? `${path}.${key}` : key;
const value = values?.[key];
// Check required fields
if (
schema.required?.includes(key) &&
(value === undefined ||
value === null ||
value === '' ||
(Array.isArray(value) && value.length === 0))
) {
newErrors[fullPath] = `${key} is required`;
return;
}
@@ -223,7 +246,10 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
newErrors[fullPath] = `${key} must be a string`;
} else if (propSchema.type === 'number' && typeof value !== 'number') {
newErrors[fullPath] = `${key} must be a number`;
} else if (
propSchema.type === 'integer' &&
(!Number.isInteger(value) || typeof value !== 'number')
) {
newErrors[fullPath] = `${key} must be an integer`;
} else if (propSchema.type === 'boolean' && typeof value !== 'boolean') {
newErrors[fullPath] = `${key} must be a boolean`;
@@ -260,7 +286,12 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
return path.split('.').reduce((current, key) => current?.[key], obj);
};
const renderObjectField = (
key: string,
schema: JsonSchema,
currentValue: any,
onChange: (value: any) => void,
): React.ReactNode => {
const value = currentValue?.[key];
if (schema.type === 'string') {
@@ -299,7 +330,12 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
step={schema.type === 'integer' ? '1' : 'any'}
value={value ?? ''}
onChange={(e) => {
const val = e.target.value === '' ? '' : schema.type === 'integer' ? parseInt(e.target.value) : parseFloat(e.target.value);
const val =
e.target.value === ''
? ''
: schema.type === 'integer'
? parseInt(e.target.value)
: parseFloat(e.target.value);
onChange(val);
}}
className="w-full border rounded-md px-2 py-1 text-sm border-gray-300 focus:outline-none focus:ring-1 focus:ring-blue-500 form-input"
@@ -333,7 +369,7 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
const renderField = (key: string, propSchema: JsonSchema, path: string = ''): React.ReactNode => {
const fullPath = path ? `${path}.${key}` : key;
const value = getNestedValue(formValues, fullPath);
const error = errors[fullPath]; // Handle array type
if (propSchema.type === 'array') {
const arrayValue = getNestedValue(formValues, fullPath) || [];
@@ -341,7 +377,11 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
<div key={fullPath} className="mb-6">
<label className="block text-sm font-medium text-gray-700 mb-1">
{key}
{(path
? getNestedValue(jsonSchema, path)?.required?.includes(key)
: jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
</label>
{propSchema.description && (
<p className="text-xs text-gray-500 mb-2">{propSchema.description}</p>
@@ -349,9 +389,11 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
<div className="border border-gray-200 rounded-md p-3 bg-gray-50">
{arrayValue.map((item: any, index: number) => (
<div key={index} className="mb-3 p-3 bg-white border rounded-md">
<div key={index} className="mb-3 p-3 bg-white border border-gray-200 rounded-md">
<div className="flex justify-between items-center mb-2">
<span className="text-sm font-medium text-gray-600">{t('tool.item', { index: index + 1 })}</span>
<span className="text-sm font-medium text-gray-600">
{t('tool.item', { index: index + 1 })}
</span>
<button
type="button"
onClick={() => {
@@ -388,7 +430,9 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
<div key={objKey}>
<label className="block text-xs font-medium text-gray-600 mb-1">
{objKey}
{propSchema.items?.required?.includes(objKey) && (
<span className="text-status-red ml-1">*</span>
)}
</label>
{renderObjectField(objKey, objSchema as JsonSchema, item, (newValue) => {
const newArray = [...arrayValue];
@@ -429,7 +473,7 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
{error && <p className="text-status-red text-xs mt-1">{error}</p>}
</div>
);
} // Handle object type
if (propSchema.type === 'object') {
if (propSchema.properties) {
// Object with defined properties - render as nested form
@@ -437,16 +481,20 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
<div key={fullPath} className="mb-6">
<label className="block text-sm font-medium text-gray-700 mb-1">
{key}
{(path
? getNestedValue(jsonSchema, path)?.required?.includes(key)
: jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
</label>
{propSchema.description && (
<p className="text-xs text-gray-500 mb-2">{propSchema.description}</p>
)}
<div className="border border-gray-200 rounded-md p-4 bg-gray-50">
{Object.entries(propSchema.properties).map(([objKey, objSchema]) =>
renderField(objKey, objSchema as JsonSchema, fullPath),
)}
</div>
{error && <p className="text-status-red text-xs mt-1">{error}</p>}
@@ -458,7 +506,11 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
<div key={fullPath} className="mb-4">
<label className="block text-sm font-medium text-gray-700 mb-1">
{key}
{(path
? getNestedValue(jsonSchema, path)?.required?.includes(key)
: jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
<span className="text-xs text-gray-500 ml-1">(JSON object)</span>
</label>
{propSchema.description && (
@@ -483,13 +535,16 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
</div>
);
}
}
if (propSchema.type === 'string') {
if (propSchema.enum) {
return (
<div key={fullPath} className="mb-4">
<label className="block text-sm font-medium text-gray-700 mb-1">
{key}
{(path ? false : jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
</label>
{propSchema.description && (
<p className="text-xs text-gray-500 mb-2">{propSchema.description}</p>
@@ -514,7 +569,9 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
<div key={fullPath} className="mb-4">
<label className="block text-sm font-medium text-gray-700 mb-1">
{key}
{(path ? false : jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
</label>
{propSchema.description && (
<p className="text-xs text-gray-500 mb-2">{propSchema.description}</p>
@@ -529,12 +586,15 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
</div>
);
}
}
if (propSchema.type === 'number' || propSchema.type === 'integer') {
return (
<div key={fullPath} className="mb-4">
<label className="block text-sm font-medium text-gray-700 mb-1">
{key}
{(path ? false : jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
</label>
{propSchema.description && (
<p className="text-xs text-gray-500 mb-2">{propSchema.description}</p>
@@ -544,7 +604,12 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
step={propSchema.type === 'integer' ? '1' : 'any'}
value={value !== undefined && value !== null ? value : ''}
onChange={(e) => {
const val =
e.target.value === ''
? ''
: propSchema.type === 'integer'
? parseInt(e.target.value)
: parseFloat(e.target.value);
handleInputChange(fullPath, val);
}}
className={`w-full border rounded-md px-3 py-2 form-input ${error ? 'border-red-500' : 'border-gray-300'} focus:outline-none focus:ring-2 focus:ring-blue-500`}
@@ -566,7 +631,9 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
/>
<label className="ml-2 block text-sm text-gray-700">
{key}
{(path ? false : jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
</label>
</div>
{propSchema.description && (
@@ -575,12 +642,14 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
{error && <p className="text-status-red text-xs mt-1">{error}</p>}
</div>
);
} // For other types, show as text input with description
return (
<div key={fullPath} className="mb-4">
<label className="block text-sm font-medium text-gray-700 mb-1">
{key}
{(path ? false : jsonSchema.required?.includes(key)) && (
<span className="text-status-red ml-1">*</span>
)}
<span className="text-xs text-gray-500 ml-1">({propSchema.type})</span>
</label>
{propSchema.description && (
@@ -631,20 +700,22 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
<button
type="button"
onClick={switchToFormMode}
className={`px-3 py-1 text-sm rounded-md transition-colors ${
!isJsonMode
? 'bg-blue-100 text-blue-800 rounded hover:bg-blue-200 text-sm btn-primary'
: 'text-sm text-gray-600 bg-gray-200 rounded hover:bg-gray-300 btn-secondary'
}`}
>
{t('tool.formMode')}
</button>
<button
type="button"
onClick={switchToJsonMode}
className={`px-3 py-1 text-sm rounded-md transition-colors ${
isJsonMode
? 'px-4 py-1 bg-blue-100 text-blue-800 rounded hover:bg-blue-200 text-sm btn-primary'
: 'text-sm text-gray-600 bg-gray-200 rounded hover:bg-gray-300 btn-secondary'
}`}
>
{t('tool.jsonMode')}
</button>
@@ -662,8 +733,9 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
value={jsonText}
onChange={(e) => handleJsonTextChange(e.target.value)}
placeholder={`{\n "key": "value"\n}`}
className={`w-full h-64 border rounded-md px-3 py-2 font-mono text-sm resize-y form-input ${
jsonError ? 'border-red-500' : 'border-gray-300'
} focus:outline-none focus:ring-2 focus:ring-blue-500`}
/>
{jsonError && <p className="text-status-red text-xs mt-1">{jsonError}</p>}
</div>
@@ -696,7 +768,7 @@ const DynamicForm: React.FC<DynamicFormProps> = ({ schema, onSubmit, onCancel, l
/* Form Mode */
<form onSubmit={handleSubmit} className="space-y-4">
{Object.entries(jsonSchema.properties || {}).map(([key, propSchema]) =>
renderField(key, propSchema),
)}
<div className="flex justify-end space-x-2 pt-4">

View File

@@ -0,0 +1,161 @@
import React, { useState, useRef, useEffect } from 'react';
import { Check, ChevronDown, X } from 'lucide-react';
interface MultiSelectProps {
options: { value: string; label: string }[];
selected: string[];
onChange: (selected: string[]) => void;
placeholder?: string;
disabled?: boolean;
className?: string;
}
export const MultiSelect: React.FC<MultiSelectProps> = ({
options,
selected,
onChange,
placeholder = 'Select items...',
disabled = false,
className = '',
}) => {
const [isOpen, setIsOpen] = useState(false);
const [searchTerm, setSearchTerm] = useState('');
const dropdownRef = useRef<HTMLDivElement>(null);
const inputRef = useRef<HTMLInputElement>(null);
// Close dropdown when clicking outside
useEffect(() => {
const handleClickOutside = (event: MouseEvent) => {
if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
setIsOpen(false);
setSearchTerm('');
}
};
document.addEventListener('mousedown', handleClickOutside);
return () => document.removeEventListener('mousedown', handleClickOutside);
}, []);
const filteredOptions = options.filter((option) =>
option.label.toLowerCase().includes(searchTerm.toLowerCase()),
);
const handleToggleOption = (value: string) => {
if (disabled) return;
const newSelected = selected.includes(value)
? selected.filter((item) => item !== value)
: [...selected, value];
onChange(newSelected);
};
const handleRemoveItem = (value: string, e: React.MouseEvent) => {
e.stopPropagation();
if (disabled) return;
onChange(selected.filter((item) => item !== value));
};
const handleToggleDropdown = () => {
if (disabled) return;
setIsOpen(!isOpen);
if (!isOpen) {
setTimeout(() => inputRef.current?.focus(), 0);
}
};
const getSelectedLabels = () => {
return selected
.map((value) => options.find((opt) => opt.value === value)?.label || value)
.filter(Boolean);
};
return (
<div ref={dropdownRef} className={`relative ${className}`}>
{/* Selected items display */}
<div
onClick={handleToggleDropdown}
className={`
min-h-[38px] w-full px-3 py-1.5 border rounded-md shadow-sm
flex flex-wrap items-center gap-1.5 cursor-pointer
transition-all duration-200
${disabled ? 'bg-gray-100 cursor-not-allowed' : 'bg-white hover:border-blue-400'}
${isOpen ? 'border-blue-500 ring-1 ring-blue-500' : 'border-gray-300'}
`}
>
{selected.length > 0 ? (
<>
{getSelectedLabels().map((label, index) => (
<span
key={selected[index]}
className="inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-blue-100 text-blue-800"
>
{label}
{!disabled && (
<button
type="button"
onClick={(e) => handleRemoveItem(selected[index], e)}
className="ml-1 hover:bg-blue-200 rounded-full p-0.5 transition-colors"
>
<X className="h-3 w-3" />
</button>
)}
</span>
))}
</>
) : (
<span className="text-gray-400 text-sm">{placeholder}</span>
)}
<div className="flex-1"></div>
<ChevronDown
className={`h-4 w-4 text-gray-400 transition-transform duration-200 ${isOpen ? 'transform rotate-180' : ''}`}
/>
</div>
{/* Dropdown menu */}
{isOpen && !disabled && (
<div className="absolute z-50 w-full mt-1 bg-white border border-gray-300 rounded-md shadow-lg max-h-60 overflow-hidden">
{/* Search input */}
<div className="p-2 border-b border-gray-200">
<input
ref={inputRef}
type="text"
value={searchTerm}
onChange={(e) => setSearchTerm(e.target.value)}
placeholder="Search..."
className="w-full px-3 py-1.5 text-sm border border-gray-300 rounded focus:outline-none focus:ring-1 focus:ring-blue-500 focus:border-blue-500"
onClick={(e) => e.stopPropagation()}
/>
</div>
{/* Options list */}
<div className="max-h-48 overflow-y-auto">
{filteredOptions.length > 0 ? (
filteredOptions.map((option) => {
const isSelected = selected.includes(option.value);
return (
<div
key={option.value}
onClick={() => handleToggleOption(option.value)}
className={`
px-3 py-2 cursor-pointer flex items-center justify-between
transition-colors duration-150
${isSelected ? 'bg-blue-50 text-blue-700' : 'hover:bg-gray-100'}
`}
>
<span className="text-sm">{option.label}</span>
{isSelected && <Check className="h-4 w-4 text-blue-600" />}
</div>
);
})
) : (
<div className="px-3 py-2 text-sm text-gray-500 text-center">
{searchTerm ? 'No results found' : 'No options available'}
</div>
)}
</div>
</div>
)}
</div>
);
};
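A minimal usage sketch for the new MultiSelect component; the option values and the import path are invented for illustration:

import React, { useState } from 'react';
import { MultiSelect } from './MultiSelect'; // path assumed

const ToolPicker: React.FC = () => {
  const [selected, setSelected] = useState<string[]>([]);
  const options = [
    { value: 'create_issue', label: 'create_issue' },
    { value: 'list_repos', label: 'list_repos' },
  ];
  // Controlled usage: the component reports the full selection on every toggle.
  return (
    <MultiSelect
      options={options}
      selected={selected}
      onChange={setSelected}
      placeholder="Select tools..."
    />
  );
};

export default ToolPicker;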

View File

@@ -1,16 +1,20 @@
import React from 'react';
import { useTranslation } from 'react-i18next';
interface PaginationProps {
currentPage: number;
totalPages: number;
onPageChange: (page: number) => void;
disabled?: boolean;
}
const Pagination: React.FC<PaginationProps> = ({
currentPage,
totalPages,
onPageChange,
disabled = false
}) => {
const { t } = useTranslation();
// Generate page buttons
const getPageButtons = () => {
const buttons = [];
@@ -95,26 +99,26 @@ const Pagination: React.FC<PaginationProps> = ({
<div className="flex justify-center items-center my-6">
<button
onClick={() => onPageChange(Math.max(1, currentPage - 1))}
disabled={disabled || currentPage === 1}
className={`px-3 py-1 rounded mr-2 ${disabled || currentPage === 1
? 'bg-gray-100 text-gray-400 cursor-not-allowed'
: 'bg-gray-200 hover:bg-gray-300 text-gray-700 btn-secondary'
}`}
>
&laquo; {t('common.previous')}
</button>
<div className="flex">{getPageButtons()}</div>
<button
onClick={() => onPageChange(Math.min(totalPages, currentPage + 1))}
disabled={disabled || currentPage === totalPages}
className={`px-3 py-1 rounded ml-2 ${disabled || currentPage === totalPages
? 'bg-gray-100 text-gray-400 cursor-not-allowed'
: 'bg-gray-200 hover:bg-gray-300 text-gray-700 btn-secondary'
}`}
>
{t('common.next')} &raquo;
</button>
</div>
);
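A short usage sketch for the updated Pagination props, disabling the prev/next controls while a fetch is in flight; the wrapper component, prop names, and import path are illustrative assumptions (the component is assumed to be default-exported):

import React from 'react';
import Pagination from './Pagination'; // path assumed

interface ServerListFooterProps {
  page: number;
  totalPages: number;
  loading: boolean;
  onPageChange: (page: number) => void;
}

// Passes the in-flight flag straight through to the new `disabled` prop.
const ServerListFooter: React.FC<ServerListFooterProps> = ({ page, totalPages, loading, onPageChange }) => (
  <Pagination currentPage={page} totalPages={totalPages} onPageChange={onPageChange} disabled={loading} />
);

export default ServerListFooter;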

View File

@@ -171,9 +171,9 @@ const PromptCard = ({ prompt, server, onToggle, onDescriptionUpdate }: PromptCar
};
return (
<div className="bg-white border border-gray-200 shadow rounded-lg p-4 mb-4">
<div className="bg-white border border-gray-200 shadow rounded-lg mb-4">
<div
className="flex justify-between items-center cursor-pointer"
className="flex justify-between items-center p-2 cursor-pointer"
onClick={() => setIsExpanded(!isExpanded)}
>
<div className="flex-1">

View File

@@ -1,19 +1,27 @@
import { useState, useCallback, useRef, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Tool } from '@/types';
import {
ChevronDown,
ChevronRight,
Play,
Loader,
Edit,
Check,
Copy,
} from '@/components/icons/LucideIcons';
import { callTool, ToolCallResult, updateToolDescription } from '@/services/toolService';
import { useSettingsData } from '@/hooks/useSettingsData';
import { useToast } from '@/contexts/ToastContext';
import { Switch } from './ToggleGroup';
import DynamicForm from './DynamicForm';
import ToolResult from './ToolResult';
interface ToolCardProps {
server: string;
tool: Tool;
onToggle?: (toolName: string, enabled: boolean) => void;
onDescriptionUpdate?: (toolName: string, description: string) => void;
}
// Helper to check for "empty" values
@@ -26,165 +34,173 @@ function isEmptyValue(value: any): boolean {
}
const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps) => {
const { t } = useTranslation();
const { showToast } = useToast();
const { nameSeparator } = useSettingsData();
const [isExpanded, setIsExpanded] = useState(false);
const [showRunForm, setShowRunForm] = useState(false);
const [isRunning, setIsRunning] = useState(false);
const [result, setResult] = useState<ToolCallResult | null>(null);
const [isEditingDescription, setIsEditingDescription] = useState(false);
const [customDescription, setCustomDescription] = useState(tool.description || '');
const descriptionInputRef = useRef<HTMLInputElement>(null);
const descriptionTextRef = useRef<HTMLSpanElement>(null);
const [textWidth, setTextWidth] = useState<number>(0);
const [copiedToolName, setCopiedToolName] = useState(false);
// Focus the input when editing mode is activated
useEffect(() => {
if (isEditingDescription && descriptionInputRef.current) {
descriptionInputRef.current.focus();
// Set input width to match text width
if (textWidth > 0) {
descriptionInputRef.current.style.width = `${textWidth + 20}px`; // Add some padding
}
}
}, [isEditingDescription, textWidth]);
// Measure text width when not editing
useEffect(() => {
if (!isEditingDescription && descriptionTextRef.current) {
setTextWidth(descriptionTextRef.current.offsetWidth);
}
}, [isEditingDescription, customDescription]);
// Generate a unique key for localStorage based on tool name and server
const getStorageKey = useCallback(() => {
return `mcphub_tool_form_${server ? `${server}_` : ''}${tool.name}`;
}, [tool.name, server]);
// Clear form data from localStorage
const clearStoredFormData = useCallback(() => {
localStorage.removeItem(getStorageKey());
}, [getStorageKey]);
const handleToggle = (enabled: boolean) => {
if (onToggle) {
onToggle(tool.name, enabled);
}
};
const handleDescriptionEdit = () => {
setIsEditingDescription(true);
};
const handleDescriptionSave = async () => {
try {
const result = await updateToolDescription(server, tool.name, customDescription);
if (result.success) {
setIsEditingDescription(false);
if (onDescriptionUpdate) {
onDescriptionUpdate(tool.name, customDescription);
}
} else {
// Revert on error
setCustomDescription(tool.description || '');
console.error('Failed to update tool description:', result.error);
}
} catch (error) {
console.error('Error updating tool description:', error);
setCustomDescription(tool.description || '');
setIsEditingDescription(false);
}
};
const handleDescriptionChange = (e: React.ChangeEvent<HTMLInputElement>) => {
setCustomDescription(e.target.value);
};
const handleDescriptionKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
if (e.key === 'Enter') {
handleDescriptionSave();
} else if (e.key === 'Escape') {
setCustomDescription(tool.description || '');
setIsEditingDescription(false);
}
};
const handleCopyToolName = async (e: React.MouseEvent) => {
e.stopPropagation();
try {
if (navigator.clipboard && window.isSecureContext) {
await navigator.clipboard.writeText(tool.name);
setCopiedToolName(true);
showToast(t('common.copySuccess'), 'success');
setTimeout(() => setCopiedToolName(false), 2000);
} else {
// Fallback for HTTP or unsupported clipboard API
const textArea = document.createElement('textarea');
textArea.value = tool.name;
textArea.style.position = 'fixed';
textArea.style.left = '-9999px';
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
try {
document.execCommand('copy');
setCopiedToolName(true);
showToast(t('common.copySuccess'), 'success');
setTimeout(() => setCopiedToolName(false), 2000);
} catch (err) {
showToast(t('common.copyFailed'), 'error');
console.error('Copy to clipboard failed:', err);
}
document.body.removeChild(textArea);
}
} catch (error) {
showToast(t('common.copyFailed'), 'error');
console.error('Copy to clipboard failed:', error);
}
};
const handleRunTool = async (arguments_: Record<string, any>) => {
setIsRunning(true);
try {
// filter empty values
arguments_ = Object.fromEntries(
Object.entries(arguments_).filter(([_, v]) => !isEmptyValue(v)),
);
const result = await callTool(
{
toolName: tool.name,
arguments: arguments_,
},
server,
);
setResult(result);
// Clear form data on successful submission
// clearStoredFormData()
} catch (error) {
setResult({
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
});
} finally {
setIsRunning(false);
}
};
const handleCancelRun = () => {
setShowRunForm(false);
// Clear form data when cancelled
clearStoredFormData();
setResult(null);
};
const handleCloseResult = () => {
setResult(null);
};
return (
<div className="bg-white border border-gray-200 shadow rounded-lg mb-4">
<div
className="flex justify-between items-center cursor-pointer p-2"
onClick={(e) => {
e.stopPropagation();
setIsExpanded(!isExpanded);
}}
>
<div className="flex-1">
<h3 className="text-lg font-medium text-gray-900 inline-flex items-center">
@@ -194,11 +210,7 @@ const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps
onClick={handleCopyToolName}
title={t('common.copy')}
>
{copiedToolName ? <Check size={16} className="text-green-500" /> : <Copy size={16} />}
</button>
<span className="ml-2 text-sm font-normal text-gray-600 inline-flex items-center">
{isEditingDescription ? (
@@ -213,14 +225,14 @@ const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps
onClick={(e) => e.stopPropagation()}
style={{
minWidth: '100px',
width: textWidth > 0 ? `${textWidth + 20}px` : 'auto',
}}
/>
<button
className="ml-2 p-1 text-green-600 hover:text-green-800 cursor-pointer transition-colors"
onClick={(e) => {
e.stopPropagation();
handleDescriptionSave();
}}
>
<Check size={16} />
@@ -228,12 +240,14 @@ const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps
</>
) : (
<>
<span ref={descriptionTextRef}>
{customDescription || t('tool.noDescription')}
</span>
<button
className="ml-2 p-1 text-gray-500 hover:text-blue-600 cursor-pointer transition-colors"
onClick={(e) => {
e.stopPropagation();
handleDescriptionEdit();
}}
>
<Edit size={14} />
@@ -244,10 +258,7 @@ const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps
</h3>
</div>
<div className="flex items-center space-x-2">
<div className="flex items-center space-x-2" onClick={(e) => e.stopPropagation()}>
<Switch
checked={tool.enabled ?? true}
onCheckedChange={handleToggle}
@@ -256,18 +267,14 @@ const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps
</div>
<button
onClick={(e) => {
e.stopPropagation()
setIsExpanded(true) // Ensure card is expanded when showing run form
setShowRunForm(true)
e.stopPropagation();
setIsExpanded(true); // Ensure card is expanded when showing run form
setShowRunForm(true);
}}
className="flex items-center space-x-1 px-3 py-1 text-sm text-blue-600 bg-blue-50 hover:bg-blue-100 rounded-md transition-colors btn-primary"
disabled={isRunning || !tool.enabled}
>
{isRunning ? <Loader size={14} className="animate-spin" /> : <Play size={14} />}
<span>{isRunning ? t('tool.running') : t('tool.run')}</span>
</button>
<button className="text-gray-400 hover:text-gray-600">
@@ -297,7 +304,9 @@ const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps
onCancel={handleCancelRun}
loading={isRunning}
storageKey={getStorageKey()}
title={t('tool.runToolWithName', {
name: tool.name.replace(server + nameSeparator, ''),
})}
/>
{/* Tool Result */}
{result && (
@@ -307,12 +316,10 @@ const ToolCard = ({ tool, server, onToggle, onDescriptionUpdate }: ToolCardProps
)}
</div>
)}
</div>
)}
</div>
);
};
export default ToolCard;

View File

@@ -14,14 +14,17 @@ const initialState: AuthState = {
// Create auth context
const AuthContext = createContext<{
auth: AuthState;
login: (
username: string,
password: string,
) => Promise<{ success: boolean; isUsingDefaultPassword?: boolean; message?: string }>;
register: (username: string, password: string, isAdmin?: boolean) => Promise<boolean>;
logout: () => void;
}>({
auth: initialState,
login: async () => ({ success: false }),
register: async () => false,
logout: () => {},
});
// Auth provider component
@@ -90,7 +93,10 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
}, []);
// Login function
const login = async (
username: string,
password: string,
): Promise<{ success: boolean; isUsingDefaultPassword?: boolean; message?: string }> => {
try {
const response = await authService.login({ username, password });
@@ -111,7 +117,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
loading: false,
error: response.message || 'Authentication failed',
});
return { success: false, message: response.message };
}
} catch (error) {
setAuth({
@@ -119,7 +125,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
loading: false,
error: 'Authentication failed',
});
return { success: false, message: error instanceof Error ? error.message : undefined };
}
};
@@ -127,7 +133,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
const register = async (
username: string,
password: string,
isAdmin = false,
): Promise<boolean> => {
try {
const response = await authService.register({ username, password, isAdmin });
@@ -175,4 +181,4 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
};
// Custom hook to use auth context
export const useAuth = () => useContext(AuthContext);
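With the widened return type, a login form can surface the server-provided message directly. A minimal sketch of consuming it, written as a standalone helper so the assumptions are explicit (the error-handling details and wording are illustrative, not taken from the actual login page):

// Illustrative handler; in the app, `login` would come from useAuth() and the error
// callback from local component state or a toast.
async function submitLogin(
  login: (
    u: string,
    p: string,
  ) => Promise<{ success: boolean; isUsingDefaultPassword?: boolean; message?: string }>,
  username: string,
  password: string,
  onError: (msg: string) => void,
): Promise<boolean> {
  const { success, message } = await login(username, password);
  if (!success) {
    // `message` now carries the backend's error text (or undefined on network failures).
    onError(message || 'Authentication failed');
    return false;
  }
  return true;
}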

View File

@@ -17,6 +17,16 @@ const CONFIG = {
},
};
// Pagination info type
interface PaginationInfo {
page: number;
limit: number;
total: number;
totalPages: number;
hasNextPage: boolean;
hasPrevPage: boolean;
}
// Context type definition
interface ServerContextType {
servers: Server[];
@@ -24,6 +34,11 @@ interface ServerContextType {
setError: (error: string | null) => void;
isLoading: boolean;
fetchAttempts: number;
pagination: PaginationInfo | null;
currentPage: number;
serversPerPage: number;
setCurrentPage: (page: number) => void;
setServersPerPage: (limit: number) => void;
triggerRefresh: () => void;
refreshIfNeeded: () => void; // Smart refresh with debounce
handleServerAdd: () => void;
@@ -45,6 +60,9 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
const [refreshKey, setRefreshKey] = useState(0);
const [isInitialLoading, setIsInitialLoading] = useState(true);
const [fetchAttempts, setFetchAttempts] = useState(0);
const [pagination, setPagination] = useState<PaginationInfo | null>(null);
const [currentPage, setCurrentPage] = useState(1);
const [serversPerPage, setServersPerPage] = useState(10);
// Timer reference for polling
const intervalRef = useRef<NodeJS.Timeout | null>(null);
@@ -73,18 +91,31 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
const fetchServers = async () => {
try {
console.log('[ServerContext] Fetching servers from API...');
// Build query parameters for pagination
const params = new URLSearchParams();
params.append('page', currentPage.toString());
params.append('limit', serversPerPage.toString());
const data = await apiGet(`/servers?${params.toString()}`);
// Update last fetch time
lastFetchTimeRef.current = Date.now();
if (data && data.success && Array.isArray(data.data)) {
setServers(data.data);
// Update pagination info if available
if (data.pagination) {
setPagination(data.pagination);
} else {
setPagination(null);
}
} else if (data && Array.isArray(data)) {
// Compatibility handling for non-paginated responses
setServers(data);
setPagination(null);
} else {
console.error('Invalid server data format:', data);
setServers([]);
setPagination(null);
}
// Reset error state
@@ -114,7 +145,7 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
// Set up regular polling
intervalRef.current = setInterval(fetchServers, CONFIG.normal.pollingInterval);
},
[t, currentPage, serversPerPage],
);
// Watch for authentication status changes
@@ -150,7 +181,11 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
const fetchInitialData = async () => {
try {
console.log('[ServerContext] Initial fetch - attempt', attemptsRef.current + 1);
// Build query parameters for pagination
const params = new URLSearchParams();
params.append('page', currentPage.toString());
params.append('limit', serversPerPage.toString());
const data = await apiGet(`/servers?${params.toString()}`);
// Update last fetch time
lastFetchTimeRef.current = Date.now();
@@ -158,6 +193,12 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
// Handle API response wrapper object, extract data field
if (data && data.success && Array.isArray(data.data)) {
setServers(data.data);
// Update pagination info if available
if (data.pagination) {
setPagination(data.pagination);
} else {
setPagination(null);
}
setIsInitialLoading(false);
// Initialization successful, start normal polling (skip immediate to avoid duplicate fetch)
startNormalPolling({ immediate: false });
@@ -165,6 +206,7 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
} else if (data && Array.isArray(data)) {
// Compatibility handling, if API directly returns array
setServers(data);
setPagination(null);
setIsInitialLoading(false);
// Initialization successful, start normal polling (skip immediate to avoid duplicate fetch)
startNormalPolling({ immediate: false });
@@ -173,6 +215,7 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
// If data format is not as expected, set to empty array
console.error('Invalid server data format:', data);
setServers([]);
setPagination(null);
setIsInitialLoading(false);
// Initialization successful but data is empty, start normal polling (skip immediate)
startNormalPolling({ immediate: false });
@@ -227,7 +270,7 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
return () => {
clearTimer();
};
}, [refreshKey, t, isInitialLoading, startNormalPolling, currentPage, serversPerPage]);
// Manually trigger refresh (always refreshes)
const triggerRefresh = useCallback(() => {
@@ -383,12 +426,28 @@ export const ServerProvider: React.FC<{ children: React.ReactNode }> = ({ childr
[t, triggerRefresh],
);
// Handle page change
const handlePageChange = useCallback((page: number) => {
setCurrentPage(page);
}, []);
// Handle servers per page change
const handleServersPerPageChange = useCallback((limit: number) => {
setServersPerPage(limit);
setCurrentPage(1); // Reset to first page when changing page size
}, []);
const value: ServerContextType = {
servers,
error,
setError,
isLoading: isInitialLoading,
fetchAttempts,
pagination,
currentPage,
serversPerPage,
setCurrentPage: handlePageChange,
setServersPerPage: handleServersPerPageChange,
triggerRefresh,
refreshIfNeeded,
handleServerAdd,

View File

@@ -7,9 +7,10 @@ import React, {
ReactNode,
} from 'react';
import { useTranslation } from 'react-i18next';
import { ApiResponse, BearerKey } from '@/types';
import { useToast } from '@/contexts/ToastContext';
import { useAuth } from '@/contexts/AuthContext';
import { apiGet, apiPut, apiPost, apiDelete } from '@/utils/fetchInterceptor';
// Define types for the settings data
interface RoutingConfig {
@@ -66,6 +67,7 @@ interface SystemSettings {
oauthServer?: OAuthServerConfig;
enableSessionRebuild?: boolean;
};
bearerKeys?: BearerKey[];
}
interface TempRoutingConfig {
@@ -82,6 +84,7 @@ interface SettingsContextValue {
oauthServerConfig: OAuthServerConfig;
nameSeparator: string;
enableSessionRebuild: boolean;
bearerKeys: BearerKey[];
loading: boolean;
error: string | null;
setError: React.Dispatch<React.SetStateAction<string | null>>;
@@ -109,6 +112,14 @@ interface SettingsContextValue {
updateNameSeparator: (value: string) => Promise<boolean | undefined>;
updateSessionRebuild: (value: boolean) => Promise<boolean | undefined>;
exportMCPSettings: (serverName?: string) => Promise<any>;
// Bearer key management
refreshBearerKeys: () => Promise<void>;
createBearerKey: (payload: Omit<BearerKey, 'id'>) => Promise<BearerKey | null>;
updateBearerKey: (
id: string,
updates: Partial<Omit<BearerKey, 'id'>>,
) => Promise<BearerKey | null>;
deleteBearerKey: (id: string) => Promise<boolean>;
}
const getDefaultOAuthServerConfig = (): OAuthServerConfig => ({
@@ -143,6 +154,7 @@ interface SettingsProviderProps {
export const SettingsProvider: React.FC<SettingsProviderProps> = ({ children }) => {
const { t } = useTranslation();
const { showToast } = useToast();
const { auth } = useAuth();
const [routingConfig, setRoutingConfig] = useState<RoutingConfig>({
enableGlobalRoute: true,
@@ -183,6 +195,7 @@ export const SettingsProvider: React.FC<SettingsProviderProps> = ({ children })
const [nameSeparator, setNameSeparator] = useState<string>('-');
const [enableSessionRebuild, setEnableSessionRebuild] = useState<boolean>(false);
const [bearerKeys, setBearerKeys] = useState<BearerKey[]>([]);
const [loading, setLoading] = useState(false);
const [error, setError] = useState<string | null>(null);
@@ -279,6 +292,10 @@ export const SettingsProvider: React.FC<SettingsProviderProps> = ({ children })
if (data.success && data.data?.systemConfig?.enableSessionRebuild !== undefined) {
setEnableSessionRebuild(data.data.systemConfig.enableSessionRebuild);
}
if (data.success && Array.isArray(data.data?.bearerKeys)) {
setBearerKeys(data.data.bearerKeys);
}
} catch (error) {
console.error('Failed to fetch settings:', error);
setError(error instanceof Error ? error.message : 'Failed to fetch settings');
@@ -659,11 +676,87 @@ export const SettingsProvider: React.FC<SettingsProviderProps> = ({ children })
}
};
// Bearer key management helpers
const refreshBearerKeys = async () => {
try {
const data: ApiResponse<BearerKey[]> = await apiGet('/auth/keys');
if (data.success && Array.isArray(data.data)) {
setBearerKeys(data.data);
}
} catch (error) {
console.error('Failed to refresh bearer keys:', error);
showToast(t('errors.failedToFetchSettings'));
}
};
const createBearerKey = async (payload: Omit<BearerKey, 'id'>): Promise<BearerKey | null> => {
try {
const data: ApiResponse<BearerKey> = await apiPost('/auth/keys', payload as any);
if (data.success && data.data) {
await refreshBearerKeys();
showToast(t('settings.systemConfigUpdated'));
return data.data;
}
showToast(data.message || t('errors.failedToUpdateRoutingConfig'));
return null;
} catch (error) {
console.error('Failed to create bearer key:', error);
showToast(t('errors.failedToUpdateRoutingConfig'));
return null;
}
};
const updateBearerKey = async (
id: string,
updates: Partial<Omit<BearerKey, 'id'>>,
): Promise<BearerKey | null> => {
try {
const data: ApiResponse<BearerKey> = await apiPut(`/auth/keys/${id}`, updates as any);
if (data.success && data.data) {
await refreshBearerKeys();
showToast(t('settings.systemConfigUpdated'));
return data.data;
}
showToast(data.message || t('errors.failedToUpdateRoutingConfig'));
return null;
} catch (error) {
console.error('Failed to update bearer key:', error);
showToast(t('errors.failedToUpdateRoutingConfig'));
return null;
}
};
const deleteBearerKey = async (id: string): Promise<boolean> => {
try {
const data: ApiResponse = await apiDelete(`/auth/keys/${id}`);
if (data.success) {
await refreshBearerKeys();
showToast(t('settings.systemConfigUpdated'));
return true;
}
showToast(data.message || t('errors.failedToUpdateRoutingConfig'));
return false;
} catch (error) {
console.error('Failed to delete bearer key:', error);
showToast(t('errors.failedToUpdateRoutingConfig'));
return false;
}
};
// Fetch settings when the component mounts or refreshKey changes
useEffect(() => {
fetchSettings();
}, [fetchSettings, refreshKey]);
// Watch for authentication status changes - refetch settings after login
useEffect(() => {
if (auth.isAuthenticated) {
console.log('[SettingsContext] User authenticated, triggering settings refresh');
// When user logs in, trigger a refresh to load settings
triggerRefresh();
}
}, [auth.isAuthenticated, triggerRefresh]);
useEffect(() => {
if (routingConfig) {
setTempRoutingConfig({
@@ -682,6 +775,7 @@ export const SettingsProvider: React.FC<SettingsProviderProps> = ({ children })
oauthServerConfig,
nameSeparator,
enableSessionRebuild,
bearerKeys,
loading,
error,
setError,
@@ -699,6 +793,10 @@ export const SettingsProvider: React.FC<SettingsProviderProps> = ({ children })
updateNameSeparator,
updateSessionRebuild,
exportMCPSettings,
refreshBearerKeys,
createBearerKey,
updateBearerKey,
deleteBearerKey,
};
return <SettingsContext.Provider value={value}>{children}</SettingsContext.Provider>;
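A minimal usage sketch for the bearer-key helpers this context now exposes. It assumes a consumer can reach the context value — shown here through a hypothetical `useSettingsContext` hook; the real module may export a differently named accessor — and the payload values are illustrative only:

```tsx
import React, { useState } from 'react';
// Hypothetical accessor; substitute whatever hook/context export the module actually provides.
import { useSettingsContext } from '@/contexts/SettingsContext';

const BearerKeyQuickAdd: React.FC = () => {
  const { bearerKeys, createBearerKey, deleteBearerKey } = useSettingsContext();
  const [name, setName] = useState('');

  const handleCreate = async () => {
    // createBearerKey resolves to the created key on success, or null on failure.
    const created = await createBearerKey({
      name,
      token: crypto.randomUUID(), // illustrative; a token can also be entered manually
      enabled: true,
      accessType: 'all',
    });
    if (created) setName('');
  };

  return (
    <div>
      <input value={name} onChange={(e) => setName(e.target.value)} />
      <button onClick={handleCreate}>Add key</button>
      <ul>
        {bearerKeys.map((key) => (
          <li key={key.id}>
            {key.name}
            <button onClick={() => void deleteBearerKey(key.id)}>Delete</button>
          </li>
        ))}
      </ul>
    </div>
  );
};

export default BearerKeyQuickAdd;
```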

View File

@@ -6,6 +6,7 @@ import { useServerData } from '@/hooks/useServerData';
import AddGroupForm from '@/components/AddGroupForm';
import EditGroupForm from '@/components/EditGroupForm';
import GroupCard from '@/components/GroupCard';
import GroupImportForm from '@/components/GroupImportForm';
const GroupsPage: React.FC = () => {
const { t } = useTranslation();
@@ -15,12 +16,13 @@ const GroupsPage: React.FC = () => {
error: groupError,
setError: setGroupError,
deleteGroup,
triggerRefresh
triggerRefresh,
} = useGroupData();
const { servers } = useServerData({ refreshOnMount: true });
const [editingGroup, setEditingGroup] = useState<Group | null>(null);
const [showAddForm, setShowAddForm] = useState(false);
const [showImportForm, setShowImportForm] = useState(false);
const handleEditClick = (group: Group) => {
setEditingGroup(group);
@@ -47,6 +49,11 @@ const GroupsPage: React.FC = () => {
triggerRefresh(); // Refresh the groups list after adding
};
const handleImportSuccess = () => {
setShowImportForm(false);
triggerRefresh(); // Refresh the groups list after import
};
return (
<div>
<div className="flex justify-between items-center mb-8">
@@ -56,11 +63,38 @@ const GroupsPage: React.FC = () => {
onClick={handleAddGroup}
className="px-4 py-2 bg-blue-100 text-blue-800 rounded hover:bg-blue-200 flex items-center btn-primary transition-all duration-200"
>
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4 mr-2" viewBox="0 0 20 20" fill="currentColor">
<path fillRule="evenodd" d="M10 3a1 1 0 00-1 1v5H4a1 1 0 100 2h5v5a1 1 0 102 0v-5h5a1 1 0 100-2h-5V4a1 1 0 00-1-1z" clipRule="evenodd" />
<svg
xmlns="http://www.w3.org/2000/svg"
className="h-4 w-4 mr-2"
viewBox="0 0 20 20"
fill="currentColor"
>
<path
fillRule="evenodd"
d="M10 3a1 1 0 00-1 1v5H4a1 1 0 100 2h5v5a1 1 0 102 0v-5h5a1 1 0 100-2h-5V4a1 1 0 00-1-1z"
clipRule="evenodd"
/>
</svg>
{t('groups.add')}
</button>
<button
onClick={() => setShowImportForm(true)}
className="px-4 py-2 bg-blue-100 text-blue-800 rounded hover:bg-blue-200 flex items-center btn-primary transition-all duration-200"
>
<svg
xmlns="http://www.w3.org/2000/svg"
className="h-4 w-4 mr-2"
viewBox="0 0 20 20"
fill="currentColor"
>
<path
fillRule="evenodd"
d="M3 17a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1zm3.293-7.707a1 1 0 011.414 0L9 10.586V3a1 1 0 112 0v7.586l1.293-1.293a1 1 0 111.414 1.414l-3 3a1 1 0 01-1.414 0l-3-3a1 1 0 010-1.414z"
clipRule="evenodd"
/>
</svg>
{t('groupImport.button')}
</button>
</div>
</div>
@@ -73,9 +107,25 @@ const GroupsPage: React.FC = () => {
{groupsLoading ? (
<div className="bg-white shadow rounded-lg p-6 loading-container">
<div className="flex flex-col items-center justify-center">
<svg className="animate-spin h-10 w-10 text-blue-500 mb-4" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
<svg
className="animate-spin h-10 w-10 text-blue-500 mb-4"
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
>
<circle
className="opacity-25"
cx="12"
cy="12"
r="10"
stroke="currentColor"
strokeWidth="4"
></circle>
<path
className="opacity-75"
fill="currentColor"
d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
></path>
</svg>
<p className="text-gray-600">{t('app.loading')}</p>
</div>
@@ -98,8 +148,13 @@ const GroupsPage: React.FC = () => {
</div>
)}
{showAddForm && (
<AddGroupForm onAdd={handleAddComplete} onCancel={handleAddComplete} />
{showAddForm && <AddGroupForm onAdd={handleAddComplete} onCancel={handleAddComplete} />}
{showImportForm && (
<GroupImportForm
onSuccess={handleImportSuccess}
onCancel={() => setShowImportForm(false)}
/>
)}
{editingGroup && (
@@ -113,4 +168,4 @@ const GroupsPage: React.FC = () => {
);
};
export default GroupsPage;
export default GroupsPage;

View File

@@ -44,6 +44,24 @@ const LoginPage: React.FC = () => {
return sanitizeReturnUrl(params.get('returnUrl'));
}, [location.search]);
const isServerUnavailableError = useCallback((message?: string) => {
if (!message) return false;
const normalized = message.toLowerCase();
return (
normalized.includes('failed to fetch') ||
normalized.includes('networkerror') ||
normalized.includes('network error') ||
normalized.includes('connection refused') ||
normalized.includes('unable to connect') ||
normalized.includes('fetch error') ||
normalized.includes('econnrefused') ||
normalized.includes('http 500') ||
normalized.includes('internal server error') ||
normalized.includes('proxy error')
);
}, []);
const buildRedirectTarget = useCallback(() => {
if (!returnUrl) {
return '/';
@@ -100,10 +118,20 @@ const LoginPage: React.FC = () => {
redirectAfterLogin();
}
} else {
setError(t('auth.loginFailed'));
const message = result.message;
if (isServerUnavailableError(message)) {
setError(t('auth.serverUnavailable'));
} else {
setError(t('auth.loginFailed'));
}
}
} catch (err) {
setError(t('auth.loginError'));
const message = err instanceof Error ? err.message : undefined;
if (isServerUnavailableError(message)) {
setError(t('auth.serverUnavailable'));
} else {
setError(t('auth.loginError'));
}
} finally {
setLoading(false);
}
@@ -131,13 +159,21 @@ const LoginPage: React.FC = () => {
}}
/>
<div className="pointer-events-none absolute inset-0 -z-10">
<svg className="h-full w-full opacity-[0.08] dark:opacity-[0.12]" xmlns="http://www.w3.org/2000/svg">
<svg
className="h-full w-full opacity-[0.08] dark:opacity-[0.12]"
xmlns="http://www.w3.org/2000/svg"
>
<defs>
<pattern id="grid" width="32" height="32" patternUnits="userSpaceOnUse">
<path d="M 32 0 L 0 0 0 32" fill="none" stroke="currentColor" strokeWidth="0.5" />
</pattern>
</defs>
<rect width="100%" height="100%" fill="url(#grid)" className="text-gray-400 dark:text-gray-300" />
<rect
width="100%"
height="100%"
fill="url(#grid)"
className="text-gray-400 dark:text-gray-300"
/>
</svg>
</div>

View File

@@ -8,6 +8,7 @@ import EditServerForm from '@/components/EditServerForm';
import { useServerData } from '@/hooks/useServerData';
import DxtUploadForm from '@/components/DxtUploadForm';
import JSONImportForm from '@/components/JSONImportForm';
import Pagination from '@/components/ui/Pagination';
const ServersPage: React.FC = () => {
const { t } = useTranslation();
@@ -17,6 +18,11 @@ const ServersPage: React.FC = () => {
error,
setError,
isLoading,
pagination,
currentPage,
serversPerPage,
setCurrentPage,
setServersPerPage,
handleServerAdd,
handleServerEdit,
handleServerRemove,
@@ -151,19 +157,66 @@ const ServersPage: React.FC = () => {
<p className="text-gray-600">{t('app.noServers')}</p>
</div>
) : (
<div className="space-y-6">
{servers.map((server, index) => (
<ServerCard
key={index}
server={server}
onRemove={handleServerRemove}
onEdit={handleEditClick}
onToggle={handleServerToggle}
onRefresh={triggerRefresh}
onReload={handleServerReload}
/>
))}
</div>
<>
<div className="space-y-6">
{servers.map((server, index) => (
<ServerCard
key={index}
server={server}
onRemove={handleServerRemove}
onEdit={handleEditClick}
onToggle={handleServerToggle}
onRefresh={triggerRefresh}
onReload={handleServerReload}
/>
))}
</div>
<div className="flex items-center mb-4">
<div className="flex-[2] text-sm text-gray-500">
{pagination ? (
t('common.showing', {
start: (pagination.page - 1) * pagination.limit + 1,
end: Math.min(pagination.page * pagination.limit, pagination.total),
total: pagination.total
})
) : (
t('common.showing', {
start: 1,
end: servers.length,
total: servers.length
})
)}
</div>
<div className="flex-[4] flex justify-center">
{pagination && pagination.totalPages > 1 && (
<Pagination
currentPage={currentPage}
totalPages={pagination.totalPages}
onPageChange={setCurrentPage}
disabled={isLoading}
/>
)}
</div>
<div className="flex-[2] flex items-center justify-end space-x-2">
<label htmlFor="perPage" className="text-sm text-gray-600">
{t('common.itemsPerPage')}:
</label>
<select
id="perPage"
value={serversPerPage}
onChange={(e) => setServersPerPage(Number(e.target.value))}
disabled={isLoading}
className="border rounded p-1 text-sm btn-secondary outline-none disabled:opacity-50 disabled:cursor-not-allowed"
>
<option value={5}>5</option>
<option value={10}>10</option>
<option value={20}>20</option>
<option value={50}>50</option>
</select>
</div>
</div>
</>
)}
{editingServer && (

File diff suppressed because it is too large.

View File

@@ -29,7 +29,7 @@ export const login = async (credentials: LoginCredentials): Promise<AuthResponse
console.error('Login error:', error);
return {
success: false,
message: 'An error occurred during login',
message: error instanceof Error ? error.message : 'An error occurred during login',
};
}
};

View File

@@ -105,6 +105,17 @@ export interface Prompt {
enabled?: boolean;
}
// Proxychains4 configuration for STDIO servers (Linux/macOS only)
export interface ProxychainsConfig {
enabled?: boolean; // Enable/disable proxychains4 proxy routing
type?: 'socks4' | 'socks5' | 'http'; // Proxy protocol type
host?: string; // Proxy server hostname or IP address
port?: number; // Proxy server port
username?: string; // Proxy authentication username (optional)
password?: string; // Proxy authentication password (optional)
configPath?: string; // Path to custom proxychains4 configuration file (optional)
}
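A sketch of how a STDIO server entry might carry this proxy block, based on the fields above; the server name, command, and proxy address are illustrative, not taken from the repository:

```ts
import { ServerConfig } from '@/types';

// Hypothetical STDIO server routed through a local SOCKS5 proxy (Linux/macOS only).
const fetchServer: ServerConfig = {
  type: 'stdio',
  command: 'uvx',
  args: ['mcp-server-fetch'],
  proxy: {
    enabled: true,
    type: 'socks5',
    host: '127.0.0.1',
    port: 1080,
    // username, password, and configPath are optional and omitted here.
  },
};
```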
// Server config types
export interface ServerConfig {
type?: 'stdio' | 'sse' | 'streamable-http' | 'openapi';
@@ -123,6 +134,8 @@ export interface ServerConfig {
resetTimeoutOnProgress?: boolean; // Reset timeout on progress notifications
maxTotalTimeout?: number; // Maximum total timeout in milliseconds
}; // MCP request options configuration
// Proxychains4 proxy configuration for STDIO servers (Linux/macOS only, Windows not supported)
proxy?: ProxychainsConfig;
// OAuth authentication for upstream MCP servers
oauth?: {
clientId?: string; // OAuth client ID
@@ -309,6 +322,19 @@ export interface ApiResponse<T = any> {
data?: T;
}
// Bearer authentication key configuration (frontend view model)
export type BearerKeyAccessType = 'all' | 'groups' | 'servers' | 'custom';
export interface BearerKey {
id: string;
name: string;
token: string;
enabled: boolean;
accessType: BearerKeyAccessType;
allowedGroups?: string[];
allowedServers?: string[];
}
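An illustrative value of this shape, showing the new 'custom' access type combining group and server scoping (all field values are made up):

```ts
import { BearerKey } from '@/types';

const ciKey: BearerKey = {
  id: 'a1b2c3d4-0000-4000-8000-000000000000', // server-generated UUID in practice
  name: 'ci-pipeline',
  token: 'example-token', // keep secret and rotate in real use
  enabled: true,
  accessType: 'custom',
  allowedGroups: ['build-tools'],
  allowedServers: ['fetch'],
};
```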
// Auth types
export interface IUser {
username: string;

View File

@@ -61,6 +61,7 @@
"emptyFields": "Username and password cannot be empty",
"loginFailed": "Login failed, please check your username and password",
"loginError": "An error occurred during login",
"serverUnavailable": "Unable to connect to the server. Please check your network connection or try again later",
"currentPassword": "Current Password",
"newPassword": "New Password",
"confirmPassword": "Confirm Password",
@@ -247,13 +248,21 @@
"wechat": "WeChat",
"discord": "Discord",
"required": "Required",
"itemsPerPage": "Items per page",
"showing": "Showing {{start}}-{{end}} of {{total}}",
"previous": "Previous",
"next": "Next",
"secret": "Secret",
"default": "Default",
"value": "Value",
"type": "Type",
"repeated": "Repeated",
"valueHint": "Value Hint",
"choices": "Choices"
"choices": "Choices",
"actions": "Actions",
"saving": "Saving...",
"active": "Active",
"inactive": "Inactive"
},
"nav": {
"dashboard": "Dashboard",
@@ -276,7 +285,7 @@
"recentServers": "Recent Servers"
},
"servers": {
"title": "Servers Management"
"title": "Server Management"
},
"groups": {
"title": "Group Management"
@@ -553,6 +562,28 @@
"bearerAuthKey": "Bearer Authentication Key",
"bearerAuthKeyDescription": "The authentication key that will be required in the Bearer token",
"bearerAuthKeyPlaceholder": "Enter bearer authentication key",
"bearerKeysSectionTitle": "Keys",
"bearerKeysSectionDescription": "Manage multiple keys with different access scopes.",
"noBearerKeys": "No keys configured yet.",
"bearerKeyName": "Name",
"bearerKeyToken": "Token",
"bearerKeyEnabled": "Enabled",
"bearerKeyAccessType": "Access scope",
"bearerKeyAccessAll": "All",
"bearerKeyAccessGroups": "Groups",
"bearerKeyAccessServers": "Servers",
"bearerKeyAccessCustom": "Custom",
"bearerKeyAllowedGroups": "Allowed groups",
"bearerKeyAllowedServers": "Allowed servers",
"addBearerKey": "Add key",
"addBearerKeyButton": "Create",
"bearerKeyRequired": "Name and token are required",
"deleteBearerKeyConfirm": "Are you sure you want to delete this key?",
"generate": "Generate",
"selectGroups": "Select Groups",
"selectServers": "Select Servers",
"selectAtLeastOneGroup": "Please select at least one group",
"selectAtLeastOneServer": "Please select at least one server",
"skipAuth": "Skip Authentication",
"skipAuthDescription": "Bypass login requirement for frontend and API access (DEFAULT OFF for security)",
"pythonIndexUrl": "Python Package Repository URL",
@@ -672,6 +703,22 @@
"importFailed": "Failed to import servers",
"partialSuccess": "Imported {{count}} of {{total}} servers successfully. Some servers failed:"
},
"groupImport": {
"button": "Import",
"title": "Import Groups from JSON",
"inputLabel": "Group Configuration JSON",
"inputHelp": "Paste your group configuration JSON. Each group can contain a list of servers.",
"preview": "Preview",
"previewTitle": "Preview Groups to Import",
"import": "Import",
"importing": "Importing...",
"invalidFormat": "Invalid JSON format. The JSON must contain a 'groups' array.",
"missingName": "Each group must have a 'name' field.",
"parseError": "Failed to parse JSON. Please check the format and try again.",
"addFailed": "Failed to add group",
"importFailed": "Failed to import groups",
"partialSuccess": "Imported {{count}} of {{total}} groups successfully. Some groups failed:"
},
"users": {
"add": "Add User",
"addNew": "Add New User",

View File

@@ -61,6 +61,7 @@
"emptyFields": "Le nom d'utilisateur et le mot de passe ne peuvent pas être vides",
"loginFailed": "Échec de la connexion, veuillez vérifier votre nom d'utilisateur et votre mot de passe",
"loginError": "Une erreur est survenue lors de la connexion",
"serverUnavailable": "Impossible de se connecter au serveur. Veuillez vérifier votre connexion réseau ou réessayer plus tard",
"currentPassword": "Mot de passe actuel",
"newPassword": "Nouveau mot de passe",
"confirmPassword": "Confirmer le mot de passe",
@@ -247,6 +248,10 @@
"github": "GitHub",
"wechat": "WeChat",
"discord": "Discord",
"itemsPerPage": "Éléments par page",
"showing": "Affichage de {{start}}-{{end}} sur {{total}}",
"previous": "Précédent",
"next": "Suivant",
"required": "Requis",
"secret": "Secret",
"default": "Défaut",
@@ -254,7 +259,11 @@
"type": "Type",
"repeated": "Répété",
"valueHint": "Indice de valeur",
"choices": "Choix"
"choices": "Choix",
"actions": "Actions",
"saving": "Enregistrement...",
"active": "Actif",
"inactive": "Inactif"
},
"nav": {
"dashboard": "Tableau de bord",
@@ -554,6 +563,28 @@
"bearerAuthKey": "Clé d'authentification Bearer",
"bearerAuthKeyDescription": "La clé d'authentification qui sera requise dans le jeton Bearer",
"bearerAuthKeyPlaceholder": "Entrez la clé d'authentification Bearer",
"bearerKeysSectionTitle": "Clés",
"bearerKeysSectionDescription": "Gérez plusieurs clés avec différentes portées d'accès.",
"noBearerKeys": "Aucune clé configurée pour le moment.",
"bearerKeyName": "Nom",
"bearerKeyToken": "Jeton",
"bearerKeyEnabled": "Activée",
"bearerKeyAccessType": "Portée d'accès",
"bearerKeyAccessAll": "Toutes",
"bearerKeyAccessGroups": "Groupes",
"bearerKeyAccessServers": "Serveurs",
"bearerKeyAccessCustom": "Personnalisée",
"bearerKeyAllowedGroups": "Groupes autorisés",
"bearerKeyAllowedServers": "Serveurs autorisés",
"addBearerKey": "Ajouter une clé",
"addBearerKeyButton": "Créer",
"bearerKeyRequired": "Le nom et le jeton sont obligatoires",
"deleteBearerKeyConfirm": "Voulez-vous vraiment supprimer cette clé ?",
"generate": "Générer",
"selectGroups": "Sélectionner des groupes",
"selectServers": "Sélectionner des serveurs",
"selectAtLeastOneGroup": "Veuillez sélectionner au moins un groupe",
"selectAtLeastOneServer": "Veuillez sélectionner au moins un serveur",
"skipAuth": "Ignorer l'authentification",
"skipAuthDescription": "Contourner l'exigence de connexion pour l'accès au frontend et à l'API (DÉSACTIVÉ PAR DÉFAUT pour des raisons de sécurité)",
"pythonIndexUrl": "URL du dépôt de paquets Python",
@@ -673,6 +704,22 @@
"importFailed": "Échec de l'importation des serveurs",
"partialSuccess": "{{count}} serveur(s) sur {{total}} importé(s) avec succès. Certains serveurs ont échoué :"
},
"groupImport": {
"button": "Importer",
"title": "Importer des groupes depuis JSON",
"inputLabel": "Configuration JSON des groupes",
"inputHelp": "Collez votre configuration JSON de groupes. Chaque groupe peut contenir une liste de serveurs.",
"preview": "Aperçu",
"previewTitle": "Aperçu des groupes à importer",
"import": "Importer",
"importing": "Importation en cours...",
"invalidFormat": "Format JSON invalide. Le JSON doit contenir un tableau 'groups'.",
"missingName": "Chaque groupe doit avoir un champ 'name'.",
"parseError": "Échec de l'analyse du JSON. Veuillez vérifier le format et réessayer.",
"addFailed": "Échec de l'ajout du groupe",
"importFailed": "Échec de l'importation des groupes",
"partialSuccess": "{{count}} groupe(s) sur {{total}} importé(s) avec succès. Certains groupes ont échoué :"
},
"users": {
"add": "Ajouter un utilisateur",
"addNew": "Ajouter un nouvel utilisateur",

View File

@@ -61,6 +61,7 @@
"emptyFields": "Kullanıcı adı ve şifre boş olamaz",
"loginFailed": "Giriş başarısız, lütfen kullanıcı adınızı ve şifrenizi kontrol edin",
"loginError": "Giriş sırasında bir hata oluştu",
"serverUnavailable": "Sunucuya bağlanılamıyor. Lütfen ağ bağlantınızı kontrol edin veya daha sonra tekrar deneyin",
"currentPassword": "Mevcut Şifre",
"newPassword": "Yeni Şifre",
"confirmPassword": "Şifreyi Onayla",
@@ -247,6 +248,10 @@
"github": "GitHub",
"wechat": "WeChat",
"discord": "Discord",
"itemsPerPage": "Sayfa başına öğe",
"showing": "{{total}} öğeden {{start}}-{{end}} gösteriliyor",
"previous": "Önceki",
"next": "Sonraki",
"required": "Gerekli",
"secret": "Gizli",
"default": "Varsayılan",
@@ -254,7 +259,11 @@
"type": "Tür",
"repeated": "Tekrarlanan",
"valueHint": "Değer İpucu",
"choices": "Seçenekler"
"choices": "Seçenekler",
"actions": "Eylemler",
"saving": "Kaydediliyor...",
"active": "Aktif",
"inactive": "Pasif"
},
"nav": {
"dashboard": "Kontrol Paneli",
@@ -554,6 +563,28 @@
"bearerAuthKey": "Bearer Kimlik Doğrulama Anahtarı",
"bearerAuthKeyDescription": "Bearer token'da gerekli olacak kimlik doğrulama anahtarı",
"bearerAuthKeyPlaceholder": "Bearer kimlik doğrulama anahtarını girin",
"bearerKeysSectionTitle": "Anahtarlar",
"bearerKeysSectionDescription": "Farklı erişim kapsamlarına sahip birden fazla anahtarı yönetin.",
"noBearerKeys": "Henüz yapılandırılmış herhangi bir anahtar yok.",
"bearerKeyName": "Ad",
"bearerKeyToken": "Token",
"bearerKeyEnabled": "Etkin",
"bearerKeyAccessType": "Erişim kapsamı",
"bearerKeyAccessAll": "Tümü",
"bearerKeyAccessGroups": "Gruplar",
"bearerKeyAccessServers": "Sunucular",
"bearerKeyAccessCustom": "Özel",
"bearerKeyAllowedGroups": "İzin verilen gruplar",
"bearerKeyAllowedServers": "İzin verilen sunucular",
"addBearerKey": "Anahtar ekle",
"addBearerKeyButton": "Oluştur",
"bearerKeyRequired": "Ad ve token zorunludur",
"deleteBearerKeyConfirm": "Bu anahtarı silmek istediğinizden emin misiniz?",
"generate": "Oluştur",
"selectGroups": "Grupları Seç",
"selectServers": "Sunucuları Seç",
"selectAtLeastOneGroup": "Lütfen en az bir grup seçin",
"selectAtLeastOneServer": "Lütfen en az bir sunucu seçin",
"skipAuth": "Kimlik Doğrulamayı Atla",
"skipAuthDescription": "Arayüz ve API erişimi için giriş gereksinimini atla (Güvenlik için VARSAYILAN KAPALI)",
"pythonIndexUrl": "Python Paket Deposu URL'si",
@@ -673,6 +704,22 @@
"importFailed": "Sunucular içe aktarılamadı",
"partialSuccess": "{{total}} sunucudan {{count}} tanesi başarıyla içe aktarıldı. Bazı sunucular başarısız oldu:"
},
"groupImport": {
"button": "İçe Aktar",
"title": "JSON'dan Grupları İçe Aktar",
"inputLabel": "Grup Yapılandırma JSON",
"inputHelp": "Grup yapılandırma JSON'unuzu yapıştırın. Her grup bir sunucu listesi içerebilir.",
"preview": "Önizle",
"previewTitle": "İçe Aktarılacak Grupları Önizle",
"import": "İçe Aktar",
"importing": "İçe aktarılıyor...",
"invalidFormat": "Geçersiz JSON formatı. JSON bir 'groups' dizisi içermelidir.",
"missingName": "Her grubun bir 'name' alanı olmalıdır.",
"parseError": "JSON ayrıştırılamadı. Lütfen formatı kontrol edip tekrar deneyin.",
"addFailed": "Grup eklenemedi",
"importFailed": "Gruplar içe aktarılamadı",
"partialSuccess": "{{total}} gruptan {{count}} tanesi başarıyla içe aktarıldı. Bazı gruplar başarısız oldu:"
},
"users": {
"add": "Kullanıcı Ekle",
"addNew": "Yeni Kullanıcı Ekle",

View File

@@ -61,6 +61,7 @@
"emptyFields": "用户名和密码不能为空",
"loginFailed": "登录失败,请检查用户名和密码",
"loginError": "登录过程中出现错误",
"serverUnavailable": "无法连接到服务器,请检查网络连接或稍后再试",
"currentPassword": "当前密码",
"newPassword": "新密码",
"confirmPassword": "确认密码",
@@ -247,6 +248,10 @@
"dismiss": "忽略",
"github": "GitHub",
"wechat": "微信",
"itemsPerPage": "每页显示",
"showing": "显示第 {{start}}-{{end}} 条,共 {{total}} 条",
"previous": "上一页",
"next": "下一页",
"discord": "Discord",
"required": "必填",
"secret": "敏感",
@@ -255,7 +260,11 @@
"type": "类型",
"repeated": "可重复",
"valueHint": "值提示",
"choices": "可选值"
"choices": "可选值",
"actions": "操作",
"saving": "保存中...",
"active": "已激活",
"inactive": "未激活"
},
"nav": {
"dashboard": "仪表盘",
@@ -289,7 +298,7 @@
"routeConfig": "安全配置",
"installConfig": "安装",
"smartRouting": "智能路由",
"oauthServer": "OAuth 服务器"
"oauthServer": "OAuth"
},
"groups": {
"title": "分组管理"
@@ -555,6 +564,28 @@
"bearerAuthKey": "Bearer 认证密钥",
"bearerAuthKeyDescription": "Bearer 令牌中需要携带的认证密钥",
"bearerAuthKeyPlaceholder": "请输入 Bearer 认证密钥",
"bearerKeysSectionTitle": "密钥",
"bearerKeysSectionDescription": "管理多条密钥,并为不同密钥配置不同的访问范围。",
"noBearerKeys": "当前还没有配置任何密钥。",
"bearerKeyName": "名称",
"bearerKeyToken": "密钥值",
"bearerKeyEnabled": "启用",
"bearerKeyAccessType": "访问范围",
"bearerKeyAccessAll": "全部",
"bearerKeyAccessGroups": "指定分组",
"bearerKeyAccessServers": "指定服务器",
"bearerKeyAccessCustom": "自定义",
"bearerKeyAllowedGroups": "允许访问的分组",
"bearerKeyAllowedServers": "允许访问的服务器",
"addBearerKey": "新增密钥",
"addBearerKeyButton": "创建",
"bearerKeyRequired": "名称和密钥值为必填项",
"deleteBearerKeyConfirm": "确定要删除这条密钥吗?",
"generate": "生成",
"selectGroups": "选择分组",
"selectServers": "选择服务器",
"selectAtLeastOneGroup": "请至少选择一个分组",
"selectAtLeastOneServer": "请至少选择一个服务",
"skipAuth": "免登录开关",
"skipAuthDescription": "跳过前端和 API 访问的登录要求(默认关闭确保安全性)",
"pythonIndexUrl": "Python 包仓库地址",
@@ -675,6 +706,22 @@
"importFailed": "导入服务器失败",
"partialSuccess": "成功导入 {{count}} / {{total}} 个服务器。部分服务器失败:"
},
"groupImport": {
"button": "导入",
"title": "从 JSON 导入分组",
"inputLabel": "分组配置 JSON",
"inputHelp": "粘贴您的分组配置 JSON。每个分组可以包含一个服务器列表。",
"preview": "预览",
"previewTitle": "预览要导入的分组",
"import": "导入",
"importing": "导入中...",
"invalidFormat": "无效的 JSON 格式。JSON 必须包含 'groups' 数组。",
"missingName": "每个分组必须有 'name' 字段。",
"parseError": "解析 JSON 失败。请检查格式后重试。",
"addFailed": "添加分组失败",
"importFailed": "导入分组失败",
"partialSuccess": "成功导入 {{count}} / {{total}} 个分组。部分分组失败:"
},
"users": {
"add": "添加",
"addNew": "添加新用户",

View File

@@ -63,5 +63,6 @@
"requiresAuthentication": false
}
}
}
},
"bearerKeys": []
}

View File

@@ -46,7 +46,7 @@
"license": "ISC",
"dependencies": {
"@apidevtools/swagger-parser": "^12.0.0",
"@modelcontextprotocol/sdk": "^1.20.2",
"@modelcontextprotocol/sdk": "^1.25.1",
"@node-oauth/oauth2-server": "^5.2.1",
"@types/adm-zip": "^0.5.7",
"@types/bcrypt": "^6.0.0",
@@ -57,7 +57,7 @@
"bcrypt": "^6.0.0",
"bcryptjs": "^3.0.2",
"cors": "^2.8.5",
"dotenv": "^16.6.1",
"dotenv": "^17.2.3",
"dotenv-expand": "^12.0.2",
"express": "^4.21.2",
"express-validator": "^7.3.1",
@@ -73,6 +73,7 @@
"postgres": "^3.4.7",
"reflect-metadata": "^0.2.2",
"typeorm": "^0.3.26",
"undici": "^7.16.0",
"uuid": "^11.1.0"
},
"devDependencies": {
@@ -107,11 +108,11 @@
"jest-environment-node": "^30.0.5",
"jest-mock-extended": "4.0.0",
"lucide-react": "^0.552.0",
"next": "^15.5.0",
"next": "^16.1.1",
"postcss": "^8.5.6",
"prettier": "^3.6.2",
"react": "19.2.0",
"react-dom": "19.2.0",
"react": "19.2.1",
"react-dom": "19.2.1",
"react-i18next": "^15.7.2",
"react-router-dom": "^7.8.2",
"supertest": "^7.1.4",
@@ -133,6 +134,8 @@
"overrides": {
"brace-expansion@1.1.11": "1.1.12",
"brace-expansion@2.0.1": "2.0.2",
"glob@10.4.5": "10.5.0",
"js-yaml": "4.1.1",
"jws@3.2.2": "4.0.1"
}
}

pnpm-lock.yaml (generated, 676 lines changed)

File diff suppressed because it is too large.

View File

@@ -0,0 +1,169 @@
import { Request, Response } from 'express';
import { ApiResponse, BearerKey } from '../types/index.js';
import { getBearerKeyDao, getSystemConfigDao } from '../dao/index.js';
const requireAdmin = async (req: Request, res: Response): Promise<boolean> => {
const systemConfigDao = getSystemConfigDao();
const systemConfig = await systemConfigDao.get();
if (systemConfig?.routing?.skipAuth) {
return true;
}
const user = (req as any).user;
if (!user || !user.isAdmin) {
res.status(403).json({
success: false,
message: 'Admin privileges required',
});
return false;
}
return true;
};
export const getBearerKeys = async (req: Request, res: Response): Promise<void> => {
if (!(await requireAdmin(req, res))) return;
try {
const dao = getBearerKeyDao();
const keys = await dao.findAll();
const response: ApiResponse = {
success: true,
data: keys,
};
res.json(response);
} catch (error) {
console.error('Failed to get bearer keys:', error);
res.status(500).json({
success: false,
message: 'Failed to get bearer keys',
});
}
};
export const createBearerKey = async (req: Request, res: Response): Promise<void> => {
if (!(await requireAdmin(req, res))) return;
try {
const { name, token, enabled, accessType, allowedGroups, allowedServers } =
req.body as Partial<BearerKey>;
if (!name || typeof name !== 'string') {
res.status(400).json({ success: false, message: 'Key name is required' });
return;
}
if (!token || typeof token !== 'string') {
res.status(400).json({ success: false, message: 'Token value is required' });
return;
}
if (!accessType || !['all', 'groups', 'servers', 'custom'].includes(accessType)) {
res.status(400).json({ success: false, message: 'Invalid accessType' });
return;
}
const dao = getBearerKeyDao();
const key = await dao.create({
name,
token,
enabled: enabled ?? true,
accessType,
allowedGroups: Array.isArray(allowedGroups) ? allowedGroups : [],
allowedServers: Array.isArray(allowedServers) ? allowedServers : [],
});
const response: ApiResponse = {
success: true,
data: key,
};
res.status(201).json(response);
} catch (error) {
console.error('Failed to create bearer key:', error);
res.status(500).json({
success: false,
message: 'Failed to create bearer key',
});
}
};
export const updateBearerKey = async (req: Request, res: Response): Promise<void> => {
if (!(await requireAdmin(req, res))) return;
try {
const { id } = req.params;
if (!id) {
res.status(400).json({ success: false, message: 'Key id is required' });
return;
}
const { name, token, enabled, accessType, allowedGroups, allowedServers } =
req.body as Partial<BearerKey>;
const updates: Partial<BearerKey> = {};
if (name !== undefined) updates.name = name;
if (token !== undefined) updates.token = token;
if (enabled !== undefined) updates.enabled = enabled;
if (accessType !== undefined) {
if (!['all', 'groups', 'servers', 'custom'].includes(accessType)) {
res.status(400).json({ success: false, message: 'Invalid accessType' });
return;
}
updates.accessType = accessType as BearerKey['accessType'];
}
if (allowedGroups !== undefined) {
updates.allowedGroups = Array.isArray(allowedGroups) ? allowedGroups : [];
}
if (allowedServers !== undefined) {
updates.allowedServers = Array.isArray(allowedServers) ? allowedServers : [];
}
const dao = getBearerKeyDao();
const updated = await dao.update(id, updates);
if (!updated) {
res.status(404).json({ success: false, message: 'Bearer key not found' });
return;
}
const response: ApiResponse = {
success: true,
data: updated,
};
res.json(response);
} catch (error) {
console.error('Failed to update bearer key:', error);
res.status(500).json({
success: false,
message: 'Failed to update bearer key',
});
}
};
export const deleteBearerKey = async (req: Request, res: Response): Promise<void> => {
if (!(await requireAdmin(req, res))) return;
try {
const { id } = req.params;
if (!id) {
res.status(400).json({ success: false, message: 'Key id is required' });
return;
}
const dao = getBearerKeyDao();
const deleted = await dao.delete(id);
if (!deleted) {
res.status(404).json({ success: false, message: 'Bearer key not found' });
return;
}
const response: ApiResponse = {
success: true,
};
res.json(response);
} catch (error) {
console.error('Failed to delete bearer key:', error);
res.status(500).json({
success: false,
message: 'Failed to delete bearer key',
});
}
};

View File

@@ -1,10 +1,19 @@
import { Request, Response } from 'express';
import config from '../config/index.js';
import { loadSettings, loadOriginalSettings } from '../config/index.js';
import { loadSettings } from '../config/index.js';
import { getDataService } from '../services/services.js';
import { DataService } from '../services/dataService.js';
import { IUser } from '../types/index.js';
import { getServerDao } from '../dao/DaoFactory.js';
import {
getGroupDao,
getOAuthClientDao,
getOAuthTokenDao,
getServerDao,
getSystemConfigDao,
getUserConfigDao,
getUserDao,
getBearerKeyDao,
} from '../dao/DaoFactory.js';
const dataService: DataService = getDataService();
@@ -128,8 +137,43 @@ export const getMcpSettingsJson = async (req: Request, res: Response): Promise<v
},
});
} else {
// Return full settings
const settings = loadOriginalSettings();
// Return full settings via DAO layer (supports both file and database modes)
const [
servers,
users,
groups,
systemConfig,
userConfigs,
oauthClients,
oauthTokens,
bearerKeys,
] = await Promise.all([
getServerDao().findAll(),
getUserDao().findAll(),
getGroupDao().findAll(),
getSystemConfigDao().get(),
getUserConfigDao().getAll(),
getOAuthClientDao().findAll(),
getOAuthTokenDao().findAll(),
getBearerKeyDao().findAll(),
]);
const mcpServers: Record<string, any> = {};
for (const { name: serverConfigName, ...config } of servers) {
mcpServers[serverConfigName] = removeNullValues(config);
}
const settings = {
mcpServers,
users,
groups,
systemConfig,
userConfigs,
oauthClients,
oauthTokens,
bearerKeys,
};
res.json({
success: true,
data: settings,

View File

@@ -1,5 +1,11 @@
import { Request, Response } from 'express';
import { ApiResponse } from '../types/index.js';
import {
ApiResponse,
AddGroupRequest,
BatchCreateGroupsRequest,
BatchCreateGroupsResponse,
BatchGroupResult,
} from '../types/index.js';
import {
getAllGroups,
getGroupByIdOrName,
@@ -106,6 +112,143 @@ export const createNewGroup = async (req: Request, res: Response): Promise<void>
}
};
// Batch create groups - validates and creates multiple groups in one request
export const batchCreateGroups = async (req: Request, res: Response): Promise<void> => {
try {
const { groups } = req.body as BatchCreateGroupsRequest;
// Validate request body
if (!groups || !Array.isArray(groups)) {
res.status(400).json({
success: false,
message: 'Request body must contain a "groups" array',
});
return;
}
if (groups.length === 0) {
res.status(400).json({
success: false,
message: 'Groups array cannot be empty',
});
return;
}
// Helper function to validate a single group configuration
const validateGroupConfig = (group: AddGroupRequest): { valid: boolean; message?: string } => {
if (!group.name || typeof group.name !== 'string') {
return { valid: false, message: 'Group name is required and must be a string' };
}
if (group.description !== undefined && typeof group.description !== 'string') {
return { valid: false, message: 'Group description must be a string' };
}
if (group.servers !== undefined && !Array.isArray(group.servers)) {
return { valid: false, message: 'Group servers must be an array' };
}
// Validate server configurations if provided in new format
if (group.servers) {
for (const server of group.servers) {
if (typeof server === 'object' && server !== null) {
if (!server.name || typeof server.name !== 'string') {
return {
valid: false,
message: 'Server configuration must have a name property',
};
}
if (
server.tools !== undefined &&
server.tools !== 'all' &&
!Array.isArray(server.tools)
) {
return {
valid: false,
message: 'Server tools must be "all" or an array of tool names',
};
}
}
}
}
return { valid: true };
};
// Process each group
const results: BatchGroupResult[] = [];
let successCount = 0;
let failureCount = 0;
// Get current user for owner field
const currentUser = (req as any).user;
const defaultOwner = currentUser?.username || 'admin';
for (const groupData of groups) {
const { name, description, servers } = groupData;
// Validate group configuration
const validation = validateGroupConfig(groupData);
if (!validation.valid) {
results.push({
name: name || 'unknown',
success: false,
message: validation.message,
});
failureCount++;
continue;
}
try {
const serverList = Array.isArray(servers) ? servers : [];
const newGroup = await createGroup(name, description, serverList, defaultOwner);
if (newGroup) {
results.push({
name,
success: true,
message: 'Group created successfully',
});
successCount++;
} else {
results.push({
name,
success: false,
message: 'Failed to create group or group name already exists',
});
failureCount++;
}
} catch (error) {
results.push({
name,
success: false,
message: error instanceof Error ? error.message : 'Failed to create group',
});
failureCount++;
}
}
// Return response
const response: BatchCreateGroupsResponse = {
success: successCount > 0,
successCount,
failureCount,
results,
};
// Use 207 Multi-Status if there were partial failures, 200 if all succeeded
const statusCode = failureCount > 0 && successCount > 0 ? 207 : successCount > 0 ? 200 : 400;
res.status(statusCode).json(response);
} catch (error) {
console.error('Batch create groups error:', error);
res.status(500).json({
success: false,
message: 'Internal server error',
});
}
};
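A minimal sketch of a request this handler accepts and the response shape it returns. The route prefix is not shown in this diff, so the URL below is an assumption, and the group and server names are illustrative:

```ts
// Hypothetical client call to the batch group-creation endpoint.
async function importGroups(): Promise<void> {
  const payload = {
    groups: [
      { name: 'dev-tools', description: 'Local dev servers', servers: ['fetch', 'playwright'] },
      // New-format entries may scope tools per server:
      { name: 'search', servers: [{ name: 'brave-search', tools: 'all' }] },
    ],
  };

  const res = await fetch('/api/groups/batch', {
    // assumed path; adjust to the actual mount point
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });

  // BatchCreateGroupsResponse: 200 when all groups succeed, 207 on partial failure, 400 when all fail.
  const result = (await res.json()) as {
    success: boolean;
    successCount: number;
    failureCount: number;
    results: { name: string; success: boolean; message?: string }[];
  };
  console.log(`${result.successCount} created, ${result.failureCount} failed`);
}
```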
// Update an existing group
export const updateExistingGroup = async (req: Request, res: Response): Promise<void> => {
try {

View File

@@ -1,5 +1,14 @@
import { Request, Response } from 'express';
import { ApiResponse, AddServerRequest, McpSettings } from '../types/index.js';
import {
ApiResponse,
AddServerRequest,
McpSettings,
BatchCreateServersRequest,
BatchCreateServersResponse,
BatchServerResult,
ServerConfig,
ServerInfo,
} from '../types/index.js';
import {
getServersInfo,
addServer,
@@ -15,13 +24,67 @@ import { syncAllServerToolsEmbeddings } from '../services/vectorSearchService.js
import { createSafeJSON } from '../utils/serialization.js';
import { cloneDefaultOAuthServerConfig } from '../constants/oauthServerDefaults.js';
import { getServerDao, getGroupDao, getSystemConfigDao } from '../dao/DaoFactory.js';
import { getBearerKeyDao } from '../dao/DaoFactory.js';
import { UserContextService } from '../services/userContextService.js';
export const getAllServers = async (_: Request, res: Response): Promise<void> => {
export const getAllServers = async (req: Request, res: Response): Promise<void> => {
try {
const serversInfo = await getServersInfo();
// Parse pagination parameters from query string
const page = req.query.page ? parseInt(req.query.page as string, 10) : 1;
const limit = req.query.limit ? parseInt(req.query.limit as string, 10) : undefined;
// Validate pagination parameters
if (page < 1) {
res.status(400).json({
success: false,
message: 'Page number must be greater than 0',
});
return;
}
if (limit !== undefined && (limit < 1 || limit > 1000)) {
res.status(400).json({
success: false,
message: 'Limit must be between 1 and 1000',
});
return;
}
// Get current user for filtering
const currentUser = UserContextService.getInstance().getCurrentUser();
const isAdmin = !currentUser || currentUser.isAdmin;
// Get servers info with pagination if limit is specified
let serversInfo: Omit<ServerInfo, 'client' | 'transport'>[];
let pagination = undefined;
if (limit !== undefined) {
// Use DAO layer pagination with proper filtering
const serverDao = getServerDao();
const paginatedResult = isAdmin
? await serverDao.findAllPaginated(page, limit)
: await serverDao.findByOwnerPaginated(currentUser!.username, page, limit);
// Get runtime info for paginated servers
serversInfo = await getServersInfo(page, limit, currentUser);
pagination = {
page: paginatedResult.page,
limit: paginatedResult.limit,
total: paginatedResult.total,
totalPages: paginatedResult.totalPages,
hasNextPage: paginatedResult.page < paginatedResult.totalPages,
hasPrevPage: paginatedResult.page > 1,
};
} else {
// No pagination, get all servers (will be filtered by mcpService)
serversInfo = await getServersInfo();
}
const response: ApiResponse = {
success: true,
data: createSafeJSON(serversInfo),
...(pagination && { pagination }),
};
res.json(response);
} catch (error) {
@@ -57,12 +120,31 @@ export const getAllSettings = async (_: Request, res: Response): Promise<void> =
const systemConfigDao = getSystemConfigDao();
const systemConfig = await systemConfigDao.get();
// Ensure smart routing config has DB URL set if environment variable is present
const dbUrlEnv = process.env.DB_URL || '';
if (!systemConfig.smartRouting) {
systemConfig.smartRouting = {
enabled: false,
dbUrl: dbUrlEnv ? '${DB_URL}' : '',
openaiApiBaseUrl: '',
openaiApiKey: '',
openaiApiEmbeddingModel: '',
};
} else if (!systemConfig.smartRouting.dbUrl) {
systemConfig.smartRouting.dbUrl = dbUrlEnv ? '${DB_URL}' : '';
}
// Get bearer auth keys from DAO
const bearerKeyDao = getBearerKeyDao();
const bearerKeys = await bearerKeyDao.findAll();
// Merge all data into settings object
const settings: McpSettings = {
...fileSettings,
mcpServers,
groups,
systemConfig,
bearerKeys,
};
const response: ApiResponse = {
@@ -189,6 +271,177 @@ export const createServer = async (req: Request, res: Response): Promise<void> =
}
};
// Batch create servers - validates and creates multiple servers in one request
export const batchCreateServers = async (req: Request, res: Response): Promise<void> => {
try {
const { servers } = req.body as BatchCreateServersRequest;
// Validate request body
if (!servers || !Array.isArray(servers)) {
res.status(400).json({
success: false,
message: 'Request body must contain a "servers" array',
});
return;
}
if (servers.length === 0) {
res.status(400).json({
success: false,
message: 'Servers array cannot be empty',
});
return;
}
// Helper function to validate a single server configuration
const validateServerConfig = (
name: string,
config: ServerConfig,
): { valid: boolean; message?: string } => {
if (!name || typeof name !== 'string') {
return { valid: false, message: 'Server name is required and must be a string' };
}
if (!config || typeof config !== 'object') {
return { valid: false, message: 'Server configuration is required and must be an object' };
}
if (
!config.url &&
!config.openapi?.url &&
!config.openapi?.schema &&
(!config.command || !config.args)
) {
return {
valid: false,
message:
'Server configuration must include either a URL, OpenAPI specification URL or schema, or command with arguments',
};
}
// Validate server type if specified
if (config.type && !['stdio', 'sse', 'streamable-http', 'openapi'].includes(config.type)) {
return {
valid: false,
message: 'Server type must be one of: stdio, sse, streamable-http, openapi',
};
}
// Validate URL is provided for sse and streamable-http types
if ((config.type === 'sse' || config.type === 'streamable-http') && !config.url) {
return { valid: false, message: `URL is required for ${config.type} server type` };
}
// Validate OpenAPI specification URL or schema is provided for openapi type
if (config.type === 'openapi' && !config.openapi?.url && !config.openapi?.schema) {
return {
valid: false,
message: 'OpenAPI specification URL or schema is required for openapi server type',
};
}
// Validate headers if provided
if (config.headers && typeof config.headers !== 'object') {
return { valid: false, message: 'Headers must be an object' };
}
// Validate that headers are only used with sse, streamable-http, and openapi types
if (config.headers && config.type === 'stdio') {
return { valid: false, message: 'Headers are not supported for stdio server type' };
}
return { valid: true };
};
// Process each server
const results: BatchServerResult[] = [];
let successCount = 0;
let failureCount = 0;
// Get current user for owner field
const currentUser = (req as any).user;
const defaultOwner = currentUser?.username || 'admin';
for (const server of servers) {
const { name, config } = server;
// Validate server configuration
const validation = validateServerConfig(name, config);
if (!validation.valid) {
results.push({
name: name || 'unknown',
success: false,
message: validation.message,
});
failureCount++;
continue;
}
try {
// Set default keep-alive interval for SSE servers if not specified
if ((config.type === 'sse' || (!config.type && config.url)) && !config.keepAliveInterval) {
config.keepAliveInterval = 60000; // Default 60 seconds for SSE servers
}
// Set owner property if not provided
if (!config.owner) {
config.owner = defaultOwner;
}
// Attempt to add server
const result = await addServer(name, config);
if (result.success) {
results.push({
name,
success: true,
});
successCount++;
} else {
results.push({
name,
success: false,
message: result.message || 'Failed to add server',
});
failureCount++;
}
} catch (error) {
results.push({
name,
success: false,
message: error instanceof Error ? error.message : 'Internal server error',
});
failureCount++;
}
}
// Notify tool changes if any server was added successfully
if (successCount > 0) {
notifyToolChanged();
}
// Prepare response
const response: ApiResponse<BatchCreateServersResponse> = {
success: successCount > 0, // Success if at least one server was created
data: {
success: successCount > 0,
successCount,
failureCount,
results,
},
};
// Return 207 Multi-Status if there were partial failures, 200 if all succeeded, 400 if all failed
const statusCode = failureCount === 0 ? 200 : successCount === 0 ? 400 : 207;
res.status(statusCode).json(response);
} catch (error) {
console.error('Batch create servers error:', error);
res.status(500).json({
success: false,
message: 'Internal server error',
});
}
};
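For comparison, a payload that passes the server-side validation above: each entry needs a URL, an OpenAPI spec, or a command with args, and headers are rejected for stdio servers. Names and URLs are illustrative, and the batch route path is assumed rather than taken from this diff:

```ts
const payload = {
  servers: [
    { name: 'time', config: { type: 'stdio', command: 'uvx', args: ['mcp-server-time'] } },
    { name: 'docs', config: { type: 'streamable-http', url: 'https://example.com/mcp' } },
  ],
};
// POSTed to a batch route (e.g. /api/servers/batch); the response wraps
// BatchCreateServersResponse with per-server results, using the same 200/207/400 convention.
```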
export const deleteServer = async (req: Request, res: Response): Promise<void> => {
try {
const { name } = req.params;
@@ -224,7 +477,7 @@ export const deleteServer = async (req: Request, res: Response): Promise<void> =
export const updateServer = async (req: Request, res: Response): Promise<void> => {
try {
const { name } = req.params;
const { config } = req.body;
const { config, newName } = req.body;
if (!name) {
res.status(400).json({
success: false,
@@ -311,12 +564,52 @@ export const updateServer = async (req: Request, res: Response): Promise<void> =
config.owner = currentUser?.username || 'admin';
}
const result = await addOrUpdateServer(name, config, true); // Allow override for updates
// Check if server name is being changed
const isRenaming = newName && newName !== name;
// If renaming, validate the new name and update references
if (isRenaming) {
const serverDao = getServerDao();
// Check if new name already exists
if (await serverDao.exists(newName)) {
res.status(400).json({
success: false,
message: `Server name '${newName}' already exists`,
});
return;
}
// Rename the server
const renamed = await serverDao.rename(name, newName);
if (!renamed) {
res.status(404).json({
success: false,
message: 'Server not found',
});
return;
}
// Update references in groups
const groupDao = getGroupDao();
await groupDao.updateServerName(name, newName);
// Update references in bearer keys
const bearerKeyDao = getBearerKeyDao();
await bearerKeyDao.updateServerName(name, newName);
}
// Use the final server name (new name if renaming, otherwise original name)
const finalName = isRenaming ? newName : name;
const result = await addOrUpdateServer(finalName, config, true); // Allow override for updates
if (result.success) {
notifyToolChanged(name);
notifyToolChanged(finalName);
res.json({
success: true,
message: 'Server updated successfully',
message: isRenaming
? `Server renamed and updated successfully`
: 'Server updated successfully',
});
} else {
res.status(404).json({
@@ -793,7 +1086,8 @@ export const updateSystemConfig = async (req: Request, res: Response): Promise<v
if (typeof smartRouting.enabled === 'boolean') {
// If enabling Smart Routing, validate required fields
if (smartRouting.enabled) {
const currentDbUrl = smartRouting.dbUrl || systemConfig.smartRouting.dbUrl;
const currentDbUrl =
process.env.DB_URL || smartRouting.dbUrl || systemConfig.smartRouting.dbUrl;
const currentOpenaiApiKey =
smartRouting.openaiApiKey || systemConfig.smartRouting.openaiApiKey;

src/dao/BearerKeyDao.ts (new file, 159 lines)
View File

@@ -0,0 +1,159 @@
import { randomUUID } from 'node:crypto';
import { BearerKey } from '../types/index.js';
import { JsonFileBaseDao } from './base/JsonFileBaseDao.js';
/**
* DAO interface for bearer authentication keys
*/
export interface BearerKeyDao {
findAll(): Promise<BearerKey[]>;
findEnabled(): Promise<BearerKey[]>;
findById(id: string): Promise<BearerKey | undefined>;
findByToken(token: string): Promise<BearerKey | undefined>;
create(data: Omit<BearerKey, 'id'>): Promise<BearerKey>;
update(id: string, data: Partial<Omit<BearerKey, 'id'>>): Promise<BearerKey | null>;
delete(id: string): Promise<boolean>;
/**
* Update server name in all bearer keys (when server is renamed)
*/
updateServerName(oldName: string, newName: string): Promise<number>;
}
/**
* JSON file-based BearerKey DAO implementation
* Stores keys under the top-level `bearerKeys` field in mcp_settings.json
* and performs one-time migration from legacy routing.enableBearerAuth/bearerAuthKey.
*/
export class BearerKeyDaoImpl extends JsonFileBaseDao implements BearerKeyDao {
private async loadKeysWithMigration(): Promise<BearerKey[]> {
const settings = await this.loadSettings();
// Treat an existing array (including an empty array) as already migrated.
// Otherwise, when there are no configured keys, we'd rewrite mcp_settings.json
// on every request, which also clears the global settings cache.
if (Array.isArray(settings.bearerKeys)) {
return settings.bearerKeys;
}
// Perform one-time migration from legacy routing config if present
const routing = settings.systemConfig?.routing || {};
const enableBearerAuth: boolean = !!routing.enableBearerAuth;
const rawKey: string = (routing.bearerAuthKey || '').trim();
let migrated: BearerKey[] = [];
if (rawKey) {
// Cases 2 and 3 in migration rules
migrated = [
{
id: randomUUID(),
name: 'default',
token: rawKey,
enabled: enableBearerAuth,
accessType: 'all',
allowedGroups: [],
allowedServers: [],
},
];
}
// Cases 1 and 4 both result in empty keys list
settings.bearerKeys = migrated;
await this.saveSettings(settings);
return migrated;
}
private async saveKeys(keys: BearerKey[]): Promise<void> {
const settings = await this.loadSettings();
settings.bearerKeys = keys;
await this.saveSettings(settings);
}
async findAll(): Promise<BearerKey[]> {
return await this.loadKeysWithMigration();
}
async findEnabled(): Promise<BearerKey[]> {
const keys = await this.loadKeysWithMigration();
return keys.filter((key) => key.enabled);
}
async findById(id: string): Promise<BearerKey | undefined> {
const keys = await this.loadKeysWithMigration();
return keys.find((key) => key.id === id);
}
async findByToken(token: string): Promise<BearerKey | undefined> {
const keys = await this.loadKeysWithMigration();
return keys.find((key) => key.token === token);
}
async create(data: Omit<BearerKey, 'id'>): Promise<BearerKey> {
const keys = await this.loadKeysWithMigration();
const newKey: BearerKey = {
id: randomUUID(),
...data,
};
keys.push(newKey);
await this.saveKeys(keys);
return newKey;
}
async update(id: string, data: Partial<Omit<BearerKey, 'id'>>): Promise<BearerKey | null> {
const keys = await this.loadKeysWithMigration();
const index = keys.findIndex((key) => key.id === id);
if (index === -1) {
return null;
}
const updated: BearerKey = {
...keys[index],
...data,
id: keys[index].id,
};
keys[index] = updated;
await this.saveKeys(keys);
return updated;
}
async delete(id: string): Promise<boolean> {
const keys = await this.loadKeysWithMigration();
const next = keys.filter((key) => key.id !== id);
if (next.length === keys.length) {
return false;
}
await this.saveKeys(next);
return true;
}
async updateServerName(oldName: string, newName: string): Promise<number> {
const keys = await this.loadKeysWithMigration();
let updatedCount = 0;
for (const key of keys) {
let updated = false;
if (key.allowedServers && key.allowedServers.length > 0) {
const newServers = key.allowedServers.map((server) => {
if (server === oldName) {
updated = true;
return newName;
}
return server;
});
if (updated) {
key.allowedServers = newServers;
updatedCount++;
}
}
}
if (updatedCount > 0) {
await this.saveKeys(keys);
}
return updatedCount;
}
}
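A sketch of what the one-time migration in loadKeysWithMigration produces, using illustrative values: a legacy routing.bearerAuthKey becomes a single 'all'-scoped entry under a new top-level bearerKeys array, while the legacy routing fields are left in place:

```ts
// Before the first call (legacy single-key config in mcp_settings.json):
const legacySettings = {
  systemConfig: { routing: { enableBearerAuth: true, bearerAuthKey: 'old-secret' } },
};

// After loadKeysWithMigration() runs once:
const migratedSettings = {
  systemConfig: { routing: { enableBearerAuth: true, bearerAuthKey: 'old-secret' } },
  bearerKeys: [
    {
      id: '0f1e2d3c-0000-4000-8000-000000000000', // randomUUID()
      name: 'default',
      token: 'old-secret',
      enabled: true, // mirrors routing.enableBearerAuth
      accessType: 'all',
      allowedGroups: [],
      allowedServers: [],
    },
  ],
};
```

With no legacy key configured, the migration simply writes an empty bearerKeys array.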

View File

@@ -0,0 +1,103 @@
import { BearerKeyDao } from './BearerKeyDao.js';
import { BearerKey as BearerKeyModel } from '../types/index.js';
import { BearerKeyRepository } from '../db/repositories/BearerKeyRepository.js';
/**
* Database-backed implementation of BearerKeyDao
*/
export class BearerKeyDaoDbImpl implements BearerKeyDao {
private repository: BearerKeyRepository;
constructor() {
this.repository = new BearerKeyRepository();
}
private toModel(entity: import('../db/entities/BearerKey.js').BearerKey): BearerKeyModel {
return {
id: entity.id,
name: entity.name,
token: entity.token,
enabled: entity.enabled,
accessType: entity.accessType,
allowedGroups: entity.allowedGroups ?? [],
allowedServers: entity.allowedServers ?? [],
};
}
async findAll(): Promise<BearerKeyModel[]> {
const entities = await this.repository.findAll();
return entities.map((e) => this.toModel(e));
}
async findEnabled(): Promise<BearerKeyModel[]> {
const entities = await this.repository.findAll();
return entities.filter((e) => e.enabled).map((e) => this.toModel(e));
}
async findById(id: string): Promise<BearerKeyModel | undefined> {
const entity = await this.repository.findById(id);
return entity ? this.toModel(entity) : undefined;
}
async findByToken(token: string): Promise<BearerKeyModel | undefined> {
const entity = await this.repository.findByToken(token);
return entity ? this.toModel(entity) : undefined;
}
async create(data: Omit<BearerKeyModel, 'id'>): Promise<BearerKeyModel> {
const entity = await this.repository.create({
name: data.name,
token: data.token,
enabled: data.enabled,
accessType: data.accessType,
allowedGroups: data.allowedGroups ?? [],
allowedServers: data.allowedServers ?? [],
} as any);
return this.toModel(entity as any);
}
async update(
id: string,
data: Partial<Omit<BearerKeyModel, 'id'>>,
): Promise<BearerKeyModel | null> {
const entity = await this.repository.update(id, {
name: data.name,
token: data.token,
enabled: data.enabled,
accessType: data.accessType,
allowedGroups: data.allowedGroups,
allowedServers: data.allowedServers,
} as any);
return entity ? this.toModel(entity as any) : null;
}
async delete(id: string): Promise<boolean> {
return await this.repository.delete(id);
}
async updateServerName(oldName: string, newName: string): Promise<number> {
const allKeys = await this.repository.findAll();
let updatedCount = 0;
for (const key of allKeys) {
let updated = false;
if (key.allowedServers && key.allowedServers.length > 0) {
const newServers = key.allowedServers.map((server) => {
if (server === oldName) {
updated = true;
return newName;
}
return server;
});
if (updated) {
await this.repository.update(key.id, { allowedServers: newServers });
updatedCount++;
}
}
}
return updatedCount;
}
}

View File

@@ -5,6 +5,7 @@ import { SystemConfigDao, SystemConfigDaoImpl } from './SystemConfigDao.js';
import { UserConfigDao, UserConfigDaoImpl } from './UserConfigDao.js';
import { OAuthClientDao, OAuthClientDaoImpl } from './OAuthClientDao.js';
import { OAuthTokenDao, OAuthTokenDaoImpl } from './OAuthTokenDao.js';
import { BearerKeyDao, BearerKeyDaoImpl } from './BearerKeyDao.js';
/**
* DAO Factory interface for creating DAO instances
@@ -17,6 +18,7 @@ export interface DaoFactory {
getUserConfigDao(): UserConfigDao;
getOAuthClientDao(): OAuthClientDao;
getOAuthTokenDao(): OAuthTokenDao;
getBearerKeyDao(): BearerKeyDao;
}
/**
@@ -32,6 +34,7 @@ export class JsonFileDaoFactory implements DaoFactory {
private userConfigDao: UserConfigDao | null = null;
private oauthClientDao: OAuthClientDao | null = null;
private oauthTokenDao: OAuthTokenDao | null = null;
private bearerKeyDao: BearerKeyDao | null = null;
/**
* Get singleton instance
@@ -96,6 +99,13 @@ export class JsonFileDaoFactory implements DaoFactory {
return this.oauthTokenDao;
}
getBearerKeyDao(): BearerKeyDao {
if (!this.bearerKeyDao) {
this.bearerKeyDao = new BearerKeyDaoImpl();
}
return this.bearerKeyDao;
}
/**
* Reset all cached DAO instances (useful for testing)
*/
@@ -107,6 +117,7 @@ export class JsonFileDaoFactory implements DaoFactory {
this.userConfigDao = null;
this.oauthClientDao = null;
this.oauthTokenDao = null;
this.bearerKeyDao = null;
}
}
@@ -179,3 +190,7 @@ export function getOAuthClientDao(): OAuthClientDao {
export function getOAuthTokenDao(): OAuthTokenDao {
return getDaoFactory().getOAuthTokenDao();
}
export function getBearerKeyDao(): BearerKeyDao {
return getDaoFactory().getBearerKeyDao();
}

View File

@@ -7,6 +7,7 @@ import {
UserConfigDao,
OAuthClientDao,
OAuthTokenDao,
BearerKeyDao,
} from './index.js';
import { UserDaoDbImpl } from './UserDaoDbImpl.js';
import { ServerDaoDbImpl } from './ServerDaoDbImpl.js';
@@ -15,6 +16,7 @@ import { SystemConfigDaoDbImpl } from './SystemConfigDaoDbImpl.js';
import { UserConfigDaoDbImpl } from './UserConfigDaoDbImpl.js';
import { OAuthClientDaoDbImpl } from './OAuthClientDaoDbImpl.js';
import { OAuthTokenDaoDbImpl } from './OAuthTokenDaoDbImpl.js';
import { BearerKeyDaoDbImpl } from './BearerKeyDaoDbImpl.js';
/**
* Database-backed DAO factory implementation
@@ -29,6 +31,7 @@ export class DatabaseDaoFactory implements DaoFactory {
private userConfigDao: UserConfigDao | null = null;
private oauthClientDao: OAuthClientDao | null = null;
private oauthTokenDao: OAuthTokenDao | null = null;
private bearerKeyDao: BearerKeyDao | null = null;
/**
* Get singleton instance
@@ -93,6 +96,13 @@ export class DatabaseDaoFactory implements DaoFactory {
return this.oauthTokenDao!;
}
getBearerKeyDao(): BearerKeyDao {
if (!this.bearerKeyDao) {
this.bearerKeyDao = new BearerKeyDaoDbImpl();
}
return this.bearerKeyDao!;
}
/**
* Reset all cached DAO instances (useful for testing)
*/
@@ -104,5 +114,6 @@ export class DatabaseDaoFactory implements DaoFactory {
this.userConfigDao = null;
this.oauthClientDao = null;
this.oauthTokenDao = null;
this.bearerKeyDao = null;
}
}

View File

@@ -36,6 +36,11 @@ export interface GroupDao extends BaseDao<IGroup, string> {
* Find group by name
*/
findByName(name: string): Promise<IGroup | null>;
/**
* Update server name in all groups (when server is renamed)
*/
updateServerName(oldName: string, newName: string): Promise<number>;
}
/**
@@ -218,4 +223,39 @@ export class GroupDaoImpl extends JsonFileBaseDao implements GroupDao {
const groups = await this.getAll();
return groups.find((group) => group.name === name) || null;
}
async updateServerName(oldName: string, newName: string): Promise<number> {
const groups = await this.getAll();
let updatedCount = 0;
for (const group of groups) {
let updated = false;
const newServers = group.servers.map((server) => {
if (typeof server === 'string') {
if (server === oldName) {
updated = true;
return newName;
}
return server;
} else {
if (server.name === oldName) {
updated = true;
return { ...server, name: newName };
}
return server;
}
}) as IGroup['servers'];
if (updated) {
group.servers = newServers;
updatedCount++;
}
}
if (updatedCount > 0) {
await this.saveAll(groups);
}
return updatedCount;
}
}

View File

@@ -151,4 +151,35 @@ export class GroupDaoDbImpl implements GroupDao {
owner: group.owner,
};
}
async updateServerName(oldName: string, newName: string): Promise<number> {
const allGroups = await this.repository.findAll();
let updatedCount = 0;
for (const group of allGroups) {
let updated = false;
const newServers = group.servers.map((server) => {
if (typeof server === 'string') {
if (server === oldName) {
updated = true;
return newName;
}
return server;
} else {
if (server.name === oldName) {
updated = true;
return { ...server, name: newName };
}
return server;
}
});
if (updated) {
await this.update(group.id, { servers: newServers as any });
updatedCount++;
}
}
return updatedCount;
}
}

View File

@@ -2,10 +2,31 @@ import { ServerConfig } from '../types/index.js';
import { BaseDao } from './base/BaseDao.js';
import { JsonFileBaseDao } from './base/JsonFileBaseDao.js';
/**
* Pagination result interface
*/
export interface PaginatedResult<T> {
data: T[];
total: number;
page: number;
limit: number;
totalPages: number;
}
/**
* Server DAO interface with server-specific operations
*/
export interface ServerDao extends BaseDao<ServerConfigWithName, string> {
/**
* Find all servers with pagination
*/
findAllPaginated(page: number, limit: number): Promise<PaginatedResult<ServerConfigWithName>>;
/**
* Find servers by owner with pagination
*/
findByOwnerPaginated(owner: string, page: number, limit: number): Promise<PaginatedResult<ServerConfigWithName>>;
/**
* Find servers by owner
*/
@@ -41,6 +62,11 @@ export interface ServerDao extends BaseDao<ServerConfigWithName, string> {
name: string,
prompts: Record<string, { enabled: boolean; description?: string }>,
): Promise<boolean>;
/**
* Rename a server (change its name/key)
*/
rename(oldName: string, newName: string): Promise<boolean>;
}
/**
@@ -95,7 +121,8 @@ export class ServerDaoImpl extends JsonFileBaseDao implements ServerDao {
return {
...existing,
...updates,
name: existing.name, // Name should not be updated
// Keep the existing name unless explicitly updating via rename
name: updates.name ?? existing.name,
};
}
@@ -141,9 +168,7 @@ export class ServerDaoImpl extends JsonFileBaseDao implements ServerDao {
return null;
}
// Don't allow name changes
const { name: _, ...allowedUpdates } = updates;
const updatedServer = this.updateEntity(servers[index], allowedUpdates);
const updatedServer = this.updateEntity(servers[index], updates);
servers[index] = updatedServer;
await this.saveAll(servers);
@@ -172,6 +197,61 @@ export class ServerDaoImpl extends JsonFileBaseDao implements ServerDao {
return servers.length;
}
async findAllPaginated(page: number, limit: number): Promise<PaginatedResult<ServerConfigWithName>> {
const allServers = await this.getAll();
// Sort: enabled servers first, then by creation time
const sortedServers = allServers.sort((a, b) => {
const aEnabled = a.enabled !== false;
const bEnabled = b.enabled !== false;
if (aEnabled !== bEnabled) {
return aEnabled ? -1 : 1;
}
return 0; // Keep original order for same enabled status
});
const total = sortedServers.length;
const totalPages = Math.ceil(total / limit);
const startIndex = (page - 1) * limit;
const endIndex = startIndex + limit;
const data = sortedServers.slice(startIndex, endIndex);
return {
data,
total,
page,
limit,
totalPages,
};
}
async findByOwnerPaginated(owner: string, page: number, limit: number): Promise<PaginatedResult<ServerConfigWithName>> {
const allServers = await this.getAll();
const filteredServers = allServers.filter((server) => server.owner === owner);
// Sort: enabled servers first, then by creation time
const sortedServers = filteredServers.sort((a, b) => {
const aEnabled = a.enabled !== false;
const bEnabled = b.enabled !== false;
if (aEnabled !== bEnabled) {
return aEnabled ? -1 : 1;
}
return 0; // Keep original order for same enabled status
});
const total = sortedServers.length;
const totalPages = Math.ceil(total / limit);
const startIndex = (page - 1) * limit;
const endIndex = startIndex + limit;
const data = sortedServers.slice(startIndex, endIndex);
return {
data,
total,
page,
limit,
totalPages,
};
}
async findByOwner(owner: string): Promise<ServerConfigWithName[]> {
const servers = await this.getAll();
return servers.filter((server) => server.owner === owner);
@@ -207,4 +287,22 @@ export class ServerDaoImpl extends JsonFileBaseDao implements ServerDao {
const result = await this.update(name, { prompts });
return result !== null;
}
async rename(oldName: string, newName: string): Promise<boolean> {
const servers = await this.getAll();
const index = servers.findIndex((server) => server.name === oldName);
if (index === -1) {
return false;
}
// Check if newName already exists
if (servers.find((server) => server.name === newName)) {
throw new Error(`Server ${newName} already exists`);
}
servers[index] = { ...servers[index], name: newName };
await this.saveAll(servers);
return true;
}
}
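A consumer-side sketch of the pagination contract above (assumed call site, not part of this diff):

import { getServerDao } from '../dao/index.js';

const showFirstPage = async (): Promise<void> => {
  const page = await getServerDao().findAllPaginated(1, 10);
  console.log(`Showing ${page.data.length} of ${page.total} servers (${page.totalPages} pages)`);
  // Owner-scoped listings return the same PaginatedResult shape
  const mine = await getServerDao().findByOwnerPaginated('admin', 1, 10);
  console.log(`Owner 'admin' has ${mine.total} servers`);
};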

View File

@@ -1,4 +1,4 @@
import { ServerDao, ServerConfigWithName } from './index.js';
import { ServerDao, ServerConfigWithName, PaginatedResult } from './index.js';
import { ServerRepository } from '../db/repositories/ServerRepository.js';
/**
@@ -16,6 +16,32 @@ export class ServerDaoDbImpl implements ServerDao {
return servers.map((s) => this.mapToServerConfig(s));
}
async findAllPaginated(page: number, limit: number): Promise<PaginatedResult<ServerConfigWithName>> {
const { data, total } = await this.repository.findAllPaginated(page, limit);
const totalPages = Math.ceil(total / limit);
return {
data: data.map((s) => this.mapToServerConfig(s)),
total,
page,
limit,
totalPages,
};
}
async findByOwnerPaginated(owner: string, page: number, limit: number): Promise<PaginatedResult<ServerConfigWithName>> {
const { data, total } = await this.repository.findByOwnerPaginated(owner, page, limit);
const totalPages = Math.ceil(total / limit);
return {
data: data.map((s) => this.mapToServerConfig(s)),
total,
page,
limit,
totalPages,
};
}
async findById(name: string): Promise<ServerConfigWithName | null> {
const server = await this.repository.findByName(name);
return server ? this.mapToServerConfig(server) : null;
@@ -38,6 +64,8 @@ export class ServerDaoDbImpl implements ServerDao {
prompts: entity.prompts,
options: entity.options,
oauth: entity.oauth,
proxy: entity.proxy,
openapi: entity.openapi,
});
return this.mapToServerConfig(server);
}
@@ -61,6 +89,8 @@ export class ServerDaoDbImpl implements ServerDao {
prompts: entity.prompts,
options: entity.options,
oauth: entity.oauth,
proxy: entity.proxy,
openapi: entity.openapi,
});
return server ? this.mapToServerConfig(server) : null;
}
@@ -113,6 +143,15 @@ export class ServerDaoDbImpl implements ServerDao {
return result !== null;
}
async rename(oldName: string, newName: string): Promise<boolean> {
// Check if newName already exists
if (await this.repository.exists(newName)) {
throw new Error(`Server ${newName} already exists`);
}
return await this.repository.rename(oldName, newName);
}
private mapToServerConfig(server: {
name: string;
type?: string;
@@ -129,6 +168,8 @@ export class ServerDaoDbImpl implements ServerDao {
prompts?: Record<string, { enabled: boolean; description?: string }>;
options?: Record<string, any>;
oauth?: Record<string, any>;
proxy?: Record<string, any>;
openapi?: Record<string, any>;
}): ServerConfigWithName {
return {
name: server.name,
@@ -146,6 +187,8 @@ export class ServerDaoDbImpl implements ServerDao {
prompts: server.prompts,
options: server.options,
oauth: server.oauth,
proxy: server.proxy,
openapi: server.openapi,
};
}
}
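The rename pieces spread across the DAOs presumably compose as follows; this is a hedged sketch of the orchestration (the controller that actually wires it together is not part of this excerpt):

import { getServerDao, getGroupDao, getBearerKeyDao } from '../dao/index.js';

const renameServerEverywhere = async (oldName: string, newName: string): Promise<void> => {
  // rename() returns false when oldName does not exist and throws when newName is already taken
  const renamed = await getServerDao().rename(oldName, newName);
  if (!renamed) {
    throw new Error(`Server ${oldName} not found`);
  }
  // Propagate the new name to group membership and bearer-key scopes
  const groups = await getGroupDao().updateServerName(oldName, newName);
  const keys = await getBearerKeyDao().updateServerName(oldName, newName);
  console.log(`Renamed ${oldName} -> ${newName}: ${groups} groups and ${keys} bearer keys updated`);
};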

View File

@@ -8,6 +8,7 @@ export * from './SystemConfigDao.js';
export * from './UserConfigDao.js';
export * from './OAuthClientDao.js';
export * from './OAuthTokenDao.js';
export * from './BearerKeyDao.js';
// Export database implementations
export * from './UserDaoDbImpl.js';
@@ -17,6 +18,7 @@ export * from './SystemConfigDaoDbImpl.js';
export * from './UserConfigDaoDbImpl.js';
export * from './OAuthClientDaoDbImpl.js';
export * from './OAuthTokenDaoDbImpl.js';
export * from './BearerKeyDaoDbImpl.js';
// Export the DAO factory and convenience functions
export * from './DaoFactory.js';

View File

@@ -25,39 +25,44 @@ const createRequiredExtensions = async (dataSource: DataSource): Promise<void> =
};
// Get database URL from smart routing config or fallback to environment variable
const getDatabaseUrl = (): string => {
return getSmartRoutingConfig().dbUrl;
const getDatabaseUrl = async (): Promise<string> => {
return (await getSmartRoutingConfig()).dbUrl;
};
// Default database configuration
const defaultConfig: DataSourceOptions = {
type: 'postgres',
url: getDatabaseUrl(),
synchronize: true,
entities: entities,
subscribers: [VectorEmbeddingSubscriber],
// Default database configuration (database URL resolved asynchronously from the smart routing config)
const getDefaultConfig = async (): Promise<DataSourceOptions> => {
return {
type: 'postgres',
url: await getDatabaseUrl(),
synchronize: true,
entities: entities,
subscribers: [VectorEmbeddingSubscriber],
};
};
// AppDataSource is the TypeORM data source
let appDataSource = new DataSource(defaultConfig);
// AppDataSource is the TypeORM data source (null until the first initialization creates it)
let appDataSource: DataSource | null = null;
// Global promise to track initialization status
let initializationPromise: Promise<DataSource> | null = null;
// Function to create a new DataSource with updated configuration
export const updateDataSourceConfig = (): DataSource => {
const newConfig: DataSourceOptions = {
...defaultConfig,
url: getDatabaseUrl(),
};
export const updateDataSourceConfig = async (): Promise<DataSource> => {
const newConfig = await getDefaultConfig();
// If the configuration has changed, we need to create a new DataSource
const currentUrl = (appDataSource.options as any).url;
if (currentUrl !== newConfig.url) {
console.log('Database URL configuration changed, updating DataSource...');
if (appDataSource) {
const currentUrl = (appDataSource.options as any).url;
const newUrl = (newConfig as any).url;
if (currentUrl !== newUrl) {
console.log('Database URL configuration changed, updating DataSource...');
appDataSource = new DataSource(newConfig);
// Reset initialization promise when configuration changes
initializationPromise = null;
}
} else {
// First time initialization
appDataSource = new DataSource(newConfig);
// Reset initialization promise when configuration changes
initializationPromise = null;
}
return appDataSource;
@@ -65,6 +70,9 @@ export const updateDataSourceConfig = (): DataSource => {
// Get the current AppDataSource instance
export const getAppDataSource = (): DataSource => {
if (!appDataSource) {
throw new Error('Database not initialized. Call initializeDatabase() first.');
}
return appDataSource;
};
@@ -72,7 +80,7 @@ export const getAppDataSource = (): DataSource => {
export const reconnectDatabase = async (): Promise<DataSource> => {
try {
// Close existing connection if it exists
if (appDataSource.isInitialized) {
if (appDataSource && appDataSource.isInitialized) {
console.log('Closing existing database connection...');
await appDataSource.destroy();
}
@@ -81,7 +89,7 @@ export const reconnectDatabase = async (): Promise<DataSource> => {
initializationPromise = null;
// Update configuration and reconnect
appDataSource = updateDataSourceConfig();
appDataSource = await updateDataSourceConfig();
return await initializeDatabase();
} catch (error) {
console.error('Error during database reconnection:', error);
@@ -98,7 +106,7 @@ export const initializeDatabase = async (): Promise<DataSource> => {
}
// If already initialized, return the existing instance
if (appDataSource.isInitialized) {
if (appDataSource && appDataSource.isInitialized) {
console.log('Database already initialized, returning existing instance');
return Promise.resolve(appDataSource);
}
@@ -122,7 +130,7 @@ export const initializeDatabase = async (): Promise<DataSource> => {
const performDatabaseInitialization = async (): Promise<DataSource> => {
try {
// Update configuration before initializing
appDataSource = updateDataSourceConfig();
appDataSource = await updateDataSourceConfig();
if (!appDataSource.isInitialized) {
console.log('Initializing database connection...');
@@ -250,7 +258,8 @@ const performDatabaseInitialization = async (): Promise<DataSource> => {
console.log('Database connection established successfully.');
// Run one final setup check after schema synchronization is done
if (defaultConfig.synchronize) {
const config = await getDefaultConfig();
if (config.synchronize) {
try {
console.log('Running final vector configuration check...');
@@ -325,12 +334,12 @@ const performDatabaseInitialization = async (): Promise<DataSource> => {
// Get database connection status
export const isDatabaseConnected = (): boolean => {
return appDataSource.isInitialized;
return appDataSource ? appDataSource.isInitialized : false;
};
// Close database connection
export const closeDatabase = async (): Promise<void> => {
if (appDataSource.isInitialized) {
if (appDataSource && appDataSource.isInitialized) {
await appDataSource.destroy();
console.log('Database connection closed.');
}
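Since appDataSource now starts out as null, callers are expected to initialize before grabbing the data source; a minimal sketch of that pattern (assumed call site, not part of this diff):

import { initializeDatabase, getAppDataSource, isDatabaseConnected } from './connection.js';

const ensureDataSource = async () => {
  if (!isDatabaseConnected()) {
    // Resolves the DB URL from the async smart routing config, then connects
    await initializeDatabase();
  }
  // Throws if somehow called before initialization completed
  return getAppDataSource();
};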

View File

@@ -0,0 +1,43 @@
import {
Entity,
Column,
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
} from 'typeorm';
/**
* Bearer authentication key entity
* Stores multiple bearer keys with per-key enable/disable and scoped access control
*/
@Entity({ name: 'bearer_keys' })
export class BearerKey {
@PrimaryGeneratedColumn('uuid')
id: string;
@Column({ type: 'varchar', length: 100 })
name: string;
@Column({ type: 'varchar', length: 512 })
token: string;
@Column({ type: 'boolean', default: true })
enabled: boolean;
@Column({ type: 'varchar', length: 20, default: 'all' })
accessType: 'all' | 'groups' | 'servers' | 'custom';
@Column({ type: 'simple-json', nullable: true })
allowedGroups?: string[];
@Column({ type: 'simple-json', nullable: true })
allowedServers?: string[];
@CreateDateColumn({ name: 'created_at', type: 'timestamp' })
createdAt: Date;
@UpdateDateColumn({ name: 'updated_at', type: 'timestamp' })
updatedAt: Date;
}
export default BearerKey;

View File

@@ -59,6 +59,12 @@ export class Server {
@Column({ type: 'simple-json', nullable: true })
oauth?: Record<string, any>;
@Column({ type: 'simple-json', nullable: true })
proxy?: Record<string, any>;
@Column({ type: 'simple-json', nullable: true })
openapi?: Record<string, any>;
@CreateDateColumn({ name: 'created_at', type: 'timestamp' })
createdAt: Date;

View File

@@ -6,6 +6,7 @@ import SystemConfig from './SystemConfig.js';
import UserConfig from './UserConfig.js';
import OAuthClient from './OAuthClient.js';
import OAuthToken from './OAuthToken.js';
import BearerKey from './BearerKey.js';
// Export all entities
export default [
@@ -17,7 +18,18 @@ export default [
UserConfig,
OAuthClient,
OAuthToken,
BearerKey,
];
// Export individual entities for direct use
export { VectorEmbedding, User, Server, Group, SystemConfig, UserConfig, OAuthClient, OAuthToken };
export {
VectorEmbedding,
User,
Server,
Group,
SystemConfig,
UserConfig,
OAuthClient,
OAuthToken,
BearerKey,
};

View File

@@ -0,0 +1,75 @@
import { Repository } from 'typeorm';
import { BearerKey } from '../entities/BearerKey.js';
import { getAppDataSource } from '../connection.js';
/**
* Repository for BearerKey entity
*/
export class BearerKeyRepository {
private repository: Repository<BearerKey>;
constructor() {
this.repository = getAppDataSource().getRepository(BearerKey);
}
/**
* Find all bearer keys
*/
async findAll(): Promise<BearerKey[]> {
return await this.repository.find({ order: { createdAt: 'ASC' } });
}
/**
* Count bearer keys
*/
async count(): Promise<number> {
return await this.repository.count();
}
/**
* Find bearer key by id
*/
async findById(id: string): Promise<BearerKey | null> {
return await this.repository.findOne({ where: { id } });
}
/**
* Find bearer key by token value
*/
async findByToken(token: string): Promise<BearerKey | null> {
return await this.repository.findOne({ where: { token } });
}
/**
* Create a new bearer key
*/
async create(data: Omit<BearerKey, 'id' | 'createdAt' | 'updatedAt'>): Promise<BearerKey> {
const entity = this.repository.create(data);
return await this.repository.save(entity);
}
/**
* Update an existing bearer key
*/
async update(
id: string,
updates: Partial<Omit<BearerKey, 'id' | 'createdAt' | 'updatedAt'>>,
): Promise<BearerKey | null> {
const existing = await this.findById(id);
if (!existing) {
return null;
}
const merged = this.repository.merge(existing, updates);
return await this.repository.save(merged);
}
/**
* Delete a bearer key
*/
async delete(id: string): Promise<boolean> {
const result = await this.repository.delete({ id });
return (result.affected ?? 0) > 0;
}
}
export default BearerKeyRepository;

View File

@@ -69,6 +69,41 @@ export class ServerRepository {
return await this.repository.count();
}
/**
* Find servers with pagination
*/
async findAllPaginated(page: number, limit: number): Promise<{ data: Server[]; total: number }> {
const skip = (page - 1) * limit;
const [data, total] = await this.repository.findAndCount({
order: {
enabled: 'DESC', // Enabled servers first
createdAt: 'ASC' // Then by creation time
},
skip,
take: limit,
});
return { data, total };
}
/**
* Find servers by owner with pagination
*/
async findByOwnerPaginated(owner: string, page: number, limit: number): Promise<{ data: Server[]; total: number }> {
const skip = (page - 1) * limit;
const [data, total] = await this.repository.findAndCount({
where: { owner },
order: {
enabled: 'DESC', // Enabled servers first
createdAt: 'ASC' // Then by creation time
},
skip,
take: limit,
});
return { data, total };
}
/**
* Find servers by owner
*/
@@ -89,6 +124,19 @@ export class ServerRepository {
async setEnabled(name: string, enabled: boolean): Promise<Server | null> {
return await this.update(name, { enabled });
}
/**
* Rename a server
*/
async rename(oldName: string, newName: string): Promise<boolean> {
const server = await this.findByName(oldName);
if (!server) {
return false;
}
server.name = newName;
await this.repository.save(server);
return true;
}
}
export default ServerRepository;

View File

@@ -6,6 +6,7 @@ import { SystemConfigRepository } from './SystemConfigRepository.js';
import { UserConfigRepository } from './UserConfigRepository.js';
import { OAuthClientRepository } from './OAuthClientRepository.js';
import { OAuthTokenRepository } from './OAuthTokenRepository.js';
import { BearerKeyRepository } from './BearerKeyRepository.js';
// Export all repositories
export {
@@ -17,4 +18,5 @@ export {
UserConfigRepository,
OAuthClientRepository,
OAuthTokenRepository,
BearerKeyRepository,
};

View File

@@ -5,9 +5,15 @@ import defaultConfig from '../config/index.js';
import { JWT_SECRET } from '../config/jwt.js';
import { getToken } from '../models/OAuth.js';
import { isOAuthServerEnabled } from '../services/oauthServerService.js';
import { getBearerKeyDao } from '../dao/index.js';
import { BearerKey } from '../types/index.js';
const validateBearerAuth = (req: Request, routingConfig: any): boolean => {
if (!routingConfig.enableBearerAuth) {
const validateBearerAuth = async (req: Request): Promise<boolean> => {
const bearerKeyDao = getBearerKeyDao();
const enabledKeys = await bearerKeyDao.findEnabled();
// If there are no enabled keys, bearer auth via static keys is disabled
if (enabledKeys.length === 0) {
return false;
}
@@ -16,7 +22,21 @@ const validateBearerAuth = (req: Request, routingConfig: any): boolean => {
return false;
}
return authHeader.substring(7) === routingConfig.bearerAuthKey;
const token = authHeader.substring(7).trim();
if (!token) {
return false;
}
const matchingKey: BearerKey | undefined = enabledKeys.find((key) => key.token === token);
if (!matchingKey) {
console.warn('Bearer auth failed: token did not match any configured bearer key');
return false;
}
console.log(
`Bearer auth succeeded with key id=${matchingKey.id}, name=${matchingKey.name}, accessType=${matchingKey.accessType}`,
);
return true;
};
const readonlyAllowPaths = ['/tools/call/'];
@@ -47,8 +67,6 @@ export const auth = async (req: Request, res: Response, next: NextFunction): Pro
const routingConfig = loadSettings().systemConfig?.routing || {
enableGlobalRoute: true,
enableGroupNameRoute: true,
enableBearerAuth: false,
bearerAuthKey: '',
skipAuth: false,
};
@@ -57,8 +75,8 @@ export const auth = async (req: Request, res: Response, next: NextFunction): Pro
return;
}
// Check if bearer auth is enabled and validate it
if (validateBearerAuth(req, routingConfig)) {
// Check if bearer auth via configured keys can validate this request
if (await validateBearerAuth(req)) {
next();
return;
}

View File

@@ -6,6 +6,7 @@ import {
getAllSettings,
getServerConfig,
createServer,
batchCreateServers,
updateServer,
deleteServer,
toggleServer,
@@ -20,6 +21,7 @@ import {
getGroups,
getGroup,
createNewGroup,
batchCreateGroups,
updateExistingGroup,
deleteExistingGroup,
addServerToExistingGroup,
@@ -104,6 +106,12 @@ import {
updateClientConfiguration,
deleteClientRegistration,
} from '../controllers/oauthDynamicRegistrationController.js';
import {
getBearerKeys,
createBearerKey,
updateBearerKey,
deleteBearerKey,
} from '../controllers/bearerKeyController.js';
import { auth } from '../middlewares/auth.js';
const router = express.Router();
@@ -134,6 +142,7 @@ export const initRoutes = (app: express.Application): void => {
router.get('/servers/:name', getServerConfig);
router.get('/settings', getAllSettings);
router.post('/servers', createServer);
router.post('/servers/batch', batchCreateServers);
router.put('/servers/:name', updateServer);
router.delete('/servers/:name', deleteServer);
router.post('/servers/:name/toggle', toggleServer);
@@ -148,6 +157,7 @@ export const initRoutes = (app: express.Application): void => {
router.get('/groups', getGroups);
router.get('/groups/:id', getGroup);
router.post('/groups', createNewGroup);
router.post('/groups/batch', batchCreateGroups);
router.put('/groups/:id', updateExistingGroup);
router.delete('/groups/:id', deleteExistingGroup);
router.post('/groups/:id/servers', addServerToExistingGroup);
@@ -183,6 +193,12 @@ export const initRoutes = (app: express.Application): void => {
router.delete('/oauth/clients/:clientId', deleteClient);
router.post('/oauth/clients/:clientId/regenerate-secret', regenerateSecret);
// Bearer authentication key management (admin only)
router.get('/auth/keys', getBearerKeys);
router.post('/auth/keys', createBearerKey);
router.put('/auth/keys/:id', updateBearerKey);
router.delete('/auth/keys/:id', deleteBearerKey);
// Tool management routes
router.post('/tools/call/:server', callTool);
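A hedged client-side sketch of the new admin endpoints. The '/api' prefix, the adminJwt parameter, and the request bodies are assumptions based on the BearerKey and BatchCreateServersRequest types that appear later in this diff; the server names and commands are illustrative.

const createKeyAndBatch = async (adminJwt: string): Promise<void> => {
  const headers = { 'Content-Type': 'application/json', Authorization: `Bearer ${adminJwt}` };

  // Create a group-scoped bearer key
  await fetch('/api/auth/keys', {
    method: 'POST',
    headers,
    body: JSON.stringify({
      name: 'ci-key',
      token: 'example-secret-token', // assumed field, mirrors the BearerKey model
      accessType: 'groups',
      allowedGroups: ['dev'],
    }),
  });

  // Batch-create servers in one request
  await fetch('/api/servers/batch', {
    method: 'POST',
    headers,
    body: JSON.stringify({
      servers: [{ name: 'fetch', config: { command: 'uvx', args: ['mcp-server-fetch'] } }],
    }),
  });
};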

View File

@@ -29,9 +29,9 @@ export const getGroupByIdOrName = async (key: string): Promise<IGroup | undefine
const systemConfigDao = getSystemConfigDao();
const systemConfig = await systemConfigDao.get();
const routingConfig = systemConfig?.routing || {
enableGlobalRoute: true,
enableGroupNameRoute: true,
const routingConfig = {
enableGlobalRoute: systemConfig?.routing?.enableGlobalRoute ?? true,
enableGroupNameRoute: systemConfig?.routing?.enableGroupNameRoute ?? true,
};
const groups = await getAllGroups();

View File

@@ -48,7 +48,9 @@ export const setupClientKeepAlive = async (
await (serverInfo.client as any).ping();
console.log(`Keep-alive ping successful for server: ${serverInfo.name}`);
} else {
await serverInfo.client.listTools({ timeout: 5000 }).catch(() => void 0);
await serverInfo.client
.listTools({}, { ...(serverInfo.options || {}), timeout: 5000 })
.catch(() => void 0);
}
}
} catch (error) {

View File

@@ -325,7 +325,7 @@ export class MCPHubOAuthProvider implements OAuthClientProvider {
return;
}
console.log(`Saving OAuth tokens for server: ${this.serverName}`);
console.log(`Saving OAuth tokens: ${JSON.stringify(tokens)} for server: ${this.serverName}`);
const updatedConfig = await persistTokens(this.serverName, {
accessToken: tokens.access_token,

View File

@@ -1,4 +1,6 @@
import os from 'os';
import path from 'path';
import fs from 'fs';
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import {
CallToolRequestSchema,
@@ -14,7 +16,8 @@ import {
StreamableHTTPClientTransport,
StreamableHTTPClientTransportOptions,
} from '@modelcontextprotocol/sdk/client/streamableHttp.js';
import { ServerInfo, ServerConfig, Tool } from '../types/index.js';
import { createFetchWithProxy, getProxyConfigFromEnv } from './proxy.js';
import { ServerInfo, ServerConfig, Tool, ProxychainsConfig } from '../types/index.js';
import { expandEnvVars, replaceEnvVars, getNameSeparator } from '../config/index.js';
import config from '../config/index.js';
import { getGroup } from './sseService.js';
@@ -31,6 +34,150 @@ const servers: { [sessionId: string]: Server } = {};
import { setupClientKeepAlive } from './keepAliveService.js';
/**
* Check if proxychains4 is available on the system (Linux/macOS only).
* Returns the path to proxychains4 if found, null otherwise.
*/
const findProxychains4 = (): string | null => {
// Windows is not supported
if (process.platform === 'win32') {
return null;
}
// Common proxychains4 binary paths
const possiblePaths = [
'/usr/bin/proxychains4',
'/usr/local/bin/proxychains4',
'/opt/homebrew/bin/proxychains4', // macOS Homebrew ARM
'/usr/local/Cellar/proxychains-ng/*/bin/proxychains4', // macOS Homebrew Intel
];
for (const p of possiblePaths) {
if (fs.existsSync(p)) {
return p;
}
}
// Try to find in PATH
const pathEnv = process.env.PATH || '';
const pathDirs = pathEnv.split(path.delimiter);
for (const dir of pathDirs) {
const fullPath = path.join(dir, 'proxychains4');
if (fs.existsSync(fullPath)) {
return fullPath;
}
}
return null;
};
/**
* Generate a temporary proxychains4 configuration file.
* Returns the path to the generated config file.
*/
const generateProxychainsConfig = (
serverName: string,
proxyConfig: ProxychainsConfig,
): string | null => {
// If a custom config path is provided, use it directly
if (proxyConfig.configPath) {
if (fs.existsSync(proxyConfig.configPath)) {
return proxyConfig.configPath;
}
console.warn(
`[${serverName}] Custom proxychains config not found: ${proxyConfig.configPath}`,
);
return null;
}
// Validate required fields
if (!proxyConfig.host || !proxyConfig.port) {
console.warn(`[${serverName}] Proxy host and port are required for proxychains4`);
return null;
}
const proxyType = proxyConfig.type || 'socks5';
const proxyLine = proxyConfig.username && proxyConfig.password
? `${proxyType} ${proxyConfig.host} ${proxyConfig.port} ${proxyConfig.username} ${proxyConfig.password}`
: `${proxyType} ${proxyConfig.host} ${proxyConfig.port}`;
const configContent = `# Proxychains4 configuration for MCP server: ${serverName}
# Generated by MCPHub
strict_chain
proxy_dns
remote_dns_subnet 224
tcp_read_time_out 15000
tcp_connect_time_out 8000
[ProxyList]
${proxyLine}
`;
// Create temp directory if needed
const tempDir = path.join(os.tmpdir(), 'mcphub-proxychains');
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
// Write config file
const configPath = path.join(tempDir, `${serverName.replace(/[^a-zA-Z0-9-_]/g, '_')}.conf`);
fs.writeFileSync(configPath, configContent, 'utf-8');
console.log(`[${serverName}] Generated proxychains4 config: ${configPath}`);
return configPath;
};
/**
* Wrap a command with proxychains4 if proxy is configured and available.
* Returns modified command and args if proxychains4 is used, original values otherwise.
*/
const wrapWithProxychains = (
serverName: string,
command: string,
args: string[],
proxyConfig?: ProxychainsConfig,
): { command: string; args: string[] } => {
// Skip if proxy is not enabled or not configured
if (!proxyConfig?.enabled) {
return { command, args };
}
// Check platform - Windows is not supported
if (process.platform === 'win32') {
console.warn(
`[${serverName}] proxychains4 proxy is not supported on Windows, ignoring proxy configuration`,
);
return { command, args };
}
// Find proxychains4 binary
const proxychains4Path = findProxychains4();
if (!proxychains4Path) {
console.warn(
`[${serverName}] proxychains4 not found on system, install it with: apt install proxychains4 (Debian/Ubuntu) or brew install proxychains-ng (macOS)`,
);
return { command, args };
}
// Generate or get config file
const configPath = generateProxychainsConfig(serverName, proxyConfig);
if (!configPath) {
console.warn(`[${serverName}] Failed to setup proxychains4 configuration, skipping proxy`);
return { command, args };
}
// Wrap command with proxychains4
console.log(
`[${serverName}] Using proxychains4 proxy: ${proxyConfig.type || 'socks5'}://${proxyConfig.host}:${proxyConfig.port}`,
);
return {
command: proxychains4Path,
args: ['-f', configPath, command, ...args],
};
};
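// Illustrative only (assumed configuration, not taken from this commit): a STDIO server
// whose `proxy` field feeds wrapWithProxychains above could be configured as
//   { "command": "npx", "args": ["-y", "@example/mcp-server"],
//     "proxy": { "enabled": true, "type": "socks5", "host": "127.0.0.1", "port": 1080 } }
// On a host with proxychains4 installed, the spawned command then becomes
//   proxychains4 -f <generated .conf> npx -y @example/mcp-server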
export const initUpstreamServers = async (): Promise<void> => {
// Initialize OAuth clients for servers with dynamic registration
await initializeAllOAuthClients();
@@ -134,6 +281,10 @@ export const cleanupAllServers = (): void => {
// Helper function to create transport based on server configuration
export const createTransportFromConfig = async (name: string, conf: ServerConfig): Promise<any> => {
let transport;
const env: Record<string, string> = {
...(process.env as Record<string, string>),
...replaceEnvVars(conf.env || {}),
};
if (conf.type === 'streamable-http') {
const options: StreamableHTTPClientTransportOptions = {};
@@ -152,6 +303,8 @@ export const createTransportFromConfig = async (name: string, conf: ServerConfig
console.log(`OAuth provider configured for server: ${name}`);
}
options.fetch = createFetchWithProxy(getProxyConfigFromEnv(env));
transport = new StreamableHTTPClientTransport(new URL(conf.url || ''), options);
} else if (conf.url) {
// SSE transport
@@ -174,13 +327,11 @@ export const createTransportFromConfig = async (name: string, conf: ServerConfig
console.log(`OAuth provider configured for server: ${name}`);
}
options.fetch = createFetchWithProxy(getProxyConfigFromEnv(env));
transport = new SSEClientTransport(new URL(conf.url), options);
} else if (conf.command && conf.args) {
// Stdio transport
const env: Record<string, string> = {
...(process.env as Record<string, string>),
...replaceEnvVars(conf.env || {}),
};
env['PATH'] = expandEnvVars(process.env.PATH as string) || '';
const systemConfigDao = getSystemConfigDao();
@@ -204,11 +355,19 @@ export const createTransportFromConfig = async (name: string, conf: ServerConfig
env['npm_config_registry'] = systemConfig.install.npmRegistry;
}
// Expand environment variables in command
// Apply proxychains4 wrapper if proxy is configured (Linux/macOS only)
const { command: finalCommand, args: finalArgs } = wrapWithProxychains(
name,
conf.command,
replaceEnvVars(conf.args) as string[],
conf.proxy,
);
// Create STDIO transport with potentially wrapped command
transport = new StdioClientTransport({
cwd: os.homedir(),
command: conf.command,
args: replaceEnvVars(conf.args) as string[],
command: finalCommand,
args: finalArgs,
env: env,
stderr: 'pipe',
});
@@ -236,6 +395,8 @@ const callToolWithReconnect = async (
for (let attempt = 0; attempt <= maxRetries; attempt++) {
try {
const result = await serverInfo.client.callTool(toolParams, undefined, options || {});
// Check auth error
checkAuthError(result);
return result;
} catch (error: any) {
// Check if error message starts with "Error POSTing to endpoint (HTTP 40"
@@ -611,14 +772,65 @@ export const registerAllTools = async (isInit: boolean, serverName?: string): Pr
};
// Get all server information
export const getServersInfo = async (): Promise<Omit<ServerInfo, 'client' | 'transport'>[]> => {
const allServers: ServerConfigWithName[] = await getServerDao().findAll();
export const getServersInfo = async (
page?: number,
limit?: number,
user?: any,
): Promise<Omit<ServerInfo, 'client' | 'transport'>[]> => {
const dataService = getDataService();
const filterServerInfos: ServerInfo[] = dataService.filterData
? dataService.filterData(serverInfos)
: serverInfos;
const infos = filterServerInfos.map(
({ name, status, tools, prompts, createTime, error, oauth }) => {
// Get paginated or all server configurations from DAO
// If pagination is used with a non-admin user, filtering is already done at DAO level
const isPaginated = limit !== undefined && page !== undefined;
const allServers: ServerConfigWithName[] = isPaginated
? (await getServerDao().findAllPaginated(page, limit)).data
: await getServerDao().findAll();
// Ensure that servers recently added via DAO but not yet initialized in serverInfos
// are still visible in the servers list. This avoids a race condition where
// a POST /api/servers immediately followed by GET /api/servers would not
// return the newly created server until background initialization completes.
const combinedServerInfos: ServerInfo[] = [...serverInfos];
const existingNames = new Set(combinedServerInfos.map((s) => s.name));
// Create a set of server names we're interested in (for pagination)
const requestedServerNames = new Set(allServers.map((s) => s.name));
// Filter serverInfos to only include requested servers if pagination is used
const filteredServerInfos = isPaginated
? combinedServerInfos.filter((s) => requestedServerNames.has(s.name))
: combinedServerInfos;
// Add servers from DAO that don't have runtime info yet
for (const server of allServers) {
if (!existingNames.has(server.name)) {
const isEnabled = server.enabled === undefined ? true : server.enabled;
filteredServerInfos.push({
name: server.name,
owner: server.owner,
// Newly created servers that are enabled should appear as "connecting"
// until the MCP client initialization completes. Disabled servers remain
// in the "disconnected" state.
status: isEnabled ? 'connecting' : 'disconnected',
error: null,
tools: [],
prompts: [],
createTime: Date.now(),
enabled: isEnabled,
});
}
}
// Apply user filtering only when NOT using pagination (pagination already filtered at DAO level)
// Or when no pagination parameters are provided (backward compatibility)
const shouldApplyUserFilter = !isPaginated;
const filterServerInfos: ServerInfo[] = shouldApplyUserFilter && dataService.filterData
? dataService.filterData(filteredServerInfos, user)
: filteredServerInfos;
const infos = filterServerInfos
.filter((info) => requestedServerNames.has(info.name)) // Only include requested servers
.map(({ name, status, tools, prompts, createTime, error, oauth }) => {
const serverConfig = allServers.find((server) => server.name === name);
const enabled = serverConfig ? serverConfig.enabled !== false : true;
@@ -657,12 +869,8 @@ export const getServersInfo = async (): Promise<Omit<ServerInfo, 'client' | 'tra
}
: undefined,
};
},
);
infos.sort((a, b) => {
if (a.enabled === b.enabled) return 0;
return a.enabled ? -1 : 1;
});
});
// Sorting is now handled at DAO layer for consistent pagination results
return infos;
};
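// Call-site sketch (assumed, not part of this commit): the dashboard can now request one
// page at a time, e.g. await getServersInfo(2, 20, reqUser) for page 2 with 20 items per page,
// while existing callers that pass no arguments keep the previous return-everything behaviour.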
@@ -797,6 +1005,25 @@ export const addOrUpdateServer = async (
}
};
// Check for authentication error in tool call result
function checkAuthError(result: any) {
if (Array.isArray(result.content) && result.content.length > 0) {
const text = result.content[0]?.text;
if (typeof text === 'string') {
let errorContent;
try {
errorContent = JSON.parse(text);
} catch (e) {
// Ignore JSON parse errors and continue
return;
}
if (errorContent.code === 401) {
throw new Error('Error POSTing to endpoint (HTTP 401 Unauthorized)');
}
}
}
}
// Close server client and transport
function closeServer(name: string) {
const serverInfo = serverInfos.find((serverInfo) => serverInfo.name === name);

View File

@@ -42,7 +42,7 @@ function convertToolSchemaToOpenAPI(tool: Tool): {
(prop: any) =>
prop.type === 'object' ||
prop.type === 'array' ||
(prop.type === 'string' && prop.enum && prop.enum.length > 10),
prop.type === 'string',
);
if (!hasComplexTypes && Object.keys(properties).length <= 10) {
@@ -93,7 +93,7 @@ function generateOperationFromTool(tool: Tool, serverName: string): OpenAPIV3.Op
const operation: OpenAPIV3.OperationObject = {
summary: tool.description || `Execute ${tool.name} tool`,
description: tool.description || `Execute the ${tool.name} tool from ${serverName} server`,
operationId: `${serverName}_${tool.name}`,
operationId: `${tool.name}`,
tags: [serverName],
...(parameters && parameters.length > 0 && { parameters }),
...(requestBody && { requestBody }),

src/services/proxy.ts (new file, 167 lines)
View File

@@ -0,0 +1,167 @@
/**
* HTTP/HTTPS proxy configuration utilities for MCP client transports.
*
* This module provides utilities to configure HTTP and HTTPS proxies when
* connecting to MCP servers. Proxies are configured by providing a custom
* fetch implementation that uses Node.js http/https agents with proxy support.
*
*/
import { FetchLike } from '@modelcontextprotocol/sdk/shared/transport.js';
/**
* Configuration options for HTTP/HTTPS proxy settings.
*/
export interface ProxyConfig {
/**
* HTTP proxy URL (e.g., 'http://proxy.example.com:8080')
* Can include authentication: 'http://user:pass@proxy.example.com:8080'
*/
httpProxy?: string;
/**
* HTTPS proxy URL (e.g., 'https://proxy.example.com:8443')
* Can include authentication: 'https://user:pass@proxy.example.com:8443'
*/
httpsProxy?: string;
/**
* Comma-separated list of hosts that should bypass the proxy
* (e.g., 'localhost,127.0.0.1,.example.com')
*/
noProxy?: string;
}
/**
* Creates a fetch function that uses the specified proxy configuration.
*
* This function returns a fetch implementation that routes requests through
* the configured HTTP/HTTPS proxies using undici's ProxyAgent.
*
* Note: This function requires the 'undici' package to be installed.
* Install it with: npm install undici
*
* @param config - Proxy configuration options
* @returns A fetch-compatible function configured to use the specified proxies
*
*/
export function createFetchWithProxy(config: ProxyConfig): FetchLike {
// If no proxy is configured, return the default fetch
if (!config.httpProxy && !config.httpsProxy) {
return fetch;
}
// Parse no_proxy list
const noProxyList = parseNoProxy(config.noProxy);
return async (url: string | URL, init?: RequestInit): Promise<Response> => {
const targetUrl = typeof url === 'string' ? new URL(url) : url;
// Check if host should bypass proxy
if (shouldBypassProxy(targetUrl.hostname, noProxyList)) {
return fetch(url, init);
}
// Determine which proxy to use based on protocol
const proxyUrl = targetUrl.protocol === 'https:' ? config.httpsProxy : config.httpProxy;
if (!proxyUrl) {
// No proxy configured for this protocol
return fetch(url, init);
}
// Use undici for proxy support if available
try {
// Dynamic import - undici is an optional peer dependency
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const undici = await import('undici' as any);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const ProxyAgent = (undici as any).ProxyAgent;
const dispatcher = new ProxyAgent(proxyUrl);
return fetch(url, {
...init,
// @ts-expect-error - dispatcher is undici-specific
dispatcher,
});
} catch (error) {
// undici not available - throw error requiring installation
throw new Error(
'Proxy support requires the "undici" package. ' +
'Install it with: npm install undici\n' +
`Original error: ${error instanceof Error ? error.message : String(error)}`,
);
}
};
}
/**
* Parses a NO_PROXY environment variable value into a list of patterns.
*/
function parseNoProxy(noProxy?: string): string[] {
if (!noProxy) {
return [];
}
return noProxy
.split(',')
.map((item) => item.trim())
.filter((item) => item.length > 0);
}
/**
* Checks if a hostname should bypass the proxy based on NO_PROXY patterns.
*/
function shouldBypassProxy(hostname: string, noProxyList: string[]): boolean {
if (noProxyList.length === 0) {
return false;
}
const hostnameLower = hostname.toLowerCase();
for (const pattern of noProxyList) {
const patternLower = pattern.toLowerCase();
// Exact match
if (hostnameLower === patternLower) {
return true;
}
// Domain suffix match (e.g., .example.com matches sub.example.com)
if (patternLower.startsWith('.') && hostnameLower.endsWith(patternLower)) {
return true;
}
// Domain suffix match without leading dot
if (!patternLower.startsWith('.') && hostnameLower.endsWith('.' + patternLower)) {
return true;
}
// Special case: "*" matches everything
if (patternLower === '*') {
return true;
}
}
return false;
}
/**
* Creates a ProxyConfig from environment variables.
*
* This function reads standard proxy environment variables:
* - HTTP_PROXY, http_proxy
* - HTTPS_PROXY, https_proxy
* - NO_PROXY, no_proxy
*
* Lowercase versions take precedence over uppercase versions.
*
* @returns A ProxyConfig object populated from environment variables
*/
export function getProxyConfigFromEnv(env: Record<string, string>): ProxyConfig {
return {
httpProxy: env.http_proxy || env.HTTP_PROXY,
httpsProxy: env.https_proxy || env.HTTPS_PROXY,
noProxy: env.no_proxy || env.NO_PROXY,
};
}
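A short usage sketch under the undici assumption stated above (the proxy URL and host names are illustrative):

import { createFetchWithProxy, getProxyConfigFromEnv } from './proxy.js';

const proxiedFetch = createFetchWithProxy(
  getProxyConfigFromEnv({
    HTTPS_PROXY: 'http://proxy.internal:8080',
    NO_PROXY: 'localhost,.corp.example.com',
  }),
);

const probe = async (): Promise<number> => {
  // Hosts matching a NO_PROXY pattern bypass the proxy and use the plain global fetch
  const res = await proxiedFetch('https://mcp.example.com/stream');
  return res.status;
};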

View File

@@ -47,6 +47,30 @@ jest.mock('../dao/index.js', () => ({
getSystemConfigDao: jest.fn(() => ({
get: jest.fn().mockImplementation(() => Promise.resolve(currentSystemConfig)),
})),
getBearerKeyDao: jest.fn(() => ({
// Keep these unit tests aligned with legacy routing semantics:
// enableBearerAuth + bearerAuthKey -> one enabled key (token=bearerAuthKey)
// otherwise -> no enabled keys (bearer auth effectively disabled)
findEnabled: jest.fn().mockImplementation(async () => {
const routing = (currentSystemConfig as any)?.routing || {};
const enabled = !!routing.enableBearerAuth;
const token = String(routing.bearerAuthKey || '').trim();
if (!enabled || !token) {
return [];
}
return [
{
id: 'test-key-id',
name: 'default',
token,
enabled: true,
accessType: 'all',
allowedGroups: [],
allowedServers: [],
},
];
}),
})),
}));
// Mock oauthBearer

View File

@@ -6,10 +6,10 @@ import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/
import { isInitializeRequest } from '@modelcontextprotocol/sdk/types.js';
import { deleteMcpServer, getMcpServer } from './mcpService.js';
import config from '../config/index.js';
import { getSystemConfigDao } from '../dao/index.js';
import { getBearerKeyDao, getGroupDao, getServerDao, getSystemConfigDao } from '../dao/index.js';
import { UserContextService } from './userContextService.js';
import { RequestContextService } from './requestContextService.js';
import { IUser } from '../types/index.js';
import { IUser, BearerKey } from '../types/index.js';
import { resolveOAuthUserFromToken } from '../utils/oauthBearer.js';
export const transports: {
@@ -30,40 +30,187 @@ type BearerAuthResult =
reason: 'missing' | 'invalid';
};
/**
* Check if a string is in canonical UUID format
*/
const isValidUUID = (str: string): boolean => {
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
return uuidRegex.test(str);
};
const isBearerKeyAllowedForRequest = async (req: Request, key: BearerKey): Promise<boolean> => {
const paramValue = (req.params as any)?.group as string | undefined;
// accessType 'all' allows all requests
if (key.accessType === 'all') {
return true;
}
// No parameter value means global route
if (!paramValue) {
// Only accessType 'all' allows global routes
return false;
}
try {
const groupDao = getGroupDao();
const serverDao = getServerDao();
// Step 1: Try to match as a group (by name or id), since group has higher priority
let matchedGroup = await groupDao.findByName(paramValue);
if (!matchedGroup && isValidUUID(paramValue)) {
// Only try findById if the parameter is a valid UUID
matchedGroup = await groupDao.findById(paramValue);
}
if (matchedGroup) {
// Matched as a group
if (key.accessType === 'groups') {
// For group-scoped keys, check if the matched group is in allowedGroups
const allowedGroups = key.allowedGroups || [];
return allowedGroups.includes(matchedGroup.name) || allowedGroups.includes(matchedGroup.id);
}
if (key.accessType === 'servers') {
// For server-scoped keys, check if any server in the group is allowed
const allowedServers = key.allowedServers || [];
if (allowedServers.length === 0) {
return false;
}
if (!Array.isArray(matchedGroup.servers)) {
return false;
}
const groupServerNames = matchedGroup.servers.map((server) =>
typeof server === 'string' ? server : server.name,
);
return groupServerNames.some((name) => allowedServers.includes(name));
}
if (key.accessType === 'custom') {
// For custom-scoped keys, check if the group is allowed OR if any server in the group is allowed
const allowedGroups = key.allowedGroups || [];
const allowedServers = key.allowedServers || [];
// Check if the group itself is allowed
const groupAllowed =
allowedGroups.includes(matchedGroup.name) || allowedGroups.includes(matchedGroup.id);
if (groupAllowed) {
return true;
}
// Check if any server in the group is allowed
if (allowedServers.length > 0 && Array.isArray(matchedGroup.servers)) {
const groupServerNames = matchedGroup.servers.map((server) =>
typeof server === 'string' ? server : server.name,
);
return groupServerNames.some((name) => allowedServers.includes(name));
}
return false;
}
// Unknown accessType with matched group
return false;
}
// Step 2: Not a group, try to match as a server name
const matchedServer = await serverDao.findById(paramValue);
if (matchedServer) {
// Matched as a server
if (key.accessType === 'groups') {
// For group-scoped keys, server access is not allowed
return false;
}
if (key.accessType === 'servers' || key.accessType === 'custom') {
// For server-scoped or custom-scoped keys, check if the server is in allowedServers
const allowedServers = key.allowedServers || [];
return allowedServers.includes(matchedServer.name);
}
// Unknown accessType with matched server
return false;
}
// Step 3: Not a valid group or server, deny access
console.warn(
`Bearer key access denied: parameter '${paramValue}' does not match any group or server`,
);
return false;
} catch (error) {
console.error('Error checking bearer key request access:', error);
return false;
}
};
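// Scope illustration (illustrative, not taken from this commit): for a key with
// accessType 'custom', allowedGroups ['dev'] and allowedServers ['github'], a request to
// e.g. /mcp/dev passes via the group match, /mcp/github passes via the server fallback,
// and a bare global route is rejected because only accessType 'all' may omit the parameter.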
const validateBearerAuth = async (req: Request): Promise<BearerAuthResult> => {
const systemConfigDao = getSystemConfigDao();
const systemConfig = await systemConfigDao.get();
const routingConfig = systemConfig?.routing || {
enableGlobalRoute: true,
enableGroupNameRoute: true,
enableBearerAuth: false,
bearerAuthKey: '',
};
const bearerKeyDao = getBearerKeyDao();
const enabledKeys = await bearerKeyDao.findEnabled();
if (routingConfig.enableBearerAuth) {
const authHeader = req.headers.authorization;
if (!authHeader || !authHeader.startsWith('Bearer ')) {
return { valid: false, reason: 'missing' };
const authHeader = req.headers.authorization;
const hasBearerHeader = !!authHeader && authHeader.startsWith('Bearer ');
// If no enabled keys are configured, bearer auth is effectively disabled.
// We still allow OAuth bearer tokens to attach user context in this case.
if (enabledKeys.length === 0) {
if (!hasBearerHeader) {
return { valid: true };
}
const token = authHeader.substring(7); // Remove "Bearer " prefix
if (token.trim().length === 0) {
return { valid: false, reason: 'missing' };
}
if (token === routingConfig.bearerAuthKey) {
const token = authHeader!.substring(7).trim();
if (!token) {
return { valid: true };
}
const oauthUser = await resolveOAuthUserFromToken(token);
if (oauthUser) {
console.log('Authenticated request using OAuth bearer token without configured keys');
return { valid: true, user: oauthUser };
}
return { valid: false, reason: 'invalid' };
// When there are no keys, a non-OAuth bearer token should not block access
return { valid: true };
}
return { valid: true };
// When keys exist, bearer header is required
if (!hasBearerHeader) {
return { valid: false, reason: 'missing' };
}
const token = authHeader!.substring(7).trim();
if (!token) {
return { valid: false, reason: 'missing' };
}
// First, try to match a configured bearer key
const matchingKey = enabledKeys.find((key) => key.token === token);
if (matchingKey) {
const allowed = await isBearerKeyAllowedForRequest(req, matchingKey);
if (!allowed) {
console.warn(
`Bearer key rejected due to scope restrictions: id=${matchingKey.id}, name=${matchingKey.name}, accessType=${matchingKey.accessType}`,
);
return { valid: false, reason: 'invalid' };
}
console.log(
`Bearer key authenticated: id=${matchingKey.id}, name=${matchingKey.name}, accessType=${matchingKey.accessType}`,
);
return { valid: true };
}
// Fallback: treat token as potential OAuth access token
const oauthUser = await resolveOAuthUserFromToken(token);
if (oauthUser) {
console.log('Authenticated request using OAuth bearer token (no matching static key)');
return { valid: true, user: oauthUser };
}
console.warn('Bearer authentication failed: token did not match any key or OAuth user');
return { valid: false, reason: 'invalid' };
};
const attachUserContextFromBearer = (result: BearerAuthResult, res: Response): void => {
@@ -398,9 +545,9 @@ export const handleMcpPostRequest = async (req: Request, res: Response): Promise
// Get filtered settings based on user context (after setting user context)
const systemConfigDao = getSystemConfigDao();
const systemConfig = await systemConfigDao.get();
const routingConfig = systemConfig?.routing || {
enableGlobalRoute: true,
enableGroupNameRoute: true,
const routingConfig = {
enableGlobalRoute: systemConfig?.routing?.enableGlobalRoute ?? true,
enableGroupNameRoute: systemConfig?.routing?.enableGroupNameRoute ?? true,
};
if (!group && !routingConfig.enableGlobalRoute) {
res.status(403).send('Global routes are disabled. Please specify a group ID.');

View File

@@ -1,13 +1,13 @@
import { getRepositoryFactory } from '../db/index.js';
import { VectorEmbeddingRepository } from '../db/repositories/index.js';
import { Tool } from '../types/index.js';
import { getAppDataSource, initializeDatabase } from '../db/connection.js';
import { getAppDataSource, isDatabaseConnected, initializeDatabase } from '../db/connection.js';
import { getSmartRoutingConfig } from '../utils/smartRouting.js';
import OpenAI from 'openai';
// Get OpenAI configuration from smartRouting settings or fallback to environment variables
const getOpenAIConfig = () => {
const smartRoutingConfig = getSmartRoutingConfig();
const getOpenAIConfig = async () => {
const smartRoutingConfig = await getSmartRoutingConfig();
return {
apiKey: smartRoutingConfig.openaiApiKey,
baseURL: smartRoutingConfig.openaiApiBaseUrl,
@@ -34,8 +34,8 @@ const getDimensionsForModel = (model: string): number => {
};
// Initialize the OpenAI client with smartRouting configuration
const getOpenAIClient = () => {
const config = getOpenAIConfig();
const getOpenAIClient = async () => {
const config = await getOpenAIConfig();
return new OpenAI({
apiKey: config.apiKey, // Get API key from smartRouting settings or environment variables
baseURL: config.baseURL, // Get base URL from smartRouting settings or fallback to default
@@ -53,32 +53,26 @@ const getOpenAIClient = () => {
* @returns Promise with vector embedding as number array
*/
async function generateEmbedding(text: string): Promise<number[]> {
try {
const config = getOpenAIConfig();
const openai = getOpenAIClient();
const config = await getOpenAIConfig();
const openai = await getOpenAIClient();
// Check if API key is configured
if (!openai.apiKey) {
console.warn('OpenAI API key is not configured. Using fallback embedding method.');
return generateFallbackEmbedding(text);
}
// Truncate text if it's too long (OpenAI has token limits)
const truncatedText = text.length > 8000 ? text.substring(0, 8000) : text;
// Call OpenAI's embeddings API
const response = await openai.embeddings.create({
model: config.embeddingModel, // Modern model with better performance
input: truncatedText,
});
// Return the embedding
return response.data[0].embedding;
} catch (error) {
console.error('Error generating embedding:', error);
console.warn('Falling back to simple embedding method');
// Check if API key is configured
if (!openai.apiKey) {
console.warn('OpenAI API key is not configured. Using fallback embedding method.');
return generateFallbackEmbedding(text);
}
// Truncate text if it's too long (OpenAI has token limits)
const truncatedText = text.length > 8000 ? text.substring(0, 8000) : text;
// Call OpenAI's embeddings API
const response = await openai.embeddings.create({
model: config.embeddingModel, // Modern model with better performance
input: truncatedText,
});
// Return the embedding
return response.data[0].embedding;
}
/**
@@ -198,12 +192,18 @@ export const saveToolsAsVectorEmbeddings = async (
return;
}
const smartRoutingConfig = getSmartRoutingConfig();
const smartRoutingConfig = await getSmartRoutingConfig();
if (!smartRoutingConfig.enabled) {
return;
}
const config = getOpenAIConfig();
// Ensure database is initialized before using repository
if (!isDatabaseConnected()) {
console.info('Database not initialized, initializing...');
await initializeDatabase();
}
const config = await getOpenAIConfig();
const vectorRepository = getRepositoryFactory(
'vectorEmbeddings',
)() as VectorEmbeddingRepository;
@@ -227,36 +227,31 @@ export const saveToolsAsVectorEmbeddings = async (
.filter(Boolean)
.join(' ');
try {
// Generate embedding
const embedding = await generateEmbedding(searchableText);
// Generate embedding
const embedding = await generateEmbedding(searchableText);
// Check database compatibility before saving
await checkDatabaseVectorDimensions(embedding.length);
// Check database compatibility before saving
await checkDatabaseVectorDimensions(embedding.length);
// Save embedding
await vectorRepository.saveEmbedding(
'tool',
`${serverName}:${tool.name}`,
searchableText,
embedding,
{
serverName,
toolName: tool.name,
description: tool.description,
inputSchema: tool.inputSchema,
},
config.embeddingModel, // Store the model used for this embedding
);
} catch (toolError) {
console.error(`Error processing tool ${tool.name} for server ${serverName}:`, toolError);
// Continue with the next tool rather than failing the whole batch
}
// Save embedding
await vectorRepository.saveEmbedding(
'tool',
`${serverName}:${tool.name}`,
searchableText,
embedding,
{
serverName,
toolName: tool.name,
description: tool.description,
inputSchema: tool.inputSchema,
},
config.embeddingModel, // Store the model used for this embedding
);
}
console.log(`Saved ${tools.length} tool embeddings for server: ${serverName}`);
} catch (error) {
console.error(`Error saving tool embeddings for server ${serverName}:`, error);
console.error(`Error saving tool embeddings for server ${serverName}: ${error}`);
}
};
@@ -381,7 +376,7 @@ export const getAllVectorizedTools = async (
}>
> => {
try {
const config = getOpenAIConfig();
const config = await getOpenAIConfig();
const vectorRepository = getRepositoryFactory(
'vectorEmbeddings',
)() as VectorEmbeddingRepository;

View File

@@ -243,6 +243,19 @@ export interface OAuthServerConfig {
};
}
// Bearer authentication key configuration
export type BearerKeyAccessType = 'all' | 'groups' | 'servers' | 'custom';
export interface BearerKey {
id: string; // Unique identifier for the key
name: string; // Human readable key name
token: string; // Bearer token value
enabled: boolean; // Whether this key is enabled
accessType: BearerKeyAccessType; // Access scope type
allowedGroups?: string[]; // Allowed group names when accessType === 'groups' or 'custom'
allowedServers?: string[]; // Allowed server names when accessType === 'servers' or 'custom'
}
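An illustrative multi-key configuration using the interface above, including the new 'custom' access type that combines group and server scoping (all values are examples):

const exampleBearerKeys: BearerKey[] = [
  {
    id: 'key-1', // illustrative values only
    name: 'ci-pipeline',
    token: 'example-token-not-real',
    enabled: true,
    accessType: 'custom', // scoped to both a group and a server
    allowedGroups: ['internal-tools'],
    allowedServers: ['github-server'],
  },
  {
    id: 'key-2',
    name: 'read-only',
    token: 'another-example-token',
    enabled: false,
    accessType: 'all', // full access, currently disabled
  },
];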
// Represents the settings for MCP servers
export interface McpSettings {
users?: IUser[]; // Array of user credentials and permissions
@@ -254,6 +267,18 @@ export interface McpSettings {
userConfigs?: Record<string, UserConfig>; // User-specific configurations
oauthClients?: IOAuthClient[]; // OAuth clients for MCPHub's authorization server
oauthTokens?: IOAuthToken[]; // Persisted OAuth tokens (access + refresh) for authorization server
bearerKeys?: BearerKey[]; // Bearer authentication keys (multi-key configuration)
}
// Proxychains4 configuration for STDIO servers (Linux/macOS only)
export interface ProxychainsConfig {
enabled?: boolean; // Enable/disable proxychains4 proxy routing
type?: 'socks4' | 'socks5' | 'http'; // Proxy protocol type
host?: string; // Proxy server hostname or IP address
port?: number; // Proxy server port
username?: string; // Proxy authentication username (optional)
password?: string; // Proxy authentication password (optional)
configPath?: string; // Path to custom proxychains4 configuration file (optional, overrides above settings)
}
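A sketch of how a STDIO server entry might opt into the proxy support (field names follow the interfaces in this file; the command, package name, and addresses are placeholders):

const proxiedServer: Partial<ServerConfig> = {
  command: 'npx',
  args: ['-y', 'example-stdio-mcp-server'], // hypothetical package
  proxy: {
    enabled: true,
    type: 'socks5',
    host: '127.0.0.1',
    port: 1080,
    // configPath: '/etc/proxychains4.conf', // or point at an existing proxychains4 config instead
  },
};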
// Configuration details for an individual server
@@ -271,6 +296,8 @@ export interface ServerConfig {
tools?: Record<string, { enabled: boolean; description?: string }>; // Tool-specific configurations with enable/disable state and custom descriptions
prompts?: Record<string, { enabled: boolean; description?: string }>; // Prompt-specific configurations with enable/disable state and custom descriptions
options?: Partial<Pick<RequestOptions, 'timeout' | 'resetTimeoutOnProgress' | 'maxTotalTimeout'>>; // MCP request options configuration
// Proxychains4 proxy configuration for STDIO servers (Linux/macOS only, Windows not supported)
proxy?: ProxychainsConfig;
// OAuth authentication for upstream MCP servers
oauth?: {
// Static client configuration (traditional OAuth flow)
@@ -420,3 +447,50 @@ export interface AddServerRequest {
name: string; // Name of the server to add
config: ServerConfig; // Configuration details for the server
}
// Request payload for batch creating servers
export interface BatchCreateServersRequest {
servers: AddServerRequest[]; // Array of servers to create
}
// Result for a single server in batch operation
export interface BatchServerResult {
name: string; // Server name
success: boolean; // Whether the operation succeeded
message?: string; // Error message if failed
}
// Response for batch create servers operation
export interface BatchCreateServersResponse {
success: boolean; // Overall operation success (true if at least one server succeeded)
successCount: number; // Number of servers successfully created
failureCount: number; // Number of servers that failed
results: BatchServerResult[]; // Detailed results for each server
}
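An illustrative request/response round trip for the batch server types above (the STDIO config values are placeholders, and a minimal command/args ServerConfig is assumed to be valid):

const request: BatchCreateServersRequest = {
  servers: [
    { name: 'fetch', config: { command: 'uvx', args: ['mcp-server-fetch'] } as ServerConfig },
    { name: 'time', config: { command: 'uvx', args: ['mcp-server-time'] } as ServerConfig },
  ],
};

const reportBatch = (response: BatchCreateServersResponse): void => {
  console.log(`created ${response.successCount}, failed ${response.failureCount}`);
  for (const failed of response.results.filter((r) => !r.success)) {
    console.warn(`  ${failed.name}: ${failed.message ?? 'unknown error'}`);
  }
};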
// Request payload for adding a new group
export interface AddGroupRequest {
name: string; // Name of the group to add
description?: string; // Optional description of the group
servers?: string[] | IGroupServerConfig[]; // Array of server names or server configurations
}
// Request payload for batch creating groups
export interface BatchCreateGroupsRequest {
groups: AddGroupRequest[]; // Array of groups to create
}
// Result for a single group in batch operation
export interface BatchGroupResult {
name: string; // Group name
success: boolean; // Whether the operation succeeded
message?: string; // Error message if failed
}
// Response for batch create groups operation
export interface BatchCreateGroupsResponse {
success: boolean; // Overall operation success (true if at least one group succeeded)
successCount: number; // Number of groups successfully created
failureCount: number; // Number of groups that failed
results: BatchGroupResult[]; // Detailed results for each group
}
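A matching sketch for the group batch types (names and server references are examples only):

const groupRequest: BatchCreateGroupsRequest = {
  groups: [
    { name: 'search-tools', description: 'Search and retrieval servers', servers: ['fetch'] },
    { name: 'dev-tools', servers: [] },
  ],
};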

src/utils/migration.test.ts (new file, 122 lines)

@@ -0,0 +1,122 @@
import { jest } from '@jest/globals';
// Mocks must be defined before importing the module under test.
const initializeDatabaseMock = jest.fn(async () => undefined);
jest.mock('../db/connection.js', () => ({
initializeDatabase: initializeDatabaseMock,
}));
const setDaoFactoryMock = jest.fn();
jest.mock('../dao/DaoFactory.js', () => ({
setDaoFactory: setDaoFactoryMock,
}));
jest.mock('../dao/DatabaseDaoFactory.js', () => ({
DatabaseDaoFactory: {
getInstance: jest.fn(() => ({
/* noop */
})),
},
}));
const loadOriginalSettingsMock = jest.fn(() => ({ users: [] }));
jest.mock('../config/index.js', () => ({
loadOriginalSettings: loadOriginalSettingsMock,
}));
const userRepoCountMock = jest.fn<() => Promise<number>>();
jest.mock('../db/repositories/UserRepository.js', () => ({
UserRepository: jest.fn().mockImplementation(() => ({
count: userRepoCountMock,
})),
}));
const bearerKeyCountMock = jest.fn<() => Promise<number>>();
const bearerKeyCreateMock =
jest.fn<
(data: {
name: string;
token: string;
enabled: boolean;
accessType: string;
allowedGroups: string[];
allowedServers: string[];
}) => Promise<unknown>
>();
jest.mock('../db/repositories/BearerKeyRepository.js', () => ({
BearerKeyRepository: jest.fn().mockImplementation(() => ({
count: bearerKeyCountMock,
create: bearerKeyCreateMock,
})),
}));
const systemConfigGetMock = jest.fn<() => Promise<any>>();
jest.mock('../db/repositories/SystemConfigRepository.js', () => ({
SystemConfigRepository: jest.fn().mockImplementation(() => ({
get: systemConfigGetMock,
})),
}));
describe('initializeDatabaseMode legacy bearer auth migration', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('skips legacy migration when bearerKeys table already has data', async () => {
userRepoCountMock.mockResolvedValue(1);
bearerKeyCountMock.mockResolvedValue(2);
systemConfigGetMock.mockResolvedValue({
routing: { enableBearerAuth: true, bearerAuthKey: 'db-key' },
});
const { initializeDatabaseMode } = await import('./migration.js');
const ok = await initializeDatabaseMode();
expect(ok).toBe(true);
expect(initializeDatabaseMock).toHaveBeenCalled();
expect(loadOriginalSettingsMock).not.toHaveBeenCalled();
expect(systemConfigGetMock).not.toHaveBeenCalled();
expect(bearerKeyCreateMock).not.toHaveBeenCalled();
});
it('migrates legacy routing bearerAuthKey into bearerKeys when users exist and keys table is empty', async () => {
userRepoCountMock.mockResolvedValue(3);
bearerKeyCountMock.mockResolvedValue(0);
systemConfigGetMock.mockResolvedValue({
routing: { enableBearerAuth: true, bearerAuthKey: 'db-key' },
});
const { initializeDatabaseMode } = await import('./migration.js');
const ok = await initializeDatabaseMode();
expect(ok).toBe(true);
expect(loadOriginalSettingsMock).not.toHaveBeenCalled();
expect(systemConfigGetMock).toHaveBeenCalledTimes(1);
expect(bearerKeyCreateMock).toHaveBeenCalledTimes(1);
expect(bearerKeyCreateMock).toHaveBeenCalledWith({
name: 'default',
token: 'db-key',
enabled: true,
accessType: 'all',
allowedGroups: [],
allowedServers: [],
});
});
it('does not migrate when routing has no bearerAuthKey', async () => {
userRepoCountMock.mockResolvedValue(1);
bearerKeyCountMock.mockResolvedValue(0);
systemConfigGetMock.mockResolvedValue({
routing: { enableBearerAuth: true, bearerAuthKey: ' ' },
});
const { initializeDatabaseMode } = await import('./migration.js');
const ok = await initializeDatabaseMode();
expect(ok).toBe(true);
expect(loadOriginalSettingsMock).not.toHaveBeenCalled();
expect(systemConfigGetMock).toHaveBeenCalledTimes(1);
expect(bearerKeyCreateMock).not.toHaveBeenCalled();
});
});
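One further case could be added to the describe block above, mirroring the disabled-key rule implemented in migration.ts; a sketch only, not part of this diff:

it('migrates a disabled key when enableBearerAuth is false but a legacy key exists', async () => {
  userRepoCountMock.mockResolvedValue(1);
  bearerKeyCountMock.mockResolvedValue(0);
  systemConfigGetMock.mockResolvedValue({
    routing: { enableBearerAuth: false, bearerAuthKey: 'db-key' },
  });
  const { initializeDatabaseMode } = await import('./migration.js');
  await initializeDatabaseMode();
  expect(bearerKeyCreateMock).toHaveBeenCalledWith(
    expect.objectContaining({ token: 'db-key', enabled: false }),
  );
});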


@@ -9,6 +9,7 @@ import { SystemConfigRepository } from '../db/repositories/SystemConfigRepositor
import { UserConfigRepository } from '../db/repositories/UserConfigRepository.js';
import { OAuthClientRepository } from '../db/repositories/OAuthClientRepository.js';
import { OAuthTokenRepository } from '../db/repositories/OAuthTokenRepository.js';
import { BearerKeyRepository } from '../db/repositories/BearerKeyRepository.js';
/**
* Migrate from file-based configuration to database
@@ -33,6 +34,7 @@ export async function migrateToDatabase(): Promise<boolean> {
const userConfigRepo = new UserConfigRepository();
const oauthClientRepo = new OAuthClientRepository();
const oauthTokenRepo = new OAuthTokenRepository();
const bearerKeyRepo = new BearerKeyRepository();
// Migrate users
if (settings.users && settings.users.length > 0) {
@@ -75,6 +77,7 @@ export async function migrateToDatabase(): Promise<boolean> {
prompts: config.prompts,
options: config.options,
oauth: config.oauth,
openapi: config.openapi,
});
console.log(` - Created server: ${name}`);
} else {
@@ -119,6 +122,52 @@ export async function migrateToDatabase(): Promise<boolean> {
console.log(' - System configuration updated');
}
// Migrate bearer auth keys
console.log('Migrating bearer authentication keys...');
// Prefer explicit bearerKeys if present in settings
if (Array.isArray(settings.bearerKeys) && settings.bearerKeys.length > 0) {
for (const key of settings.bearerKeys) {
await bearerKeyRepo.create({
name: key.name,
token: key.token,
enabled: key.enabled,
accessType: key.accessType,
allowedGroups: key.allowedGroups ?? [],
allowedServers: key.allowedServers ?? [],
} as any);
console.log(` - Migrated bearer key: ${key.name} (${key.id ?? 'no-id'})`);
}
} else if (settings.systemConfig?.routing) {
// Fallback to legacy routing.enableBearerAuth / bearerAuthKey
const routing = settings.systemConfig.routing as any;
const enableBearerAuth: boolean = !!routing.enableBearerAuth;
const rawKey: string = (routing.bearerAuthKey || '').trim();
// Migration rules:
// 1) enable=false, key empty -> no keys
// 2) enable=false, key present -> one disabled key (name=default)
// 3) enable=true, key present -> one enabled key (name=default)
// 4) enable=true, key empty -> no keys
if (rawKey) {
await bearerKeyRepo.create({
name: 'default',
token: rawKey,
enabled: enableBearerAuth,
accessType: 'all',
allowedGroups: [],
allowedServers: [],
} as any);
console.log(
` - Migrated legacy bearer auth config to key: default (enabled=${enableBearerAuth})`,
);
} else {
console.log(' - No legacy bearer auth key found, skipping bearer key migration');
}
} else {
console.log(' - No bearer auth configuration found, skipping bearer key migration');
}
// Migrate user configs
if (settings.userConfigs) {
const usernames = Object.keys(settings.userConfigs);
@@ -206,6 +255,9 @@ export async function initializeDatabaseMode(): Promise<boolean> {
// Check if migration is needed
const userRepo = new UserRepository();
const bearerKeyRepo = new BearerKeyRepository();
const systemConfigRepo = new SystemConfigRepository();
const userCount = await userRepo.count();
if (userCount === 0) {
@@ -216,6 +268,36 @@ export async function initializeDatabaseMode(): Promise<boolean> {
}
} else {
console.log(`Database already contains ${userCount} users, skipping migration`);
// One-time migration for legacy bearer auth config stored inside DB routing settings.
// If bearerKeys table already has data, do nothing.
const bearerKeyCount = await bearerKeyRepo.count();
if (bearerKeyCount > 0) {
console.log(
`Bearer keys table already contains ${bearerKeyCount} keys, skipping legacy bearer auth migration`,
);
} else {
const systemConfig = await systemConfigRepo.get();
const routing = (systemConfig as any)?.routing || {};
const enableBearerAuth: boolean = !!routing.enableBearerAuth;
const rawKey: string = (routing.bearerAuthKey || '').trim();
if (rawKey) {
await bearerKeyRepo.create({
name: 'default',
token: rawKey,
enabled: enableBearerAuth,
accessType: 'all',
allowedGroups: [],
allowedServers: [],
} as any);
console.log(
` - Migrated legacy DB routing bearer auth config to key: default (enabled=${enableBearerAuth})`,
);
} else {
console.log('No legacy DB routing bearer auth key found, skipping bearer key migration');
}
}
}
console.log('✅ Database mode initialized successfully');
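The four legacy migration rules listed in the comment earlier in this file reduce to a simple mapping from the old routing fields to bearer keys; a pure-function sketch of that mapping (illustrative only, not the code in this diff):

type LegacyRouting = { enableBearerAuth?: boolean; bearerAuthKey?: string };

const legacyRoutingToBearerKeys = (routing: LegacyRouting): Omit<BearerKey, 'id'>[] => {
  const rawKey = (routing.bearerAuthKey || '').trim();
  if (!rawKey) {
    return []; // rules 1 and 4: no key value, nothing to migrate
  }
  return [
    {
      name: 'default',
      token: rawKey,
      enabled: !!routing.enableBearerAuth, // rule 2 -> disabled key, rule 3 -> enabled key
      accessType: 'all',
      allowedGroups: [],
      allowedServers: [],
    },
  ];
};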


@@ -1,4 +1,5 @@
import { loadSettings, expandEnvVars } from '../config/index.js';
import { expandEnvVars } from '../config/index.js';
import { getSystemConfigDao } from '../dao/DaoFactory.js';
/**
* Smart routing configuration interface
@@ -16,16 +17,17 @@ export interface SmartRoutingConfig {
*
* Priority order for each setting:
* 1. Specific environment variables (ENABLE_SMART_ROUTING, SMART_ROUTING_ENABLED, etc.)
* 2. Generic environment variables (OPENAI_API_KEY, DATABASE_URL, etc.)
* 2. Generic environment variables (OPENAI_API_KEY, DB_URL, etc.)
* 3. Settings configuration (systemConfig.smartRouting)
* 4. Default values
*
* @returns {SmartRoutingConfig} Complete smart routing configuration
*/
export function getSmartRoutingConfig(): SmartRoutingConfig {
const settings = loadSettings();
const smartRoutingSettings: Partial<SmartRoutingConfig> =
settings.systemConfig?.smartRouting || {};
export async function getSmartRoutingConfig(): Promise<SmartRoutingConfig> {
// Get system config from DAO
const systemConfigDao = getSystemConfigDao();
const systemConfig = await systemConfigDao.get();
const smartRoutingSettings: Partial<SmartRoutingConfig> = systemConfig.smartRouting || {};
return {
// Enabled status - check multiple environment variables
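The priority order documented above can be pictured for the enabled flag alone; a small sketch (the helper name is hypothetical and the real resolution logic handles more variables):

const resolveEnabled = (fromSettings: boolean | undefined): boolean => {
  const envValue = process.env.ENABLE_SMART_ROUTING ?? process.env.SMART_ROUTING_ENABLED; // 1. specific env vars
  if (envValue !== undefined) {
    return envValue.toLowerCase() === 'true';
  }
  if (fromSettings !== undefined) {
    return fromSettings; // 3. systemConfig.smartRouting value
  }
  return false; // 4. default
};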


@@ -1,11 +1,7 @@
import { getMcpSettingsJson } from '../../src/controllers/configController.js';
import * as config from '../../src/config/index.js';
import * as DaoFactory from '../../src/dao/DaoFactory.js';
import { Request, Response } from 'express';
// Mock the config module
jest.mock('../../src/config/index.js');
// Mock the DaoFactory module
jest.mock('../../src/dao/DaoFactory.js');
describe('ConfigController - getMcpSettingsJson', () => {
@@ -13,9 +9,18 @@ describe('ConfigController - getMcpSettingsJson', () => {
let mockResponse: Partial<Response>;
let mockJson: jest.Mock;
let mockStatus: jest.Mock;
let mockServerDao: { findById: jest.Mock };
let mockServerDao: { findById: jest.Mock; findAll: jest.Mock };
let mockUserDao: { findAll: jest.Mock };
let mockGroupDao: { findAll: jest.Mock };
let mockSystemConfigDao: { get: jest.Mock };
let mockUserConfigDao: { getAll: jest.Mock };
let mockOAuthClientDao: { findAll: jest.Mock };
let mockOAuthTokenDao: { findAll: jest.Mock };
let mockBearerKeyDao: { findAll: jest.Mock };
beforeEach(() => {
jest.clearAllMocks();
mockJson = jest.fn();
mockStatus = jest.fn().mockReturnThis();
mockRequest = {
@@ -25,40 +30,28 @@ describe('ConfigController - getMcpSettingsJson', () => {
json: mockJson,
status: mockStatus,
};
mockServerDao = {
findById: jest.fn(),
findAll: jest.fn(),
};
mockUserDao = { findAll: jest.fn() };
mockGroupDao = { findAll: jest.fn() };
mockSystemConfigDao = { get: jest.fn() };
mockUserConfigDao = { getAll: jest.fn() };
mockOAuthClientDao = { findAll: jest.fn() };
mockOAuthTokenDao = { findAll: jest.fn() };
mockBearerKeyDao = { findAll: jest.fn() };
// Setup ServerDao mock
(DaoFactory.getServerDao as jest.Mock).mockReturnValue(mockServerDao);
// Reset mocks
jest.clearAllMocks();
});
describe('Full Settings Export', () => {
it('should handle settings without users array', async () => {
const mockSettings = {
mcpServers: {
'test-server': {
command: 'test',
args: ['--test'],
},
},
};
(config.loadOriginalSettings as jest.Mock).mockReturnValue(mockSettings);
await getMcpSettingsJson(mockRequest as Request, mockResponse as Response);
expect(mockJson).toHaveBeenCalledWith({
success: true,
data: {
mcpServers: mockSettings.mcpServers,
users: undefined,
},
});
});
// Wire DaoFactory convenience functions to our mocks
(DaoFactory.getServerDao as unknown as jest.Mock).mockReturnValue(mockServerDao);
(DaoFactory.getUserDao as unknown as jest.Mock).mockReturnValue(mockUserDao);
(DaoFactory.getGroupDao as unknown as jest.Mock).mockReturnValue(mockGroupDao);
(DaoFactory.getSystemConfigDao as unknown as jest.Mock).mockReturnValue(mockSystemConfigDao);
(DaoFactory.getUserConfigDao as unknown as jest.Mock).mockReturnValue(mockUserConfigDao);
(DaoFactory.getOAuthClientDao as unknown as jest.Mock).mockReturnValue(mockOAuthClientDao);
(DaoFactory.getOAuthTokenDao as unknown as jest.Mock).mockReturnValue(mockOAuthTokenDao);
(DaoFactory.getBearerKeyDao as unknown as jest.Mock).mockReturnValue(mockBearerKeyDao);
});
describe('Individual Server Export', () => {
@@ -146,10 +139,14 @@ describe('ConfigController - getMcpSettingsJson', () => {
describe('Error Handling', () => {
it('should handle errors gracefully and return 500', async () => {
const errorMessage = 'Failed to load settings';
(config.loadOriginalSettings as jest.Mock).mockImplementation(() => {
throw new Error(errorMessage);
});
mockServerDao.findAll.mockRejectedValue(new Error('boom'));
mockUserDao.findAll.mockResolvedValue([]);
mockGroupDao.findAll.mockResolvedValue([]);
mockSystemConfigDao.get.mockResolvedValue({});
mockUserConfigDao.getAll.mockResolvedValue({});
mockOAuthClientDao.findAll.mockResolvedValue([]);
mockOAuthTokenDao.findAll.mockResolvedValue([]);
mockBearerKeyDao.findAll.mockResolvedValue([]);
await getMcpSettingsJson(mockRequest as Request, mockResponse as Response);


@@ -0,0 +1,97 @@
import fs from 'fs';
import os from 'os';
import path from 'path';
import { BearerKeyDaoImpl } from '../../src/dao/BearerKeyDao.js';
const writeSettings = (settingsPath: string, settings: unknown): void => {
fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2), 'utf8');
};
describe('BearerKeyDaoImpl migration + settings caching behavior', () => {
let tmpDir: string;
let settingsPath: string;
let originalSettingsEnv: string | undefined;
beforeEach(() => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mcphub-bearer-keys-'));
settingsPath = path.join(tmpDir, 'mcp_settings.json');
originalSettingsEnv = process.env.MCPHUB_SETTING_PATH;
process.env.MCPHUB_SETTING_PATH = settingsPath;
});
afterEach(() => {
if (originalSettingsEnv === undefined) {
delete process.env.MCPHUB_SETTING_PATH;
} else {
process.env.MCPHUB_SETTING_PATH = originalSettingsEnv;
}
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// ignore cleanup errors
}
});
it('does not rewrite settings when bearerKeys exists as an empty array', async () => {
writeSettings(settingsPath, {
mcpServers: {},
users: [],
systemConfig: {
routing: {
enableBearerAuth: false,
bearerAuthKey: '',
},
},
bearerKeys: [],
});
const writeSpy = jest.spyOn(fs, 'writeFileSync');
const dao = new BearerKeyDaoImpl();
const enabled1 = await dao.findEnabled();
const enabled2 = await dao.findEnabled();
expect(enabled1).toEqual([]);
expect(enabled2).toEqual([]);
// The DAO should NOT persist anything because bearerKeys already exists.
expect(writeSpy).not.toHaveBeenCalled();
writeSpy.mockRestore();
});
it('migrates legacy bearerAuthKey only once', async () => {
writeSettings(settingsPath, {
mcpServers: {},
users: [],
systemConfig: {
routing: {
enableBearerAuth: true,
bearerAuthKey: 'legacy-token',
},
},
// bearerKeys is intentionally missing to trigger migration
});
const writeSpy = jest.spyOn(fs, 'writeFileSync');
const dao = new BearerKeyDaoImpl();
const enabled1 = await dao.findEnabled();
expect(enabled1).toHaveLength(1);
expect(enabled1[0].token).toBe('legacy-token');
expect(enabled1[0].enabled).toBe(true);
const enabled2 = await dao.findEnabled();
expect(enabled2).toHaveLength(1);
expect(enabled2[0].token).toBe('legacy-token');
// One write for the migration, no further writes on subsequent reads.
expect(writeSpy).toHaveBeenCalledTimes(1);
writeSpy.mockRestore();
});
});
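For context, application code obtains this DAO through the factory accessor that the sseService tests below mock; a minimal usage sketch (the import path and helper name are assumptions):

import { getBearerKeyDao } from '../dao/index.js';

const isTokenAllowed = async (token: string): Promise<boolean> => {
  const keys = await getBearerKeyDao().findEnabled();
  return keys.some((key) => key.token === token);
};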


@@ -31,14 +31,28 @@ jest.mock('../../src/utils/oauthBearer.js', () => ({
resolveOAuthUserFromToken: jest.fn(),
}));
// Mock DAO accessors used by sseService (avoid file-based DAOs and migrations)
jest.mock('../../src/dao/index.js', () => ({
getBearerKeyDao: jest.fn(),
getGroupDao: jest.fn(),
getSystemConfigDao: jest.fn(),
}));
// Mock config module default export used by sseService
jest.mock('../../src/config/index.js', () => ({
__esModule: true,
default: { basePath: '' },
loadSettings: jest.fn(),
}));
import { Request, Response } from 'express';
import { handleSseConnection, transports } from '../../src/services/sseService.js';
import * as mcpService from '../../src/services/mcpService.js';
import * as configModule from '../../src/config/index.js';
import * as daoIndex from '../../src/dao/index.js';
// Mock remaining dependencies
jest.mock('../../src/services/mcpService.js');
jest.mock('../../src/config/index.js');
// Mock UserContextService with getInstance pattern
const mockUserContextService = {
@@ -141,6 +155,24 @@ describe('Keepalive Functionality', () => {
};
(mcpService.getMcpServer as jest.Mock).mockReturnValue(mockMcpServer);
// Mock bearer key + system config DAOs used by sseService
const mockBearerKeyDao = {
findEnabled: jest.fn().mockResolvedValue([]),
};
(daoIndex.getBearerKeyDao as unknown as jest.Mock).mockReturnValue(mockBearerKeyDao);
const mockSystemConfigDao = {
get: jest.fn().mockResolvedValue({
routing: {
enableGlobalRoute: true,
enableGroupNameRoute: true,
enableBearerAuth: false,
bearerAuthKey: '',
},
}),
};
(daoIndex.getSystemConfigDao as unknown as jest.Mock).mockReturnValue(mockSystemConfigDao);
// Mock loadSettings
(configModule.loadSettings as jest.Mock).mockReturnValue({
systemConfig: {


@@ -5,7 +5,7 @@ import 'reflect-metadata';
Object.assign(process.env, {
NODE_ENV: 'test',
JWT_SECRET: 'test-jwt-secret-key',
DATABASE_URL: 'sqlite::memory:',
DB_URL: 'sqlite::memory:',
});
// Mock moduleDir to avoid import.meta parsing issues in Jest
@@ -40,7 +40,7 @@ expect.extend({
};
}
},
toBeValidUUID(received: any) {
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
const pass = typeof received === 'string' && uuidRegex.test(received);