diff --git a/.gitignore b/.gitignore index 45d815f0..d28ac84d 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,8 @@ dist **/.DS_Store /nix/ /.mcp.json +/test/embeddings/.clj-kondo/ +/test/embeddings/.cpcache/ +/test/embeddings/.direnv/ +/test/embeddings/.envrc +/test/embeddings/.lsp/ diff --git a/cmd/docker-mcp/commands/feature.go b/cmd/docker-mcp/commands/feature.go index 77629543..03bf9440 100644 --- a/cmd/docker-mcp/commands/feature.go +++ b/cmd/docker-mcp/commands/feature.go @@ -153,7 +153,7 @@ func featureListCommand(dockerCli command.Cli, features features.Features) *cobr fmt.Println() // Show all known features - knownFeatures := []string{"oauth-interceptor", "mcp-oauth-dcr", "dynamic-tools", "tool-name-prefix"} + knownFeatures := []string{"oauth-interceptor", "mcp-oauth-dcr", "dynamic-tools", "tool-name-prefix", "use-embeddings"} if !features.IsRunningInDockerDesktop() { knownFeatures = append(knownFeatures, "profiles") } @@ -177,6 +177,8 @@ func featureListCommand(dockerCli command.Cli, features features.Features) *cobr fmt.Printf(" %-20s %s\n", "", "Enable profile management tools (docker mcp profile )") case "tool-name-prefix": fmt.Printf(" %-20s %s\n", "", "Prefix all tool names with server name to avoid conflicts") + case "use-embeddings": + fmt.Printf(" %-20s %s\n", "", "Enable vector similarity search for find-tools (requires OPENAI_API_KEY)") } fmt.Println() } @@ -215,6 +217,7 @@ func isFeatureEnabledFromConfig(configFile *configfile.ConfigFile, feature strin // Features that are enabled by default defaultEnabledFeatures := map[string]bool{ "mcp-oauth-dcr": true, + "dynamic-tools": true, } if configFile.Features == nil { @@ -246,6 +249,7 @@ func isKnownFeature(feature string, features features.Features) bool { "mcp-oauth-dcr", "dynamic-tools", "tool-name-prefix", + "use-embeddings", } if !features.IsRunningInDockerDesktop() { knownFeatures = append(knownFeatures, "profiles") diff --git a/cmd/docker-mcp/commands/feature_test.go 
b/cmd/docker-mcp/commands/feature_test.go index c7298bf8..b8cff41c 100644 --- a/cmd/docker-mcp/commands/feature_test.go +++ b/cmd/docker-mcp/commands/feature_test.go @@ -65,7 +65,7 @@ func TestIsFeatureEnabledDynamicTools(t *testing.T) { Features: make(map[string]string), } enabled := isFeatureEnabledFromConfig(configFile, "dynamic-tools") - assert.False(t, enabled, "dynamic-tools should default to disabled when missing") + assert.True(t, enabled, "dynamic-tools should default to enabled when missing") }) t.Run("nil features map", func(t *testing.T) { @@ -73,7 +73,7 @@ func TestIsFeatureEnabledDynamicTools(t *testing.T) { Features: nil, } enabled := isFeatureEnabledFromConfig(configFile, "dynamic-tools") - assert.False(t, enabled, "dynamic-tools should default to disabled when Features is nil") + assert.True(t, enabled, "dynamic-tools should default to enabled when Features is nil") }) } diff --git a/cmd/docker-mcp/commands/gateway.go b/cmd/docker-mcp/commands/gateway.go index 191eb6bc..1c1c6fdd 100644 --- a/cmd/docker-mcp/commands/gateway.go +++ b/cmd/docker-mcp/commands/gateway.go @@ -99,6 +99,9 @@ func gatewayCommand(docker docker.Client, dockerCli command.Cli, features featur // Check if tool name prefix feature is enabled options.ToolNamePrefix = isToolNamePrefixFeatureEnabled(dockerCli) + // Check if use-embeddings feature is enabled + options.UseEmbeddings = isUseEmbeddingsFeatureEnabled(dockerCli) + // Update catalog URL based on mcp-oauth-dcr flag if using default Docker catalog URL if len(options.CatalogPath) == 1 && (options.CatalogPath[0] == catalog.DockerCatalogURLV2 || options.CatalogPath[0] == catalog.DockerCatalogURLV3) { options.CatalogPath[0] = catalog.GetDockerCatalogURL(options.McpOAuthDcrEnabled) @@ -375,3 +378,17 @@ func setLegacyDefaults(options *gateway.Config) { } } } + +// isUseEmbeddingsFeatureEnabled checks if the use-embeddings feature is enabled +func isUseEmbeddingsFeatureEnabled(dockerCli command.Cli) bool { + configFile := 
dockerCli.ConfigFile() + if configFile == nil || configFile.Features == nil { + return false + } + + value, exists := configFile.Features["use-embeddings"] + if !exists { + return false + } + return value == "enabled" +} diff --git a/docs/feature-specs/embeddings-feature.md b/docs/feature-specs/embeddings-feature.md new file mode 100644 index 00000000..3d860d55 --- /dev/null +++ b/docs/feature-specs/embeddings-feature.md @@ -0,0 +1,297 @@ +--- +marp: true +theme: default +paginate: true +--- + +# Vector Embeddings for MCP Gateway + +**Semantic Search for MCP Servers and Tools** + +--- + +## Overview + +Added AI-powered semantic search capabilities to the MCP Gateway using OpenAI embeddings and SQLite vector database. + +**Key Features:** +- Vector similarity search for finding relevant MCP servers +- Feature flag system for opt-in usage +- Graceful fallback to keyword search + +--- + +## Components + +```mermaid + flowchart TD + A["MCP Gateway"] --> B["Embeddings Client"] + B --> C["Docker Container"] + C --> D["vector-db MCP Server"] + D --> E["SQLite-vec extension"] + E --> F["~/.docker/mcp/vectors.db"] + A --> G["OpenAI API"] + G --> H["text-embedding-3-small"] + style A fill:#e1f5ff + style B fill:#fff3e0 + style D fill:#f3e5f5 +``` + +--- + +## Feature Flag System + +```bash +# Enable embeddings feature +docker mcp feature enable use-embeddings + +# Set OpenAI API key +export OPENAI_API_KEY="sk-..." +``` + +**Configuration:** +- Feature: `use-embeddings` (disabled by default) +- Storage: `~/.docker/mcp/vectors.db` +- Model: OpenAI `text-embedding-3-small` (1536 dimensions) + +--- + +## How mcp-find Works with Embeddings + +```mermaid + sequenceDiagram + participant User + participant Gateway + participant OpenAI + participant VectorDB + participant Catalog + User->>Gateway: mcp-find "tools for github" + alt Embeddings Enabled + Gateway->>OpenAI: Generate embedding for query + OpenAI-->>Gateway: [0.014, -0.018, ...] 
+ Gateway->>VectorDB: Search mcp-server-collection + VectorDB-->>Gateway: Top 5 similar servers + Gateway->>Catalog: Lookup server details + Catalog-->>Gateway: Server metadata + else Embeddings Disabled + Gateway->>Catalog: Keyword search (title/description) + Catalog-->>Gateway: Matching servers + end + Gateway-->>User: Server results +``` + +--- + +## Implementation: VectorDBClient + +```go +type VectorDBClient struct { + cmd *exec.Cmd + client *mcp.Client + session *mcp.ClientSession +} + +// Connects to Docker container running vector-db +func NewVectorDBClient(ctx context.Context, dataDir string) (*VectorDBClient, error) { + cmd := exec.CommandContext(ctx, + "docker", "run", "-i", "--rm", + "-v", fmt.Sprintf("%s:/data", dataDir), + "-e", "DB_PATH=/data/vectors.db", + "-e", "VECTOR_DIMENSION=1536", + "jimclark106/vector-db:latest", + ) + // ... MCP client setup with CommandTransport +} +``` + +--- + +## Key Operations + +```go +type SearchArgs struct { + Vector []float64 `json:"vector"` + CollectionName string `json:"collection_name,omitempty"` + ExcludeCollections []string `json:"exclude_collections,omitempty"` + Limit int `json:"limit,omitempty"` +} + +func (c *VectorDBClient) SearchVectors( + ctx context.Context, + vector []float64, + options *SearchOptions, +) ([]SearchResult, error) +``` +**Collections:** +- `Tool embeddings`: one vector collection/server +- `mcp-server-collection`: MCP server metadata + +--- + +## Updated Tools + +### mcp-find Tool +- **With Embeddings:** Semantic search against `mcp-server-collection` +- **Without Embeddings:** Traditional keyword matching +- Returns: Server name, description, secrets, config schema + +### find-tools Tool +- **With Embeddings:** AI-powered server recommendations +- **Without Embeddings:** Error message with guidance +- Input: Natural language task description +- Output: Relevant servers from catalog + +--- + +## Code Structure + +``` +pkg/gateway/ +├── embeddings/ +│ └── client.go # VectorDBClient 
implementation +├── findmcps.go # findServersByEmbedding() +├── findtools.go # generateEmbedding() + find-tools tool +└── dynamic_mcps.go # mcp-find tool + decodeArguments() + +cmd/docker-mcp/commands/ +├── feature.go # use-embeddings feature flag +└── gateway.go # Feature check + client init +``` + +--- + +## Embeddings Client Lifecycle + +```mermaid +stateDiagram-v2 + [*] --> Checking: Gateway starts + Checking --> Disabled: Feature flag off + Checking --> Disabled: No OPENAI_API_KEY + Checking --> Initializing: Feature enabled + API key set + Initializing --> Running: Docker container started + Initializing --> Disabled: Init failed (logged warning) + Running --> SearchVectors: find-tools called + Running --> AddVector: Store embeddings + Running --> ListCollections: Manage collections + Running --> [*]: Gateway stops + Disabled --> [*]: Gateway stops +``` + +--- + +## Data Flow: Semantic Search + +```mermaid +flowchart LR + A[User Query] --> B[Generate Embedding] + B --> C[OpenAI API] + C --> D[1536-dim Vector] + D --> E[Search VectorDB] + E --> F[Top K Results] + F --> G[Extract Server Names] + G --> H[Lookup in Catalog] + H --> I[Return Server Info] + style A fill:#e3f2fd + style D fill:#fff3e0 + style F fill:#f3e5f5 + style I fill:#e8f5e9 +``` + +--- + +## Benefits + +✅ **Natural Language Search** +- "tools for github" → github server +- "database queries" → sqlite server + +✅ **Better Discovery** +- Semantic matching vs exact keywords +- Handles synonyms and related concepts + +✅ **Graceful Degradation** +- Falls back to keyword search if unavailable +- Gateway continues working without embeddings + +✅ **Opt-in Feature** +- Disabled by default +- Requires explicit enablement + API key + +--- + +## Testing & Validation + +**Build:** +```bash +make docker-mcp +make lint +``` + +**Enable Feature:** +```bash +docker mcp feature enable use-embeddings +export OPENAI_API_KEY="sk-..." 
+docker mcp gateway run +``` + +**Test Search:** +```bash +# Via Claude Desktop or other MCP client +Find mcp tools that can help me ... +``` + +--- + +## Implementation Details + +**Files Created:** +- `pkg/gateway/embeddings/client.go` (318 lines) +- `pkg/gateway/findmcps.go` (83 lines) + +**Files Modified:** +- `pkg/gateway/dynamic_mcps.go` - Base64 decoding + mcp-find handler +- `pkg/gateway/findtools.go` - Embedding generation +- `pkg/gateway/run.go` - Client initialization +- `pkg/gateway/config.go` - UseEmbeddings flag +- `cmd/docker-mcp/commands/feature.go` - Feature registration +- `cmd/docker-mcp/commands/gateway.go` - Feature check + +--- + +## Future Enhancements + +🔮 **Possible Improvements:** +- distribute embeddings with catalogs +- Support other embedding models and providers +- Automatic reindexing on catalog changes + +--- + +## Summary + +✨ **What We Built:** +- sqlite-vec integration +- LLM embeddings integration +- Semantic search for MCP servers + +🎯 **Impact:** +- Better server/tool discovery +- Natural language queries +- Foundation for AI-powered gateway + +--- + +## Questions? + +**Documentation:** +- Feature flags: `docker mcp feature ls` +- sqlite-vec service image: `jimclark106/vector-db:latest` +- model: `text-embedding-3-small` + +**Storage:** +- `~/.docker/mcp/vectors.db` +- `~/.docker/config.json` (feature flags) + +**Code:** +- Branch: `slim/embeddings` +- Main files: `pkg/gateway/embeddings/`, `findmcps.go` diff --git a/examples/embeddings/README.md b/examples/embeddings/README.md new file mode 100644 index 00000000..1dbe9148 --- /dev/null +++ b/examples/embeddings/README.md @@ -0,0 +1,62 @@ +# Embeddings OCI Examples + +This directory contains examples for pulling and pushing vector database embeddings to/from OCI registries. + +## Pull Example + +Downloads the embeddings OCI artifact and installs the vector.db directory to `~/.docker/mcp/`. 
+ +### Usage + +```bash +# From repository root +go run ./examples/embeddings/pull/main.go +``` + +The Pull function will: +1. Download the image from `jimclark106/embeddings:latest` +2. Extract all layers to a temporary directory +3. Verify that `vectors.db` file exists +4. Copy `vectors.db` to `~/.docker/mcp/` (skips if already exists) +5. Clean up temporary files + +## Push Example + +Creates an OCI artifact from a local vector.db directory and pushes it to a registry. + +### Usage + +```bash +# From repository root +go run ./examples/embeddings/push/main.go +``` + +### Example + +```bash +# Push the local vectors.db to your own registry +go run ./examples/embeddings/push/main.go ~/.docker/mcp/vectors.db jimclark106/embeddings:v1.0 +``` + +The Push function will: +1. Verify the source directory exists +2. Create a tar archive from the entire directory tree (always naming the root as `vectors.db` in the archive) +3. Create an OCI image layer from the tar +4. Push the image to the specified OCI reference + +Note: Regardless of your local directory name, the OCI artifact will always contain `vectors.db` at the root for consistency. + +## Authentication + +Both examples use the Docker credential helper for authentication. 
Make sure you're logged in to the registry: + +```bash +docker login +``` + +## Notes + +- Pull is idempotent - it won't overwrite existing `vectors.db` files +- Push requires write access to the specified OCI registry +- Push always stores the directory as `vectors.db` in the OCI artifact for consistency +- File permissions and symlinks are preserved during push/pull operations diff --git a/examples/embeddings/pull/main.go b/examples/embeddings/pull/main.go new file mode 100644 index 00000000..7e1b6e78 --- /dev/null +++ b/examples/embeddings/pull/main.go @@ -0,0 +1,20 @@ +package main + +import ( + "context" + "fmt" + "os" + + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" +) + +func main() { + fmt.Println("Pulling embeddings from OCI registry...") + + if err := embeddings.Pull(context.Background()); err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } + + fmt.Println("Successfully pulled embeddings!") +} diff --git a/examples/embeddings/push/main.go b/examples/embeddings/push/main.go new file mode 100644 index 00000000..8a56f5a8 --- /dev/null +++ b/examples/embeddings/push/main.go @@ -0,0 +1,31 @@ +package main + +import ( + "context" + "fmt" + "os" + + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" +) + +func main() { + if len(os.Args) != 3 { + fmt.Fprintf(os.Stderr, "Usage: %s \n", os.Args[0]) + fmt.Fprintf(os.Stderr, "\nExample:\n") + fmt.Fprintf(os.Stderr, " %s ~/.docker/mcp/vectors.db jimclark106/embeddings:v1.0\n", os.Args[0]) + fmt.Fprintf(os.Stderr, "\nNote: The directory will be stored as 'vectors.db' in the OCI artifact.\n") + os.Exit(1) + } + + vectorDBPath := os.Args[1] + ociRef := os.Args[2] + + fmt.Printf("Pushing vector database from %s to %s...\n", vectorDBPath, ociRef) + + if err := embeddings.Push(context.Background(), vectorDBPath, ociRef); err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } + + fmt.Printf("Successfully pushed to %s!\n", ociRef) +} diff --git 
a/examples/sqlite-vec/.gitignore b/examples/sqlite-vec/.gitignore new file mode 100644 index 00000000..fe88a65e --- /dev/null +++ b/examples/sqlite-vec/.gitignore @@ -0,0 +1,25 @@ +# Data directory +data/ +*.db +*.db-shm +*.db-wal + +# Go build artifacts +vector-server +*.exe +*.test +*.out + +# Go dependencies +go.sum + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db diff --git a/examples/sqlite-vec/Dockerfile b/examples/sqlite-vec/Dockerfile new file mode 100644 index 00000000..f1911578 --- /dev/null +++ b/examples/sqlite-vec/Dockerfile @@ -0,0 +1,46 @@ +# Build stage +FROM golang:1.24-alpine AS builder + +# Install build dependencies +RUN apk add --no-cache gcc musl-dev sqlite-dev + +WORKDIR /build + +# Copy go mod files +COPY go.mod go.sum* ./ +RUN go mod download + +# Copy source code +COPY main.go ./ + +# Build the application with sqlite extension support +RUN CGO_ENABLED=1 go build -tags "sqlite_extensions" -o vector-server -ldflags="-s -w" main.go + +# Runtime stage +FROM alpine:latest + +# Install runtime dependencies +RUN apk add --no-cache sqlite-libs ca-certificates wget + +# Download and install sqlite-vec extension (x86_64 only) +WORKDIR /tmp +RUN wget https://github.com/asg017/sqlite-vec/releases/download/v0.1.1/sqlite-vec-0.1.1-loadable-linux-x86_64.tar.gz && \ + tar xzf sqlite-vec-0.1.1-loadable-linux-x86_64.tar.gz && \ + mkdir -p /usr/local/lib/sqlite && \ + mv vec0.so /usr/local/lib/sqlite/ && \ + rm -rf /tmp/* + +# Copy binary from builder +COPY --from=builder /build/vector-server /usr/local/bin/ + +# Create data directory +RUN mkdir -p /data + +# Set environment variables +ENV DB_PATH=/data/vectors.db +ENV VECTOR_DIMENSION=1536 + +WORKDIR /data + +# Run as MCP stdio server +CMD ["vector-server"] diff --git a/examples/sqlite-vec/Makefile b/examples/sqlite-vec/Makefile new file mode 100644 index 00000000..6aa295ea --- /dev/null +++ b/examples/sqlite-vec/Makefile @@ -0,0 +1,75 @@ +.PHONY: help build push build-push run 
clean test lint + +# Docker image configuration +IMAGE_NAME := jimclark106/vector-db +TAG ?= latest +PLATFORM ?= linux/amd64 + +# Local build configuration +BINARY_NAME := sqlite-vec-mcp +GO_BUILD_FLAGS := -tags "sqlite_extensions" -ldflags="-s -w" + +help: ## Show this help message + @echo 'Usage: make [target]' + @echo '' + @echo 'Available targets:' + @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf " %-20s %s\n", $$1, $$2}' $(MAKEFILE_LIST) + +build: ## Build Docker image for linux/amd64 + docker buildx build \ + --platform linux/amd64 \ + -t $(IMAGE_NAME):$(TAG) \ + --load \ + . + +build-multi: ## Build Docker image for multiple platforms + docker buildx build \ + --platform $(PLATFORM) \ + -t $(IMAGE_NAME):$(TAG) \ + --load \ + . + +build-push: ## Build and push Docker image to registry + docker buildx build \ + --platform $(PLATFORM) \ + -t $(IMAGE_NAME):$(TAG) \ + --push \ + . + +push: ## Push Docker image to registry + docker push $(IMAGE_NAME):$(TAG) + +run: ## Run the MCP server locally in Docker + docker run --rm -i \ + --platform $(PLATFORM) \ + -v $(PWD)/data:/data \ + -e DB_PATH=/data/vectors.db \ + -e VECTOR_DIMENSION=1536 \ + $(IMAGE_NAME):$(TAG) + +build-local: ## Build the binary locally (for testing) + CGO_ENABLED=1 go build $(GO_BUILD_FLAGS) -o $(BINARY_NAME) main.go + +run-local: build-local ## Build and run the binary locally + ./$(BINARY_NAME) + +clean: ## Clean up built artifacts + rm -f $(BINARY_NAME) + rm -rf data/*.db + docker rmi $(IMAGE_NAME):$(TAG) || true + +test: ## Run tests + go test -v ./... + +lint: ## Run linters + golangci-lint run ./... + +fmt: ## Format code + go fmt ./... 
+ gofmt -w main.go + +deps: ## Download dependencies + go mod download + go mod tidy + +.DEFAULT_GOAL := help diff --git a/examples/sqlite-vec/README.md b/examples/sqlite-vec/README.md new file mode 100644 index 00000000..6aa05dff --- /dev/null +++ b/examples/sqlite-vec/README.md @@ -0,0 +1,433 @@ +# SQLite-Vec MCP Server + +A Model Context Protocol (MCP) server for storing and searching vector embeddings using SQLite with the sqlite-vec extension. This server exposes vector database operations as MCP tools that can be used by AI assistants and other MCP clients. + +## Features + +- **MCP stdio Protocol**: Communicates via stdin/stdout following the MCP specification +- **Collection Management**: Organize vectors into named collections +- **Vector Storage**: Store embeddings with custom metadata +- **Similarity Search**: Search within specific collections, across all collections, or exclude specific collections +- **Docker-based**: Run as a containerized MCP server +- **6 MCP Tools**: Complete vector database operations exposed as tools + +## Quick Start + +### Build and Run with Docker + +```bash +# Build the Docker image +docker build -t jimclark106/vector-db:latest . + +# Or use the Makefile +make build + +# Run the MCP server (stdio mode) +docker run --rm -i \ + -v $(pwd)/data:/data \ + -e DB_PATH=/data/vectors.db \ + -e VECTOR_DIMENSION=1536 \ + jimclark106/vector-db:latest +``` + +### Configuration + +Set the following environment variables: + +- `VECTOR_DIMENSION`: Dimension of your embeddings (default: 1536 for OpenAI ada-002) +- `DB_PATH`: SQLite database file path (default: /data/vectors.db) +- `VEC_EXT_PATH`: Path to sqlite-vec extension (default: /usr/local/lib/sqlite/vec0) + +## MCP Tools + +The server exposes the following MCP tools: + +### 1. list_collections + +List all vector collections in the database. 
+ +**Parameters:** None + +**Example:** +```json +{ + "name": "list_collections", + "arguments": {} +} +``` + +**Response:** +```json +[ + { + "id": 1, + "name": "code_embeddings", + "created_at": "2025-01-08 10:30:00" + } +] +``` + +### 2. create_collection + +Create a new vector collection. + +**Parameters:** +- `name` (string, required): Name of the collection to create + +**Example:** +```json +{ + "name": "create_collection", + "arguments": { + "name": "my_collection" + } +} +``` + +### 3. delete_collection + +Delete a collection and all its vectors (cascade delete). + +**Parameters:** +- `name` (string, required): Name of the collection to delete + +**Example:** +```json +{ + "name": "delete_collection", + "arguments": { + "name": "old_collection" + } +} +``` + +### 4. add_vector + +Add a vector to a collection (creates collection if it doesn't exist). + +**Parameters:** +- `collection_name` (string, required): Name of the collection +- `vector` (array of numbers, required): Vector embedding (must match configured dimension) +- `metadata` (object, optional): Optional metadata as JSON object + +**Example:** +```json +{ + "name": "add_vector", + "arguments": { + "collection_name": "code_embeddings", + "vector": [0.1, 0.2, 0.3, ..., 0.5], + "metadata": { + "file": "main.go", + "line": 42, + "function": "main" + } + } +} +``` + +**Response:** +```json +{ + "id": 123, + "collection_id": 1 +} +``` + +### 5. delete_vector + +Delete a vector by its ID. + +**Parameters:** +- `id` (integer, required): ID of the vector to delete + +**Example:** +```json +{ + "name": "delete_vector", + "arguments": { + "id": 123 + } +} +``` + +### 6. search + +Search for similar vectors using cosine distance. 
+ +**Parameters:** +- `vector` (array of numbers, required): Query vector (must match configured dimension) +- `limit` (integer, optional): Maximum number of results to return (default: 10) +- `collection_name` (string, optional): Search only within this collection +- `exclude_collections` (array of strings, optional): Search all collections except these + +**Example - Search specific collection:** +```json +{ + "name": "search", + "arguments": { + "vector": [0.1, 0.2, 0.3, ..., 0.5], + "collection_name": "code_embeddings", + "limit": 10 + } +} +``` + +**Example - Search all collections:** +```json +{ + "name": "search", + "arguments": { + "vector": [0.1, 0.2, 0.3, ..., 0.5], + "limit": 10 + } +} +``` + +**Example - Search with exclusions:** +```json +{ + "name": "search", + "arguments": { + "vector": [0.1, 0.2, 0.3, ..., 0.5], + "exclude_collections": ["test_data", "archived"], + "limit": 10 + } +} +``` + +**Response:** +```json +[ + { + "vector_id": 123, + "collection_name": "code_embeddings", + "metadata": { + "file": "main.go", + "line": 42 + }, + "distance": 0.234 + }, + { + "vector_id": 456, + "collection_name": "documentation", + "metadata": { + "doc": "api.md", + "section": "authentication" + }, + "distance": 0.456 + } +] +``` + +Distance is cosine distance (lower = more similar). 
+ +## Using with MCP Clients + +### Claude Desktop + +Add to your Claude Desktop configuration (`~/Library/Application Support/Claude/claude_desktop_config.json` on macOS): + +```json +{ + "mcpServers": { + "sqlite-vec": { + "command": "docker", + "args": [ + "run", + "--rm", + "-i", + "-v", "/path/to/data:/data", + "-e", "DB_PATH=/data/vectors.db", + "-e", "VECTOR_DIMENSION=1536", + "jimclark106/vector-db:latest" + ] + } + } +} +``` + +### Docker MCP Gateway + +Add to your MCP Gateway catalog: + +```yaml +- name: sqlite-vec + description: Vector database for semantic search + image: jimclark106/vector-db:latest + env: + VECTOR_DIMENSION: "1536" + DB_PATH: "/data/vectors.db" + volumes: + - ./data:/data +``` + +### Direct Usage (for testing) + +```bash +# Start the server +docker run --rm -i \ + -v $(pwd)/data:/data \ + -e VECTOR_DIMENSION=1536 \ + jimclark106/vector-db:latest + +# The server will communicate via stdio using the MCP protocol +# Send MCP requests as JSON-RPC messages +``` + +## Data Persistence + +The SQLite database is stored in the `./data` directory (mounted as a volume). This ensures your vectors persist across container restarts. 
+ +To backup your data: +```bash +# Copy the database file +cp ./data/vectors.db ./backup/vectors-$(date +%Y%m%d).db +``` + +To reset/clear all data: +```bash +# Stop the container and remove the database +docker stop sqlite-vec-mcp +rm -f ./data/vectors.db +``` + +## Architecture + +- **Protocol**: Model Context Protocol (MCP) over stdio +- **Database**: SQLite with sqlite-vec extension +- **SDK**: Official golang MCP SDK (`github.com/modelcontextprotocol/go-sdk`) +- **Vector Storage**: Vectors stored as BLOBs using `vec_f32()` +- **Search**: Cosine distance similarity using `vec_distance_cosine()` +- **Metadata**: Flexible JSON storage per vector + +## Development + +### Using Make + +The project includes a Makefile for common tasks: + +```bash +# Show all available commands +make help + +# Build Docker image (multi-platform) +make build + +# Build and push to Docker registry +make build-push + +# Run the server locally in Docker +make run + +# Build binary locally +make build-local + +# Run tests +make test + +# Run linters +make lint + +# Format code +make fmt + +# Download dependencies +make deps + +# Clean up artifacts +make clean +``` + +### Manual Build + +```bash +# Install dependencies +go mod download + +# Build the binary +CGO_ENABLED=1 go build -tags "sqlite_extensions" -o sqlite-vec-mcp main.go + +# Run locally (for testing) +./sqlite-vec-mcp +``` + +### Running Tests + +```bash +make test +# or +go test ./... +``` + +### Linting + +```bash +make lint +# or +golangci-lint run ./... 
+``` + +### Building for Docker Registry + +```bash +# Build for multiple platforms and push to jimclark106/vector-db +make build-push + +# Or with a specific tag +TAG=v1.0.0 make build-push +``` + +## Performance Notes + +- SQLite is single-writer, so concurrent writes are serialized +- Suitable for moderate workloads (thousands to hundreds of thousands of vectors) +- For larger scale (millions of vectors), consider Qdrant, Weaviate, or Pinecone +- Search performance is linear O(n) - no index structure yet in sqlite-vec +- MCP stdio protocol is efficient for single-client scenarios + +## Platform Support + +**Supported Platform**: `linux/amd64` only + +The Docker image is currently built for `linux/amd64` (x86_64) only. The sqlite-vec prebuilt binaries for ARM64 are 32-bit and incompatible with 64-bit ARM systems. + +For development on Apple Silicon (M1/M2/M3) Macs, you can: +- Deploy to a linux/amd64 environment (cloud, CI/CD, production servers) +- Build sqlite-vec from source for native ARM64 (advanced, not covered here) +- Use x86_64 emulation (may have compatibility issues) + +## Troubleshooting + +### Server won't start + +Check logs: +```bash +docker logs sqlite-vec-mcp +``` + +Verify sqlite-vec extension is loaded: +```bash +docker run --rm -it --platform linux/amd64 jimclark106/vector-db:latest sqlite3 /tmp/test.db "SELECT vec_version();" +``` + +### "unsupported relocation type" or "Exec format error" + +This error indicates an architecture mismatch. Ensure you're running on a linux/amd64 system or using proper platform emulation: +```bash +docker run --platform linux/amd64 ... +``` + +### Dimension mismatch errors + +Ensure all vectors have the same dimension as specified in `VECTOR_DIMENSION` environment variable. The dimension must be consistent across all operations. 
+ +### MCP connection issues + +- Ensure the server is running in stdio mode (not HTTP) +- Check that stdin/stdout are not buffered or redirected +- Verify the MCP client is sending valid JSON-RPC requests + +## License + +This example is provided as-is for educational and development purposes. diff --git a/examples/sqlite-vec/docker-compose.yml b/examples/sqlite-vec/docker-compose.yml new file mode 100644 index 00000000..11479f97 --- /dev/null +++ b/examples/sqlite-vec/docker-compose.yml @@ -0,0 +1,22 @@ +version: '3.8' + +services: + vector-db: + platform: linux/amd64 + build: + context: . + dockerfile: Dockerfile + container_name: sqlite-vec-mcp + stdin_open: true # Keep stdin open for MCP stdio protocol + tty: false # No TTY needed for MCP + volumes: + - ./data:/data + environment: + - DB_PATH=/data/vectors.db + - VECTOR_DIMENSION=1536 # Change based on your embedding model + restart: unless-stopped + # Note: This is an MCP stdio server - it communicates via stdin/stdout + # No health check or ports needed for stdio-based MCP servers + +volumes: + vector-data: diff --git a/examples/sqlite-vec/go.mod b/examples/sqlite-vec/go.mod new file mode 100644 index 00000000..c0fabdcd --- /dev/null +++ b/examples/sqlite-vec/go.mod @@ -0,0 +1,11 @@ +module github.com/docker/mcp-gateway/examples/sqlite-vec + +go 1.24 + +require ( + github.com/google/jsonschema-go v0.3.0 + github.com/mattn/go-sqlite3 v1.14.22 + github.com/modelcontextprotocol/go-sdk v1.0.0 +) + +require github.com/yosida95/uritemplate/v3 v3.0.2 // indirect diff --git a/examples/sqlite-vec/main.go b/examples/sqlite-vec/main.go new file mode 100644 index 00000000..dc4be880 --- /dev/null +++ b/examples/sqlite-vec/main.go @@ -0,0 +1,655 @@ +package main + +import ( + "context" + "database/sql" + "encoding/json" + "errors" + "fmt" + "log" + "os" + "os/signal" + "strconv" + "strings" + "syscall" + + "github.com/google/jsonschema-go/jsonschema" + "github.com/mattn/go-sqlite3" + 
"github.com/modelcontextprotocol/go-sdk/mcp" +) + +const ( + // Default vector dimension (e.g., OpenAI ada-002 = 1536) + defaultDimension = 1536 +) + +func init() { + // Register sqlite3 driver with extension loading enabled + sql.Register("sqlite3_with_extensions", + &sqlite3.SQLiteDriver{ + ConnectHook: func(conn *sqlite3.SQLiteConn) error { + extPath := os.Getenv("VEC_EXT_PATH") + if extPath == "" { + extPath = "/usr/local/lib/sqlite/vec0" + } + return conn.LoadExtension(extPath, "sqlite3_vec_init") + }, + }) +} + +type VectorServer struct { + db *sql.DB + dim int +} + +// API Models +type Collection struct { + ID int `json:"id"` + Name string `json:"name"` + CreatedAt string `json:"created_at"` +} + +type Vector struct { + ID int `json:"id"` + CollectionID int `json:"collection_id"` + Metadata json.RawMessage `json:"metadata,omitempty"` + CreatedAt string `json:"created_at"` +} + +type AddVectorRequest struct { + CollectionName string `json:"collection_name"` + Vector []float32 `json:"vector"` + Metadata json.RawMessage `json:"metadata,omitempty"` +} + +type SearchRequest struct { + Vector []float32 `json:"vector"` + Limit int `json:"limit,omitempty"` + CollectionName string `json:"collection_name,omitempty"` // Deprecated: use collection_names instead + CollectionNames []string `json:"collection_names,omitempty"` // Search in specific collections (empty = search all) + ExcludeCollections []string `json:"exclude_collections,omitempty"` // Collections to exclude from search +} + +type SearchResult struct { + VectorID int `json:"vector_id"` + CollectionName string `json:"collection_name"` + Metadata json.RawMessage `json:"metadata,omitempty"` + Distance float64 `json:"distance"` +} + +func main() { + dimension := defaultDimension + if dim := os.Getenv("VECTOR_DIMENSION"); dim != "" { + if d, err := strconv.Atoi(dim); err == nil { + dimension = d + } + } + + dbPath := os.Getenv("DB_PATH") + if dbPath == "" { + dbPath = "/data/vectors.db" + } + + // Setup signal 
handling + ctx, done := signal.NotifyContext(context.Background(), + syscall.SIGINT, syscall.SIGTERM) + defer done() + + // Direct logs to stderr (stdout is used for MCP protocol) + log.SetOutput(os.Stderr) + + // Open database with custom driver that has vec extension loaded + db, err := sql.Open("sqlite3_with_extensions", dbPath) + if err != nil { + log.Fatal("Failed to open database:", err) + } + defer db.Close() + + // Enable foreign keys + if _, err := db.Exec("PRAGMA foreign_keys = ON"); err != nil { + log.Fatal("Failed to enable foreign keys:", err) + } + + vs := &VectorServer{db: db, dim: dimension} + + // Initialize schema if needed + if err := vs.initSchema(); err != nil { + log.Fatal("Failed to initialize schema:", err) + } + + // Create MCP server + server := mcp.NewServer( + &mcp.Implementation{ + Name: "sqlite-vec", + Version: "1.0.0", + }, + &mcp.ServerOptions{ + HasTools: true, + }, + ) + + // Register all tools + vs.registerTools(server) + + // Create transport with logging + transport := &mcp.LoggingTransport{ + Transport: &mcp.StdioTransport{}, + Writer: os.Stderr, + } + + // Run server + errCh := make(chan error, 1) + go func() { + log.Printf("[INFO] MCP sqlite-vec server starting (dimension=%d)", dimension) + defer log.Print("[INFO] MCP sqlite-vec server stopped") + + if err := server.Run(ctx, transport); err != nil && !errors.Is(err, mcp.ErrConnectionClosed) { + select { + case errCh <- err: + default: + } + } + }() + + // Wait for error or context cancellation + select { + case err := <-errCh: + log.Printf("[ERROR] Server failed: %s", err) + os.Exit(1) + case <-ctx.Done(): + log.Print("[INFO] Shutdown signal received") + } +} + +func (vs *VectorServer) registerTools(server *mcp.Server) { + // Tool 1: list_collections + server.AddTool( + &mcp.Tool{ + Name: "list_collections", + Description: "List all vector collections in the database", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{}, + }, + 
OutputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "collections": { + Type: "array", + Items: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "id": { + Type: "integer", + Description: "Unique identifier for the collection", + }, + "name": { + Type: "string", + Description: "Name of the collection", + }, + "created_at": { + Type: "string", + Description: "Timestamp when the collection was created", + }, + }, + Required: []string{"id", "name", "created_at"}, + }, + }, + }, + Required: []string{"collections"}, + }, + }, + vs.handleListCollections, + ) + + // Tool 2: create_collection + server.AddTool( + &mcp.Tool{ + Name: "create_collection", + Description: "Create a new vector collection", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "Name of the collection to create", + }, + }, + Required: []string{"name"}, + }, + }, + vs.handleCreateCollection, + ) + + // Tool 3: delete_collection + server.AddTool( + &mcp.Tool{ + Name: "delete_collection", + Description: "Delete a collection and all its vectors (cascade delete)", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "Name of the collection to delete", + }, + }, + Required: []string{"name"}, + }, + }, + vs.handleDeleteCollection, + ) + + // Tool 4: add_vector + server.AddTool( + &mcp.Tool{ + Name: "add_vector", + Description: "Add a vector to a collection (creates collection if it doesn't exist)", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "collection_name": { + Type: "string", + Description: "Name of the collection", + }, + "vector": { + Type: "array", + Description: fmt.Sprintf("Vector embedding (must be %d dimensions)", vs.dim), + Items: &jsonschema.Schema{ + Type: "number", + }, + }, + 
"metadata": { + Type: "object", + Description: "Optional metadata as JSON object", + }, + }, + Required: []string{"collection_name", "vector"}, + }, + }, + vs.handleAddVector, + ) + + // Tool 5: delete_vector + server.AddTool( + &mcp.Tool{ + Name: "delete_vector", + Description: "Delete a vector by its ID", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "id": { + Type: "integer", + Description: "ID of the vector to delete", + }, + }, + Required: []string{"id"}, + }, + }, + vs.handleDeleteVector, + ) + + // Tool 6: search + server.AddTool( + &mcp.Tool{ + Name: "search", + Description: "Search for similar vectors using cosine distance", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "vector": { + Type: "array", + Description: fmt.Sprintf("Query vector (must be %d dimensions)", vs.dim), + Items: &jsonschema.Schema{ + Type: "number", + }, + }, + "limit": { + Type: "integer", + Description: "Maximum number of results to return (default: 10)", + }, + "collection_name": { + Type: "string", + Description: "Optional: search only within this single collection (deprecated, use collection_names instead)", + }, + "collection_names": { + Type: "array", + Description: "Optional: search only within these collections. 
If empty, searches all collections.", + Items: &jsonschema.Schema{ + Type: "string", + }, + }, + "exclude_collections": { + Type: "array", + Description: "Optional: search all collections except these", + Items: &jsonschema.Schema{ + Type: "string", + }, + }, + }, + Required: []string{"vector"}, + }, + }, + vs.handleSearch, + ) +} + +func (vs *VectorServer) initSchema() error { + schema := ` + CREATE TABLE IF NOT EXISTS collections ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ); + + CREATE TABLE IF NOT EXISTS vectors ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + collection_id INTEGER NOT NULL, + vector_blob BLOB NOT NULL, + metadata TEXT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (collection_id) REFERENCES collections(id) ON DELETE CASCADE + ); + + CREATE INDEX IF NOT EXISTS idx_vectors_collection ON vectors(collection_id); + CREATE INDEX IF NOT EXISTS idx_collections_name ON collections(name); + ` + _, err := vs.db.Exec(schema) + return err +} + +// Tool handlers + +func (vs *VectorServer) handleListCollections(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + rows, err := vs.db.Query("SELECT id, name, created_at FROM collections ORDER BY created_at DESC") + if err != nil { + return nil, fmt.Errorf("failed to query collections: %w", err) + } + defer rows.Close() + + var collections []Collection + for rows.Next() { + var c Collection + if err := rows.Scan(&c.ID, &c.Name, &c.CreatedAt); err != nil { + return nil, fmt.Errorf("failed to scan collection: %w", err) + } + collections = append(collections, c) + } + + result := map[string]any{ + "collections": collections, + } + + resultJSON, err := json.MarshalIndent(result, "", " ") + if err != nil { + return nil, fmt.Errorf("failed to marshal collections: %w", err) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{ + &mcp.TextContent{Text: string(resultJSON)}, + }, + }, nil +} + +func 
(vs *VectorServer) handleCreateCollection(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + var params struct { + Name string `json:"name"` + } + + if err := parseArguments(req, ¶ms); err != nil { + return nil, err + } + + if params.Name == "" { + return nil, fmt.Errorf("collection name is required") + } + + result, err := vs.db.Exec("INSERT INTO collections (name) VALUES (?)", params.Name) + if err != nil { + return nil, fmt.Errorf("collection already exists or database error: %w", err) + } + + id, _ := result.LastInsertId() + response := map[string]any{"id": id, "name": params.Name} + + resultJSON, _ := json.MarshalIndent(response, "", " ") + + return &mcp.CallToolResult{ + Content: []mcp.Content{ + &mcp.TextContent{Text: string(resultJSON)}, + }, + }, nil +} + +func (vs *VectorServer) handleDeleteCollection(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + var params struct { + Name string `json:"name"` + } + + if err := parseArguments(req, ¶ms); err != nil { + return nil, err + } + + result, err := vs.db.Exec("DELETE FROM collections WHERE name = ?", params.Name) + if err != nil { + return nil, fmt.Errorf("failed to delete collection: %w", err) + } + + rows, _ := result.RowsAffected() + if rows == 0 { + return nil, fmt.Errorf("collection not found: %s", params.Name) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{ + &mcp.TextContent{Text: fmt.Sprintf("Collection '%s' deleted successfully", params.Name)}, + }, + }, nil +} + +func (vs *VectorServer) handleAddVector(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + var params AddVectorRequest + + if err := parseArguments(req, ¶ms); err != nil { + return nil, err + } + + if len(params.Vector) != vs.dim { + return nil, fmt.Errorf("vector dimension mismatch: expected %d, got %d", vs.dim, len(params.Vector)) + } + + // Get or create collection + var collectionID int + err := vs.db.QueryRow("SELECT id FROM collections 
WHERE name = ?", params.CollectionName).Scan(&collectionID) + if err == sql.ErrNoRows { + result, err := vs.db.Exec("INSERT INTO collections (name) VALUES (?)", params.CollectionName) + if err != nil { + return nil, fmt.Errorf("failed to create collection: %w", err) + } + id, _ := result.LastInsertId() + collectionID = int(id) + } else if err != nil { + return nil, fmt.Errorf("database error: %w", err) + } + + // Convert float32 slice to JSON array for vec_f32 + vectorJSON, _ := json.Marshal(params.Vector) + + metadata := params.Metadata + if metadata == nil { + metadata = json.RawMessage("{}") + } + + result, err := vs.db.Exec( + "INSERT INTO vectors (collection_id, vector_blob, metadata) VALUES (?, vec_f32(?), ?)", + collectionID, string(vectorJSON), string(metadata), + ) + if err != nil { + return nil, fmt.Errorf("failed to insert vector: %w", err) + } + + id, _ := result.LastInsertId() + response := map[string]any{ + "id": id, + "collection_id": collectionID, + } + + resultJSON, _ := json.MarshalIndent(response, "", " ") + + return &mcp.CallToolResult{ + Content: []mcp.Content{ + &mcp.TextContent{Text: string(resultJSON)}, + }, + }, nil +} + +func (vs *VectorServer) handleDeleteVector(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + var params struct { + ID int `json:"id"` + } + + if err := parseArguments(req, ¶ms); err != nil { + return nil, err + } + + result, err := vs.db.Exec("DELETE FROM vectors WHERE id = ?", params.ID) + if err != nil { + return nil, fmt.Errorf("failed to delete vector: %w", err) + } + + rows, _ := result.RowsAffected() + if rows == 0 { + return nil, fmt.Errorf("vector not found: %d", params.ID) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{ + &mcp.TextContent{Text: fmt.Sprintf("Vector %d deleted successfully", params.ID)}, + }, + }, nil +} + +func (vs *VectorServer) handleSearch(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + var params SearchRequest + + if 
err := parseArguments(req, ¶ms); err != nil { + return nil, err + } + + if len(params.Vector) != vs.dim { + return nil, fmt.Errorf("vector dimension mismatch: expected %d, got %d", vs.dim, len(params.Vector)) + } + + if params.Limit == 0 { + params.Limit = 10 + } + + // Support backward compatibility: if collection_name is set, add it to collection_names + if params.CollectionName != "" && len(params.CollectionNames) == 0 { + params.CollectionNames = []string{params.CollectionName} + } + + vectorJSON, _ := json.Marshal(params.Vector) + + var rows *sql.Rows + var err error + + if len(params.CollectionNames) > 0 { + // Search within specific collections using IN clause + placeholders := make([]string, len(params.CollectionNames)) + args := []any{string(vectorJSON)} + for i, name := range params.CollectionNames { + placeholders[i] = "?" + args = append(args, name) + } + args = append(args, params.Limit) + + query := fmt.Sprintf(` + SELECT v.id, c.name, v.metadata, vec_distance_cosine(v.vector_blob, vec_f32(?)) as distance + FROM vectors v + JOIN collections c ON v.collection_id = c.id + WHERE c.name IN (%s) + ORDER BY distance + LIMIT ? + `, strings.Join(placeholders, ",")) + + rows, err = vs.db.Query(query, args...) + } else if len(params.ExcludeCollections) > 0 { + // Search across all collections EXCEPT the excluded ones + placeholders := make([]string, len(params.ExcludeCollections)) + args := []any{string(vectorJSON)} + for i, name := range params.ExcludeCollections { + placeholders[i] = "?" + args = append(args, name) + } + args = append(args, params.Limit) + + query := fmt.Sprintf(` + SELECT v.id, c.name, v.metadata, vec_distance_cosine(v.vector_blob, vec_f32(?)) as distance + FROM vectors v + JOIN collections c ON v.collection_id = c.id + WHERE c.name NOT IN (%s) + ORDER BY distance + LIMIT ? + `, strings.Join(placeholders, ",")) + + rows, err = vs.db.Query(query, args...) 
+ } else { + // Search across all collections + rows, err = vs.db.Query(` + SELECT v.id, c.name, v.metadata, vec_distance_cosine(v.vector_blob, vec_f32(?)) as distance + FROM vectors v + JOIN collections c ON v.collection_id = c.id + ORDER BY distance + LIMIT ? + `, string(vectorJSON), params.Limit) + } + + if err != nil { + return nil, fmt.Errorf("search failed: %w", err) + } + defer rows.Close() + + var results []SearchResult + for rows.Next() { + var r SearchResult + var metadata sql.NullString + if err := rows.Scan(&r.VectorID, &r.CollectionName, &metadata, &r.Distance); err != nil { + return nil, fmt.Errorf("failed to scan result: %w", err) + } + if metadata.Valid && metadata.String != "" { + r.Metadata = json.RawMessage(metadata.String) + } + results = append(results, r) + } + + resultJSON, err := json.MarshalIndent(results, "", " ") + if err != nil { + return nil, fmt.Errorf("failed to marshal results: %w", err) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{ + &mcp.TextContent{Text: string(resultJSON)}, + }, + }, nil +} + +// Helper function to parse arguments from CallToolRequest +func parseArguments(req *mcp.CallToolRequest, params any) error { + if req.Params.Arguments == nil { + return fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, params); err != nil { + return fmt.Errorf("failed to parse arguments: %w", err) + } + + return nil +} diff --git a/examples/sqlite-vec/schema.sql b/examples/sqlite-vec/schema.sql new file mode 100644 index 00000000..574778c5 --- /dev/null +++ b/examples/sqlite-vec/schema.sql @@ -0,0 +1,56 @@ +-- sqlite-vec schema for collections and vector storage +-- Load the vector extension +.load /usr/local/lib/sqlite/vec0 + +-- Collections table: organize vectors into named groups +-- All collections use the same embedding model dimension +CREATE TABLE IF 
NOT EXISTS collections ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Vectors table: store embeddings with metadata +CREATE TABLE IF NOT EXISTS vectors ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + collection_id INTEGER NOT NULL, + vector_blob BLOB NOT NULL, + metadata TEXT, -- JSON for flexible metadata storage + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (collection_id) REFERENCES collections(id) ON DELETE CASCADE +); + +-- Indexes for performance +CREATE INDEX IF NOT EXISTS idx_vectors_collection ON vectors(collection_id); +CREATE INDEX IF NOT EXISTS idx_collections_name ON collections(name); + +-- Example queries: +-- +-- Insert a collection: +-- INSERT INTO collections (name) VALUES ('code_embeddings'); +-- +-- Insert a vector (example with 3D vector): +-- INSERT INTO vectors (collection_id, vector_blob, metadata) +-- VALUES (1, vec_f32(json_array(0.1, 0.2, 0.3)), '{"file": "main.go", "line": 42}'); +-- +-- Search within a collection (top 10 similar vectors): +-- SELECT v.id, v.metadata, vec_distance_cosine(v.vector_blob, vec_f32(?)) as distance +-- FROM vectors v +-- WHERE v.collection_id = ? 
+-- ORDER BY distance +-- LIMIT 10; +-- +-- Search across ALL collections: +-- SELECT c.name as collection, v.id, v.metadata, vec_distance_cosine(v.vector_blob, vec_f32(?)) as distance +-- FROM vectors v +-- JOIN collections c ON v.collection_id = c.id +-- ORDER BY distance +-- LIMIT 10; +-- +-- Search across all collections EXCEPT specific ones: +-- SELECT c.name as collection, v.id, v.metadata, vec_distance_cosine(v.vector_blob, vec_f32(?)) as distance +-- FROM vectors v +-- JOIN collections c ON v.collection_id = c.id +-- WHERE c.name NOT IN ('collection1', 'collection2') +-- ORDER BY distance +-- LIMIT 10; diff --git a/examples/sqlite-vec/test.sh b/examples/sqlite-vec/test.sh new file mode 100755 index 00000000..9e3b86e1 --- /dev/null +++ b/examples/sqlite-vec/test.sh @@ -0,0 +1,168 @@ +#!/bin/bash + +# Test script for SQLite-Vec Vector Database Server +# Demonstrates all API endpoints + +set -e + +BASE_URL="http://localhost:8080" +VECTOR_DIM=1536 + +echo "=== SQLite-Vec API Test Script ===" +echo "" + +# Helper function to generate a random vector +generate_vector() { + python3 -c "import json, random; print(json.dumps([random.random() for _ in range($VECTOR_DIM)]))" +} + +# 1. Health Check +echo "1. Testing health check..." +curl -s $BASE_URL/health | jq . +echo "" + +# 2. List Collections (should be empty initially) +echo "2. Listing collections (initially empty)..." +curl -s $BASE_URL/collections | jq . +echo "" + +# 3. Create Collections +echo "3. Creating collections..." +curl -s -X POST $BASE_URL/collections \ + -H "Content-Type: application/json" \ + -d '{"name": "code_embeddings"}' | jq . + +curl -s -X POST $BASE_URL/collections \ + -H "Content-Type: application/json" \ + -d '{"name": "documentation"}' | jq . +echo "" + +# 4. List Collections Again +echo "4. Listing collections after creation..." +curl -s $BASE_URL/collections | jq . +echo "" + +# 5. Add Vectors to code_embeddings collection +echo "5. Adding vectors to code_embeddings collection..." 
+VECTOR1=$(generate_vector) +curl -s -X POST $BASE_URL/vectors \ + -H "Content-Type: application/json" \ + -d "{ + \"collection_name\": \"code_embeddings\", + \"vector\": $VECTOR1, + \"metadata\": { + \"file\": \"server.go\", + \"function\": \"handleRequest\", + \"line\": 100 + } + }" | jq . + +VECTOR2=$(generate_vector) +curl -s -X POST $BASE_URL/vectors \ + -H "Content-Type: application/json" \ + -d "{ + \"collection_name\": \"code_embeddings\", + \"vector\": $VECTOR2, + \"metadata\": { + \"file\": \"client.go\", + \"function\": \"connect\", + \"line\": 50 + } + }" | jq . +echo "" + +# 6. Add Vectors to documentation collection +echo "6. Adding vectors to documentation collection..." +VECTOR3=$(generate_vector) +curl -s -X POST $BASE_URL/vectors \ + -H "Content-Type: application/json" \ + -d "{ + \"collection_name\": \"documentation\", + \"vector\": $VECTOR3, + \"metadata\": { + \"doc\": \"api.md\", + \"section\": \"authentication\" + } + }" | jq . + +VECTOR4=$(generate_vector) +curl -s -X POST $BASE_URL/vectors \ + -H "Content-Type: application/json" \ + -d "{ + \"collection_name\": \"documentation\", + \"vector\": $VECTOR4, + \"metadata\": { + \"doc\": \"deployment.md\", + \"section\": \"docker\" + } + }" | jq . +echo "" + +# 7. Search within a specific collection +echo "7. Searching within 'code_embeddings' collection..." +QUERY_VECTOR=$(generate_vector) +curl -s -X POST $BASE_URL/search \ + -H "Content-Type: application/json" \ + -d "{ + \"vector\": $QUERY_VECTOR, + \"collection_name\": \"code_embeddings\", + \"limit\": 5 + }" | jq . +echo "" + +# 8. Search across all collections +echo "8. Searching across ALL collections..." +curl -s -X POST $BASE_URL/search \ + -H "Content-Type: application/json" \ + -d "{ + \"vector\": $QUERY_VECTOR, + \"limit\": 10 + }" | jq . +echo "" + +# 9. Delete a specific vector +echo "9. Deleting vector with id=1..." +curl -s -X DELETE $BASE_URL/vectors/1 | jq . +echo "" + +# 10. Search again to verify deletion +echo "10. 
Searching again to verify vector deletion..." +curl -s -X POST $BASE_URL/search \ + -H "Content-Type: application/json" \ + -d "{ + \"vector\": $QUERY_VECTOR, + \"collection_name\": \"code_embeddings\", + \"limit\": 5 + }" | jq . +echo "" + +# 11. Delete entire collection +echo "11. Deleting 'documentation' collection..." +curl -s -X DELETE $BASE_URL/collections/documentation | jq . +echo "" + +# 12. List collections after deletion +echo "12. Listing collections after deletion..." +curl -s $BASE_URL/collections | jq . +echo "" + +# 13. Search to verify collection deletion +echo "13. Searching all collections (documentation should be gone)..." +curl -s -X POST $BASE_URL/search \ + -H "Content-Type: application/json" \ + -d "{ + \"vector\": $QUERY_VECTOR, + \"limit\": 10 + }" | jq . +echo "" + +echo "=== Test Complete ===" +echo "" +echo "Summary:" +echo " ✓ Health check" +echo " ✓ Collection creation" +echo " ✓ Vector insertion" +echo " ✓ Collection-specific search" +echo " ✓ Cross-collection search" +echo " ✓ Vector deletion" +echo " ✓ Collection deletion (cascade)" diff --git a/examples/tool_registrations/README.html b/examples/tool_registrations/README.html new file mode 100644 index 00000000..d042fcaa --- /dev/null +++ b/examples/tool_registrations/README.html @@ -0,0 +1,167 @@ +
+

Tool Registrations Serializer

+

This tool initializes a gateway with configured MCP servers and serializes their tool registrations to disk in JSON format.

+

Purpose

+

The tool registration serializer is useful for:

+
    +
  • Introspection: Understanding what tools are available across all enabled servers
  • +
  • Documentation: Generating tool catalogs for external use
  • +
  • Testing: Validating that tools are being registered correctly
  • +
  • Integration: Providing tool metadata to other systems that need to know about available tools
  • +
+

Usage

+

Basic Usage

+
# Serialize tool registrations from all enabled servers (in registry.yaml)
+go run main.go
+
+# Specify a custom output file
+go run main.go -output my-tools.json
+
+# Serialize only specific servers
+go run main.go -server filesystem -server postgres
+
+# Use custom configuration files
+go run main.go \
+  -catalog /path/to/catalog.yaml \
+  -registry /path/to/registry.yaml \
+  -config /path/to/config.yaml \
+  -output tools.json
+
+

Flags

+
    +
  • -catalog <path>: Path to MCP server catalog (default: docker-mcp.yaml)
  • +
  • -registry <path>: Path to registry file with enabled servers (default: registry.yaml)
  • +
  • -config <path>: Path to config file with server configurations (default: config.yaml)
  • +
  • -tools <path>: Path to tools config file (default: tools.yaml)
  • +
  • -secrets <path>: Path to secrets (default: docker-desktop)
  • +
  • -output <path>: Output file for tool registrations (default: tool-registrations.json)
  • +
  • -server <name>: Server name to include (can be repeated, omit to use all enabled servers)
  • +
+

Examples

+

Example 1: Export All Enabled Tools

+
cd ~/.docker/mcp
+go run /path/to/examples/tool_registrations/main.go
+
+

Output: tool-registrations.json with all tools from enabled servers

+

Example 2: Export Tools from Specific Servers

+
go run main.go \
+  -server filesystem \
+  -server postgres \
+  -server brave-search \
+  -output web-tools.json
+
+

Example 3: Use with Custom Paths

+
go run main.go \
+  -catalog ./my-catalog.yaml \
+  -registry ./my-registry.yaml \
+  -config ./my-config.yaml \
+  -output ./output/tools.json
+
+

Output Format

+

The tool generates a JSON file with the following structure:

+
{
+  "tool-name": {
+    "server_name": "server-name",
+    "tool": {
+      "name": "tool-name",
+      "description": "Tool description",
+      "inputSchema": {
+        "type": "object",
+        "properties": {
+          "param1": {
+            "type": "string",
+            "description": "Parameter description"
+          }
+        },
+        "required": ["param1"]
+      }
+    }
+  }
+}
+
+

Example Output

+
{
+  "list_directory": {
+    "server_name": "filesystem",
+    "tool": {
+      "name": "list_directory",
+      "description": "List contents of a directory",
+      "inputSchema": {
+        "type": "object",
+        "properties": {
+          "path": {
+            "type": "string",
+            "description": "Directory path to list"
+          }
+        },
+        "required": ["path"]
+      }
+    }
+  },
+  "read_file": {
+    "server_name": "filesystem",
+    "tool": {
+      "name": "read_file",
+      "description": "Read contents of a file",
+      "inputSchema": {
+        "type": "object",
+        "properties": {
+          "path": {
+            "type": "string",
+            "description": "File path to read"
+          }
+        },
+        "required": ["path"]
+      }
+    }
+  }
+}
+
+

How It Works

+
    +
  1. Gateway Initialization: Creates a gateway instance with the specified configuration
  2. +
  3. Configuration Loading: Reads server catalog, registry, and configuration files
  4. +
  5. Server Connection: Connects to each enabled MCP server
  6. +
  7. Tool Discovery: Lists all tools available from each server
  8. +
  9. Registration: Collects tool registrations from all servers
  10. +
  11. Serialization: Converts tool registrations to JSON (excluding non-serializable handler functions)
  12. +
  13. Output: Writes the JSON to the specified output file
  14. +
+

Notes

+
    +
  • The tool runs in "static" mode, so it won't pull Docker images
  • +
  • Handler functions are not serialized (they are runtime-only)
  • +
  • The tool respects the same configuration files as the main gateway
  • +
  • Use -server flags to limit which servers' tools are exported
  • +
  • Omitting -server will export tools from all enabled servers in registry.yaml
  • +
+

Integration Example

+

You can use this tool in scripts to generate tool documentation:

+
#!/bin/bash
+# Export tool registrations
+go run main.go -output tools.json
+
+# Generate markdown documentation from JSON
+jq -r 'to_entries[] | "## \(.value.tool.name)\n\n**Server**: \(.value.server_name)\n\n\(.value.tool.description)\n"' tools.json > TOOLS.md
+
+

Troubleshooting

+

Error: "reading configuration: no such file"

+

Make sure you're running from the correct directory or provide absolute paths to configuration files.

+

Error: "listing resources: unable to connect to server"

+

Ensure Docker is running and the specified servers are properly configured in your catalog and registry files.

+

Empty output file

+

Check that you have servers enabled in your registry.yaml file, or specify servers explicitly with -server flags.

+
+
\ No newline at end of file diff --git a/examples/tool_registrations/README.md b/examples/tool_registrations/README.md new file mode 100644 index 00000000..aea9749e --- /dev/null +++ b/examples/tool_registrations/README.md @@ -0,0 +1,183 @@ +# Tool Registrations Serializer + +This tool initializes a gateway with configured MCP servers and serializes their tool registrations to disk in JSON format. + +## Purpose + +The tool registration serializer is useful for: +- **Introspection**: Understanding what tools are available across all enabled servers +- **Documentation**: Generating tool catalogs for external use +- **Testing**: Validating that tools are being registered correctly +- **Integration**: Providing tool metadata to other systems that need to know about available tools + +## Usage + +### Basic Usage + +```bash +# Serialize tool registrations from all enabled servers (in registry.yaml) +go run main.go + +# Specify a custom output file +go run main.go -output my-tools.json + +# Serialize only specific servers +go run main.go -server filesystem -server postgres + +# Use custom configuration files +go run main.go \ + -catalog /path/to/catalog.yaml \ + -registry /path/to/registry.yaml \ + -config /path/to/config.yaml \ + -output tools.json +``` + +### Flags + +- `-catalog `: Path to MCP server catalog (default: `docker-mcp.yaml`) +- `-registry `: Path to registry file with enabled servers (default: `registry.yaml`) +- `-config `: Path to config file with server configurations (default: `config.yaml`) +- `-tools `: Path to tools config file (default: `tools.yaml`) +- `-secrets `: Path to secrets (default: `docker-desktop`) +- `-output `: Output file for tool registrations (default: `tool-registrations.json`) +- `-server `: Server name to include (can be repeated, omit to use all enabled servers) + +### Examples + +#### Example 1: Export All Enabled Tools + +```bash +cd ~/.docker/mcp +go run /path/to/examples/tool_registrations/main.go +``` + +Output: 
`tool-registrations.json` with all tools from enabled servers + +#### Example 2: Export Tools from Specific Servers + +```bash +go run main.go \ + -server filesystem \ + -server postgres \ + -server brave-search \ + -output web-tools.json +``` + +#### Example 3: Use with Custom Paths + +```bash +go run main.go \ + -catalog ./my-catalog.yaml \ + -registry ./my-registry.yaml \ + -config ./my-config.yaml \ + -output ./output/tools.json +``` + +## Output Format + +The tool generates a JSON file with the following structure: + +```json +{ + "tool-name": { + "server_name": "server-name", + "tool": { + "name": "tool-name", + "description": "Tool description", + "inputSchema": { + "type": "object", + "properties": { + "param1": { + "type": "string", + "description": "Parameter description" + } + }, + "required": ["param1"] + } + } + } +} +``` + +### Example Output + +```json +{ + "list_directory": { + "server_name": "filesystem", + "tool": { + "name": "list_directory", + "description": "List contents of a directory", + "inputSchema": { + "type": "object", + "properties": { + "path": { + "type": "string", + "description": "Directory path to list" + } + }, + "required": ["path"] + } + } + }, + "read_file": { + "server_name": "filesystem", + "tool": { + "name": "read_file", + "description": "Read contents of a file", + "inputSchema": { + "type": "object", + "properties": { + "path": { + "type": "string", + "description": "File path to read" + } + }, + "required": ["path"] + } + } + } +} +``` + +## How It Works + +1. **Gateway Initialization**: Creates a gateway instance with the specified configuration +2. **Configuration Loading**: Reads server catalog, registry, and configuration files +3. **Server Connection**: Connects to each enabled MCP server +4. **Tool Discovery**: Lists all tools available from each server +5. **Registration**: Collects tool registrations from all servers +6. 
**Serialization**: Converts tool registrations to JSON (excluding non-serializable handler functions) +7. **Output**: Writes the JSON to the specified output file + +## Notes + +- The tool runs in "static" mode, so it won't pull Docker images +- Handler functions are not serialized (they are runtime-only) +- The tool respects the same configuration files as the main gateway +- Use `-server` flags to limit which servers' tools are exported +- Omitting `-server` will export tools from all enabled servers in `registry.yaml` + +## Integration Example + +You can use this tool in scripts to generate tool documentation: + +```bash +#!/bin/bash +# Export tool registrations +go run main.go -output tools.json + +# Generate markdown documentation from JSON +jq -r 'to_entries[] | "## \(.value.tool.name)\n\n**Server**: \(.value.server_name)\n\n\(.value.tool.description)\n"' tools.json > TOOLS.md +``` + +## Troubleshooting + +### Error: "reading configuration: no such file" +Make sure you're running from the correct directory or provide absolute paths to configuration files. + +### Error: "listing resources: unable to connect to server" +Ensure Docker is running and the specified servers are properly configured in your catalog and registry files. + +### Empty output file +Check that you have servers enabled in your `registry.yaml` file, or specify servers explicitly with `-server` flags. 
diff --git a/examples/tool_registrations/assets/docker-mark-blue.svg b/examples/tool_registrations/assets/docker-mark-blue.svg new file mode 100644 index 00000000..eba6cc41 --- /dev/null +++ b/examples/tool_registrations/assets/docker-mark-blue.svg @@ -0,0 +1,12 @@ + + + + + + + \ No newline at end of file diff --git a/examples/tool_registrations/config.yaml b/examples/tool_registrations/config.yaml new file mode 100644 index 00000000..1bb9f340 --- /dev/null +++ b/examples/tool_registrations/config.yaml @@ -0,0 +1,36 @@ +arxiv-mcp-server: + storage_path: /Users/slim/arxiv +desktop-commander: + paths: + - /Users/slim +dockerhub: + username: jimclark106 +filesystem: + paths: + - /Users/slim +gmail-mcp: + email_address: slimslenderslacks@gmail.com +kubectl-mcp-server: + kubeconfig: /Users/slim/.kube/config +kubernetes: + config_path: /Users/slim +markdownify: + paths: + - /Users/slim +markitdown: + paths: + - /Users/slim +openapi-schema: + SchemaPath: /Users/slim +resend: + reply_to: slimslenderslacks@gmail.com + sender: slimslenderslacks@gmail.com +rust-mcp-filesystem: + allow_write: true + allowed_directories: + - /Users/slim +slack: + team_id: E7UHBTE03 + channel_ids: C08J27QSJJJ +elevenlabs: + data: /Users/slim/elevenlabs diff --git a/examples/tool_registrations/embeddings.md b/examples/tool_registrations/embeddings.md new file mode 100644 index 00000000..f945b7c2 --- /dev/null +++ b/examples/tool_registrations/embeddings.md @@ -0,0 +1,123 @@ +--- +theme: gaia +_class: lead +paginate: true +backgroundColor: #fff +backgroundImage: url('https://marp.app/assets/hero-background.svg') +--- + + + +![bg left:40% 80%](assets/docker-mark-blue.svg) + +# **Dynamic Tools** + +**mcp-find**: Tool Embeddings + +https://github.com/docker/mcp-gateway + +--- + +# Scenario + +* 41 _active_ servers +* => 335 tools +* => 209K tokens of tool description / request +* => need to be careful at $1.25/1M tokens + +--- + +# Tool Broker + +
+ sequenceDiagram + Agent->>Gateway: tools/list + Gateway-->>Agent: [empty] + Agent->>Gateway: mcp-find(context) + Gateway-->>Agent: [tools] + Agent->>Gateway: mcp-exec +
+ +--- + +# Improve **mcp-find** + +* currently using a keyword search on an in-memory index + +--- + +#### Dynamic Embeddings + +
+ flowchart LR + VectorStore["`**VectorStore** (sqlite-vec)`"] + Gateway-->VectorStore + Gateway-->EmbeddingModel + Gateway-->SummaryModel +
+ +* generate embeddings on the fly +* tool definitions are not always static + +--- + +# Dynamic Embeddings + +| Model | time(ms)/tool | dim | ctx len | size | Notes | +| :--- | :-- | :--- | :--- | :--- | :---- | +| DMR - embeddinggemma (302M) | 4871 | 768 | 2048 | 307M | needs tool summarization | +| DMR - qwen3-embedding (4B) | 920 | 2560 | 40960 | 2.32G | can embed un-summarized tool def | +| GPT (text-embedding-3-small) | 307 | 1536 | 8191 | - | can embed un-summarized tool def | +| DMR - nomic (137M) | | 768 | 2048 | 0.5G | needs tool summarization | + +* 40 servers will be about 4 megs for larger vec dimensions like qwen3. Roughly half that for text-embedding-3-small, and half again for the smaller dimensions. + +Pre-summarize tool definitions so that smaller models can be used: + +* let's look at gemma3 first. + * can't be used for embeddings - can summarize 4096 context but still too small + +--- + +# Current: mcp-find/mcp-exec +
+ sequenceDiagram + Agent->>Gateway: tools/list + Gateway-->>Agent: [empty] + Agent->>Gateway: mcp-find(context) + Gateway-->>Agent: [tools] + Agent->>Gateway: mcp-exec +
+ +--- + +# Custom Agent Loop + +
+ sequenceDiagram + Agent->>Gateway: tools/list + Gateway-->>Agent: [empty] + Agent->>Gateway: mcp-find(context) + Agent-->>Agent: update-tool-list + Agent->>Gateway: tools/call +
+ +--- + +# Next Steps + +1. compare `mcp-find/mcp-exec` with `custom agent loop` => blog + * community engagement: **mcp-exec** is weird +2. explore distributing static embeddings + * just for our catalog? + +--- + +# Marp MCP summary + +This slide deck was authored from [8f63ff759892d9b1d591e03e3d2e2dcbe1387012](https://github.com/docker/mcp-gateway/commits/main/). diff --git a/examples/tool_registrations/main.go b/examples/tool_registrations/main.go new file mode 100644 index 00000000..0106cd56 --- /dev/null +++ b/examples/tool_registrations/main.go @@ -0,0 +1,188 @@ +package main + +import ( + "context" + "encoding/json" + "flag" + "fmt" + "os" + "path/filepath" + + "github.com/docker/cli/cli/command" + "github.com/docker/cli/cli/flags" + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/cmd/docker-mcp/catalog" + "github.com/docker/mcp-gateway/pkg/docker" + "github.com/docker/mcp-gateway/pkg/gateway" + "github.com/docker/mcp-gateway/pkg/log" +) + +// SerializableToolRegistration is a JSON-serializable version of gateway.ToolRegistration +// Since mcp.ToolHandler is a function and can't be serialized, we exclude it +type SerializableToolRegistration struct { + ServerName string `json:"server_name"` + ServerTitle string `json:"server_title,omitempty"` + ServerDescription string `json:"server_description,omitempty"` + Tool *mcp.Tool `json:"tool"` +} + +func main() { + var ( + catalogPath = flag.String("catalog", catalog.DockerCatalogFilename, "Path to MCP server catalog") + registryPath = flag.String("registry", "registry.yaml", "Path to registry file") + configPath = flag.String("config", "config.yaml", "Path to config file") + toolsPath = flag.String("tools", "tools.yaml", "Path to tools config file") + secretsPath = flag.String("secrets", "docker-desktop", "Path to secrets") + outputPath = flag.String("output", "tool-registrations.json", "Output file for tool registrations") + static = flag.Bool("static", false, "Don't pull or 
start Docker containers") + servers stringSlice + ) + flag.Var(&servers, "server", "Server name to include (can be specified multiple times, empty = all enabled servers)") + flag.Parse() + + if err := run( + *catalogPath, + *registryPath, + *configPath, + *toolsPath, + *secretsPath, + *outputPath, + *static, + []string(servers), + ); err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } +} + +func run(catalogPath, registryPath, configPath, toolsPath, secretsPath, outputPath string, static bool, serverNames []string) error { + ctx := context.Background() + + // Initialize Docker CLI + dockerCli, err := command.NewDockerCli() + if err != nil { + return fmt.Errorf("creating docker CLI: %w", err) + } + if err := dockerCli.Initialize(&flags.ClientOptions{}); err != nil { + return fmt.Errorf("initializing docker CLI: %w", err) + } + + // Initialize Docker client + dockerClient := docker.NewClient(dockerCli) + + // Create gateway configuration + config := gateway.Config{ + ServerNames: serverNames, + CatalogPath: []string{catalogPath}, + RegistryPath: []string{registryPath}, + ConfigPath: []string{configPath}, + ToolsPath: []string{toolsPath}, + SecretsPath: secretsPath, + Options: gateway.Options{ + Cpus: 1, + Memory: "2Gb", + Transport: "stdio", + LogCalls: false, + BlockSecrets: false, + Verbose: true, + Static: static, + Watch: false, + }, + } + + log.Log("Creating gateway...") + g := gateway.NewGateway(config, dockerClient) + + // Read configuration + log.Log("Reading configuration...") + configuration, _, stopConfigWatcher, err := g.Configurator().Read(ctx) + if err != nil { + return fmt.Errorf("reading configuration: %w", err) + } + defer func() { _ = stopConfigWatcher() }() + + // Pull and verify Docker images (unless static mode is enabled) + if !static { + log.Log("Pulling Docker images...") + if err := g.PullAndVerify(ctx, configuration); err != nil { + return fmt.Errorf("pulling and verifying images: %w", err) + } + } + + // Initialize MCP 
server (required for reloadConfiguration) + log.Log("Initializing MCP server...") + mcpServer := mcp.NewServer(&mcp.Implementation{ + Name: "Tool Registration Extractor", + Version: "1.0.0", + }, &mcp.ServerOptions{ + HasPrompts: true, + HasResources: true, + HasTools: true, + }) + g.SetMCPServer(mcpServer) + + // Reload configuration to populate tool registrations + log.Log("Loading tool registrations...") + if err := g.ReloadConfiguration(ctx, configuration, nil, nil); err != nil { + return fmt.Errorf("reloading configuration: %w", err) + } + + // Get tool registrations + toolRegistrations := g.GetToolRegistrations() + log.Log(fmt.Sprintf("Found %d tool registrations", len(toolRegistrations))) + + // Convert to serializable format + serializableRegs := make(map[string]SerializableToolRegistration, len(toolRegistrations)) + for name, reg := range toolRegistrations { + // Look up server configuration to get description and title + serverConfig, _, found := configuration.Find(reg.ServerName) + + entry := SerializableToolRegistration{ + ServerName: reg.ServerName, + Tool: reg.Tool, + } + + if found && serverConfig != nil { + entry.ServerTitle = serverConfig.Spec.Title + entry.ServerDescription = serverConfig.Spec.Description + } + + serializableRegs[name] = entry + } + + // Serialize to JSON + log.Log(fmt.Sprintf("Writing tool registrations to %s...", outputPath)) + data, err := json.MarshalIndent(serializableRegs, "", " ") + if err != nil { + return fmt.Errorf("marshaling tool registrations: %w", err) + } + + // Ensure output directory exists + outputDir := filepath.Dir(outputPath) + if outputDir != "." 
&& outputDir != "" { + if err := os.MkdirAll(outputDir, 0o755); err != nil { + return fmt.Errorf("creating output directory: %w", err) + } + } + + // Write to file + if err := os.WriteFile(outputPath, data, 0o644); err != nil { + return fmt.Errorf("writing output file: %w", err) + } + + log.Log(fmt.Sprintf("Successfully wrote %d tool registrations to %s", len(serializableRegs), outputPath)) + return nil +} + +// stringSlice implements flag.Value for repeated string flags +type stringSlice []string + +func (s *stringSlice) String() string { + return fmt.Sprintf("%v", *s) +} + +func (s *stringSlice) Set(value string) error { + *s = append(*s, value) + return nil +} diff --git a/examples/tool_registrations/package-lock.json b/examples/tool_registrations/package-lock.json new file mode 100644 index 00000000..5b657a42 --- /dev/null +++ b/examples/tool_registrations/package-lock.json @@ -0,0 +1,1811 @@ +{ + "name": "tool_registrations", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "@marp-team/marpit": "^3.2.0", + "markdown-it-mermaid": "^0.2.5" + }, + "devDependencies": { + "@marp-team/marp-cli": "^4.2.3" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@csstools/postcss-is-pseudo-class": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-5.0.3.tgz", + "integrity": "sha512-jS/TY4SpG4gszAtIg7Qnf3AS2pjcUM5SzxpApOrlndMeGhIbaTzWBzzP/IApXoNWEW7OhcjkRT48jnAUIFXhAQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/selector-specificity": "^5.0.0", + "postcss-selector-parser": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/selector-resolve-nested": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@csstools/selector-resolve-nested/-/selector-resolve-nested-3.1.0.tgz", + "integrity": "sha512-mf1LEW0tJLKfWyvn5KdDrhpxHyuxpbNwTIwOYLIvsTffeyOf85j5oIzfG0yosxDgx/sswlqBnESYUcQH0vgZ0g==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "postcss-selector-parser": "^7.0.0" + } + }, + "node_modules/@csstools/selector-specificity": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-5.0.0.tgz", + "integrity": "sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "postcss-selector-parser": "^7.0.0" + } + }, + "node_modules/@marp-team/marp-cli": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/@marp-team/marp-cli/-/marp-cli-4.2.3.tgz", + "integrity": "sha512-yfEIkF7mlumg8CVV5m/UkLEDkW/ayM/SD6Bo8fbAvdscBQ/l9D44/aNFJsiqgNtjfktzpvRqjcBXNhWD0YTq5Q==", + "dev": true, + "dependencies": { + "@marp-team/marp-core": "^4.1.0", + "@marp-team/marpit": "^3.1.3", + "chokidar": "^4.0.3", + "cosmiconfig": "^9.0.0", + "puppeteer-core": "^24.16.0", + "serve-index": "^1.9.1", + "tmp": "^0.2.5", + "ws": "^8.18.3", + "yargs": "^17.7.2" + }, + "bin": { + "marp": "marp-cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@marp-team/marp-core": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@marp-team/marp-core/-/marp-core-4.2.0.tgz", + "integrity": "sha512-AoqRk9g6kF44OhdgV2eUsc0ciyxkI3IM/S4ntV+aHy59NSTcwPEHrbtAzqe+q2PK/trmQX233/0plusNZPoF+Q==", + "dev": true, + "dependencies": { + "@marp-team/marpit": "^3.2.0", + "@marp-team/marpit-svg-polyfill": "^2.1.0", + "highlight.js": "^11.11.1", + "katex": "^0.16.25", + "mathjax-full": "^3.2.2", + "postcss-selector-parser": "^7.1.0", + "xss": "^1.0.15" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@marp-team/marpit": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@marp-team/marpit/-/marpit-3.2.0.tgz", + "integrity": "sha512-DNCbwkAKugzCtiHJg/7DciIRwnKwAI2QH3VWWC1cVxoBBQTPnH5D9HcWqpDdduUqnCuW2PY78afVo+QlaInDdQ==", + "dependencies": { + "@csstools/postcss-is-pseudo-class": "^5.0.3", + "cssesc": "^3.0.0", + "js-yaml": "^4.1.0", + "lodash.kebabcase": "^4.1.1", + "markdown-it": "^14.1.0", + "markdown-it-front-matter": "^0.2.4", + "postcss": "^8.5.6", + "postcss-nesting": "^13.0.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@marp-team/marpit-svg-polyfill": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@marp-team/marpit-svg-polyfill/-/marpit-svg-polyfill-2.1.0.tgz", + "integrity": "sha512-VqCoAKwv1HJdzZp36dDPxznz2JZgRjkVSSPHpCzk72G2N753F0HPKXjevdjxmzN6gir9bUGBgMD1SguWJIi11A==", + "dev": true, + "engines": { 
+ "node": ">=10" + }, + "peerDependencies": { + "@marp-team/marpit": ">=0.5.0" + }, + "peerDependenciesMeta": { + "@marp-team/marpit": { + "optional": true + } + } + }, + "node_modules/@puppeteer/browsers": { + "version": "2.10.13", + "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-2.10.13.tgz", + "integrity": "sha512-a9Ruw3j3qlnB5a/zHRTkruppynxqaeE4H9WNj5eYGRWqw0ZauZ23f4W2ARf3hghF5doozyD+CRtt7XSYuYRI/Q==", + "dev": true, + "dependencies": { + "debug": "^4.4.3", + "extract-zip": "^2.0.1", + "progress": "^2.0.3", + "proxy-agent": "^6.5.0", + "semver": "^7.7.3", + "tar-fs": "^3.1.1", + "yargs": "^17.7.2" + }, + "bin": { + "browsers": "lib/cjs/main-cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@tootallnate/quickjs-emscripten": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", + "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", + "dev": true + }, + "node_modules/@types/node": { + "version": "24.10.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz", + "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==", + "dev": true, + "optional": true, + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/yauzl": { + "version": "2.10.3", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", + "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==", + "dev": true, + "optional": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@xmldom/xmldom": { + "version": "0.9.8", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.9.8.tgz", + "integrity": "sha512-p96FSY54r+WJ50FIOsCOjyj/wavs8921hG5+kVMmZgKcvIKxMXHTrjNJvRgWa/zuX3B6t2lijLNFaOyuxUH+2A==", + "dev": true, + "engines": { + 
"node": ">=14.6" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dev": true, + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "dev": true, + "engines": { + "node": ">= 14" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/ast-types": { + "version": "0.13.4", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", + "integrity": "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==", + "dev": true, + "dependencies": { + "tslib": "^2.0.1" + }, + "engines": { + 
"node": ">=4" + } + }, + "node_modules/b4a": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.7.3.tgz", + "integrity": "sha512-5Q2mfq2WfGuFp3uS//0s6baOJLMoVduPYVeNmDYxu5OUA1/cBfvr2RIS7vi62LdNj/urk1hfmj867I3qt6uZ7Q==", + "dev": true, + "peerDependencies": { + "react-native-b4a": "*" + }, + "peerDependenciesMeta": { + "react-native-b4a": { + "optional": true + } + } + }, + "node_modules/bare-events": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.8.2.tgz", + "integrity": "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==", + "dev": true, + "peerDependencies": { + "bare-abort-controller": "*" + }, + "peerDependenciesMeta": { + "bare-abort-controller": { + "optional": true + } + } + }, + "node_modules/bare-fs": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.5.1.tgz", + "integrity": "sha512-zGUCsm3yv/ePt2PHNbVxjjn0nNB1MkIaR4wOCxJ2ig5pCf5cCVAYJXVhQg/3OhhJV6DB1ts7Hv0oUaElc2TPQg==", + "dev": true, + "optional": true, + "dependencies": { + "bare-events": "^2.5.4", + "bare-path": "^3.0.0", + "bare-stream": "^2.6.4", + "bare-url": "^2.2.2", + "fast-fifo": "^1.3.2" + }, + "engines": { + "bare": ">=1.16.0" + }, + "peerDependencies": { + "bare-buffer": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + } + } + }, + "node_modules/bare-os": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.2.tgz", + "integrity": "sha512-T+V1+1srU2qYNBmJCXZkUY5vQ0B4FSlL3QDROnKQYOqeiQR8UbjNHlPa+TIbM4cuidiN9GaTaOZgSEgsvPbh5A==", + "dev": true, + "optional": true, + "engines": { + "bare": ">=1.14.0" + } + }, + "node_modules/bare-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz", + "integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==", + "dev": true, + "optional": true, 
+ "dependencies": { + "bare-os": "^3.0.1" + } + }, + "node_modules/bare-stream": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.7.0.tgz", + "integrity": "sha512-oyXQNicV1y8nc2aKffH+BUHFRXmx6VrPzlnaEvMhram0nPBrKcEdcyBg5r08D0i8VxngHFAiVyn1QKXpSG0B8A==", + "dev": true, + "optional": true, + "dependencies": { + "streamx": "^2.21.0" + }, + "peerDependencies": { + "bare-buffer": "*", + "bare-events": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + }, + "bare-events": { + "optional": true + } + } + }, + "node_modules/bare-url": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/bare-url/-/bare-url-2.3.2.tgz", + "integrity": "sha512-ZMq4gd9ngV5aTMa5p9+UfY0b3skwhHELaDkhEHetMdX0LRkW9kzaym4oo/Eh+Ghm0CCDuMTsRIGM/ytUc1ZYmw==", + "dev": true, + "optional": true, + "dependencies": { + "bare-path": "^3.0.0" + } + }, + "node_modules/basic-ftp": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", + "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", + "dev": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/batch": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", + "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==", + "dev": true + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": 
"sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/chromium-bidi": { + "version": "10.5.1", + "resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-10.5.1.tgz", + "integrity": "sha512-rlj6OyhKhVTnk4aENcUme3Jl9h+cq4oXu4AzBcvr8RMmT6BR4a3zSNT9dbIfXr9/BS6ibzRyDhowuw4n2GgzsQ==", + "dev": true, + "dependencies": { + "mitt": "^3.0.1", + "zod": "^3.24.1" + }, + "peerDependencies": { + "devtools-protocol": "*" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + 
"node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/cosmiconfig": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz", + "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==", + "dev": true, + "dependencies": { + "env-paths": "^2.2.1", + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/cssfilter": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cssfilter/-/cssfilter-0.0.10.tgz", + "integrity": "sha512-FAaLDaplstoRsDR8XGYH51znUN0UY7nMc6Z9/fvE8EXGwvJE9hu7W2vHwx1+bd6gCYnln9nLbzxFTrcO9YQDZw==", + "dev": true + }, + "node_modules/d3": { + "version": "3.5.17", + "resolved": "https://registry.npmjs.org/d3/-/d3-3.5.17.tgz", + "integrity": "sha512-yFk/2idb8OHPKkbAL8QaOaqENNoMhIaSHZerk3oQsECwkObkCpJyjYwCe+OHiq6UEdhe1m8ZGARRRO3ljFjlKg==" + }, + "node_modules/dagre-d3-renderer": { + "version": "0.4.26", + "resolved": "https://registry.npmjs.org/dagre-d3-renderer/-/dagre-d3-renderer-0.4.26.tgz", + "integrity": "sha512-vOWj1uA4/APTrfDyfHaH/xpfXhPh9rszW+HOaEwPCeA6Afl06Lobfh7OpESuVMQW2QGuY4UQ7pte/p0WhdDs7w==", + 
"dependencies": { + "d3": "3.5.17", + "dagre-layout": "^0.8.0", + "graphlib": "^2.1.1", + "lodash": "^4.17.4" + } + }, + "node_modules/dagre-layout": { + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/dagre-layout/-/dagre-layout-0.8.8.tgz", + "integrity": "sha512-ZNV15T9za7X+fV8Z07IZquUKugCxm5owoiPPxfEx6OJRD331nkiIaF3vSt0JEY5FkrY0KfRQxcpQ3SpXB7pLPQ==", + "dependencies": { + "graphlibrary": "^2.2.0", + "lodash": "^4.17.5" + } + }, + "node_modules/data-uri-to-buffer": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", + "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", + "dev": true, + "engines": { + "node": ">= 14" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/degenerator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/degenerator/-/degenerator-5.0.1.tgz", + "integrity": "sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==", + "dev": true, + "dependencies": { + "ast-types": "^0.13.4", + "escodegen": "^2.1.0", + "esprima": "^4.0.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/devtools-protocol": { + "version": "0.0.1521046", + "resolved": 
"https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1521046.tgz", + "integrity": "sha512-vhE6eymDQSKWUXwwA37NtTTVEzjtGVfDr3pRbsWEQ5onH/Snp2c+2xZHWJJawG/0hCCJLRGt4xVtEVUVILol4w==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": 
"sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "dev": true + }, + "node_modules/escodegen": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", + "dev": true, + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/events-universal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/events-universal/-/events-universal-1.0.1.tgz", + "integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==", + "dev": true, + "dependencies": { + "bare-events": "^2.7.0" + } + }, + "node_modules/extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + }, + "bin": { + "extract-zip": "cli.js" + }, + "engines": { + "node": ">= 10.17.0" + }, + "optionalDependencies": { + "@types/yauzl": "^2.9.1" + } + }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==", + "dev": true + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stream": { 
+ "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-uri": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.5.tgz", + "integrity": "sha512-b1O07XYq8eRuVzBNgJLstU6FYc1tS6wnMtF1I1D9lE8LxZSOGZ7LhxN54yPP6mGw5f2CkXY2BQUL9Fx41qvcIg==", + "dev": true, + "dependencies": { + "basic-ftp": "^5.0.2", + "data-uri-to-buffer": "^6.0.2", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/graphlib": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.8.tgz", + "integrity": "sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==", + "dependencies": { + "lodash": "^4.17.15" + } + }, + "node_modules/graphlibrary": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/graphlibrary/-/graphlibrary-2.2.0.tgz", + "integrity": "sha512-XTcvT55L8u4MBZrM37zXoUxsgxs/7sow7YSygd9CIwfWTVO8RVu7AYXhhCiTuFEf+APKgx6Jk4SuQbYR0vYKmQ==", + "dependencies": { + "lodash": "^4.17.5" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "bin": { + "he": "bin/he" + } + }, + "node_modules/highlight.js": { + "version": "11.11.1", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.11.1.tgz", + "integrity": "sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w==", + "dev": true, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/http-errors": { + "version": 
"1.6.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==", + "dev": true, + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.0", + "statuses": ">= 1.4.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==", + "dev": true + }, + "node_modules/ip-address": { + "version": "10.1.0", + "resolved": 
"https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", + "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/katex": { + "version": "0.16.25", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.25.tgz", + "integrity": "sha512-woHRUZ/iF23GBP1dkDQMh1QBad9dmr8/PAwNA54VrSOVYgI12MAcE14TqnDdQOdzyEonGzMepYnqBMYdsoAr8Q==", + 
"dev": true, + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.kebabcase": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz", + "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==" + }, + "node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": 
"^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/markdown-it-front-matter": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/markdown-it-front-matter/-/markdown-it-front-matter-0.2.4.tgz", + "integrity": "sha512-25GUs0yjS2hLl8zAemVndeEzThB1p42yxuDEKbd4JlL3jiz+jsm6e56Ya8B0VREOkNxLYB4TTwaoPJ3ElMmW+w==" + }, + "node_modules/markdown-it-mermaid": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/markdown-it-mermaid/-/markdown-it-mermaid-0.2.5.tgz", + "integrity": "sha512-ZUTFRX+cXEtWmn/9LMlpVklPJiDrHPWyHE/wamC2wm0Ojh1qOcuKWfWW3BqP83+7w6C59rS7M3OrGTs/u9mQTA==", + "dependencies": { + "mermaid": "^7.1.2" + } + }, + "node_modules/mathjax-full": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/mathjax-full/-/mathjax-full-3.2.2.tgz", + "integrity": "sha512-+LfG9Fik+OuI8SLwsiR02IVdjcnRCy5MufYLi0C3TdMT56L/pjB0alMVGgoWJF8pN9Rc7FESycZB9BMNWIid5w==", + "deprecated": "Version 4 replaces this package with the scoped package @mathjax/src", + "dev": true, + "dependencies": { + "esm": "^3.2.25", + "mhchemparser": "^4.1.0", + "mj-context-menu": "^0.6.1", + "speech-rule-engine": "^4.0.6" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==" + }, + "node_modules/mermaid": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-7.1.2.tgz", + "integrity": "sha512-bDLu3fQuf3/R0fNkNzB0GTaF7+6SxnZpfTs9DVQF1ougsuP23MBzvEIGfL0ML8zeyg7+J2D+0AaoLVhskW5ulw==", + "dependencies": { + "d3": "3.5.17", + "dagre-d3-renderer": "^0.4.25", + "dagre-layout": "^0.8.0", + "he": "^1.1.1", + "lodash": "^4.17.4", + "moment": "^2.20.1" + } + }, + "node_modules/mhchemparser": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/mhchemparser/-/mhchemparser-4.2.1.tgz", + "integrity": 
"sha512-kYmyrCirqJf3zZ9t/0wGgRZ4/ZJw//VwaRVGA75C4nhE60vtnIzhl9J9ndkX/h6hxSN7pjg/cE0VxbnNM+bnDQ==", + "dev": true + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mitt": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz", + "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==", + "dev": true + }, + "node_modules/mj-context-menu": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/mj-context-menu/-/mj-context-menu-0.6.1.tgz", + "integrity": "sha512-7NO5s6n10TIV96d4g2uDpG7ZDpIhMh0QNfGdJw/W47JswFcosz457wqz/b5sAKvl12sxINGFCn80NZHKwxQEXA==", + "dev": true + }, + "node_modules/moment": { + "version": "2.30.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", + "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==", + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": 
"sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/netmask": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", + "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==", + "dev": true, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/pac-proxy-agent": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.2.0.tgz", + "integrity": "sha512-TEB8ESquiLMc0lV8vcd5Ql/JAKAoyzHFXaStwjkzpOpC5Yv+pIzLfHvjTSdf3vpa2bMiUQrg9i6276yn8666aA==", + "dev": true, + "dependencies": { + "@tootallnate/quickjs-emscripten": "^0.23.0", + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "get-uri": "^6.0.1", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.6", + "pac-resolver": "^7.0.1", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/pac-resolver": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz", + "integrity": 
"sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==", + "dev": true, + "dependencies": { + "degenerator": "^5.0.0", + "netmask": "^2.0.2" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": 
"https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-nesting": { + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-13.0.2.tgz", + "integrity": "sha512-1YCI290TX+VP0U/K/aFxzHzQWHWURL+CtHMSbex1lCdpXD1SoR2sYuxDu5aNI9lPoXpKTCggFZiDJbwylU0LEQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/selector-resolve-nested": "^3.1.0", + "@csstools/selector-specificity": "^5.0.0", + "postcss-selector-parser": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-selector-parser": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", + "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + 
"node_modules/proxy-agent": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.5.0.tgz", + "integrity": "sha512-TmatMXdr2KlRiA2CyDu8GqR8EjahTG3aY3nXjdzFyoZbmB8hrBsTyMezhULIXKnC0jpfjlmiZ3+EaCzoInSu/A==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "http-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.6", + "lru-cache": "^7.14.1", + "pac-proxy-agent": "^7.1.0", + "proxy-from-env": "^1.1.0", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/puppeteer-core": { + "version": "24.29.1", + "resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-24.29.1.tgz", + "integrity": "sha512-ErJ9qKCK+bdLvBa7QVSQTBSPm8KZbl1yC/WvhrZ0ut27hDf2QBzjDsn1IukzE1i1KtZ7NYGETOV4W1beoo9izA==", + "dev": true, + "dependencies": { + "@puppeteer/browsers": "2.10.13", + "chromium-bidi": "10.5.1", + "debug": "^4.4.3", + "devtools-protocol": "0.0.1521046", + "typed-query-selector": "^2.12.0", + "webdriver-bidi-protocol": "0.3.8", + "ws": "^8.18.3" + }, + "engines": { + "node": ">=18" + } + 
}, + "node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/serve-index": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", + "integrity": "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==", + "dev": true, + "dependencies": { + "accepts": "~1.3.4", + "batch": "0.6.1", + "debug": "2.6.9", + "escape-html": "~1.0.3", + "http-errors": "~1.6.2", + "mime-types": "~2.1.17", + "parseurl": "~1.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/serve-index/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + 
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/serve-index/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/setprototypeof": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==", + "dev": true + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz", + "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==", + "dev": true, + "dependencies": { + "ip-address": "^10.0.1", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", + "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/speech-rule-engine": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/speech-rule-engine/-/speech-rule-engine-4.1.2.tgz", + "integrity": "sha512-S6ji+flMEga+1QU79NDbwZ8Ivf0S/MpupQQiIC0rTpU/ZTKgcajijJJb1OcByBQDjrXCN1/DJtGz4ZJeBMPGJw==", + "dev": true, + "dependencies": { + "@xmldom/xmldom": "0.9.8", + "commander": "13.1.0", + "wicked-good-xpath": "1.3.0" + }, + "bin": { + "sre": "bin/sre" + } + }, + "node_modules/speech-rule-engine/node_modules/commander": { + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-13.1.0.tgz", + "integrity": "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/streamx": { + "version": "2.23.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.23.0.tgz", + "integrity": "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg==", + "dev": true, + "dependencies": { + "events-universal": "^1.0.0", + "fast-fifo": "^1.3.2", + "text-decoder": "^1.1.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar-fs": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.1.1.tgz", + "integrity": "sha512-LZA0oaPOc2fVo82Txf3gw+AkEd38szODlptMYejQUhndHMLQ9M059uXR+AfS7DNo0NpINvSqDsvyaCrBVkptWg==", + "dev": true, + "dependencies": { + "pump": "^3.0.0", + "tar-stream": "^3.1.5" + }, + "optionalDependencies": { + "bare-fs": "^4.0.1", + "bare-path": "^3.0.0" + } + }, + "node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "dev": true, + "dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, + "node_modules/text-decoder": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz", + "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "dev": true, + "dependencies": { + "b4a": "^1.6.4" + } + }, + "node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", + 
"dev": true, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true + }, + "node_modules/typed-query-selector": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/typed-query-selector/-/typed-query-selector-2.12.0.tgz", + "integrity": "sha512-SbklCd1F0EiZOyPiW192rrHZzZ5sBijB6xM+cpmrwDqObvdtunOHHIk9fCGsoK5JVIYXoyEp4iEdE3upFH3PAg==", + "dev": true + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "optional": true + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/webdriver-bidi-protocol": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/webdriver-bidi-protocol/-/webdriver-bidi-protocol-0.3.8.tgz", + "integrity": "sha512-21Yi2GhGntMc671vNBCjiAeEVknXjVRoyu+k+9xOMShu+ZQfpGQwnBqbNz/Sv4GXZ6JmutlPAi2nIJcrymAWuQ==", + "dev": true + }, + "node_modules/wicked-good-xpath": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/wicked-good-xpath/-/wicked-good-xpath-1.3.0.tgz", + "integrity": "sha512-Gd9+TUn5nXdwj/hFsPVx5cuHHiF5Bwuc30jZ4+ronF1qHK5O7HD0sgmXWSEgwKquT3ClLoKPVbO6qGwVwLzvAw==", + "dev": true + }, + 
"node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/ws": { + "version": "8.18.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "dev": true, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/xss": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.15.tgz", + "integrity": "sha512-FVdlVVC67WOIPvfOwhoMETV72f6GbW7aOabBC3WxN/oUdoEMDyLz4OgRv5/gck2ZeNqEQu+Tb0kloovXOfpYVg==", + "dev": true, + "dependencies": { + "commander": "^2.20.3", + "cssfilter": "0.0.10" + }, + "bin": { + "xss": "bin/xss" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/xss/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": 
"https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "node_modules/zod": { + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + } + } +} diff --git a/examples/tool_registrations/package.json b/examples/tool_registrations/package.json new file mode 100644 index 00000000..f1865bc5 --- /dev/null +++ b/examples/tool_registrations/package.json @@ -0,0 +1,15 @@ +{ + "scripts": { + "marp": "marp --html embeddings.md", + "marp:watch": "marp --html 
embeddings.md --watch", + "marp:pdf": "marp --html embeddings.md --pdf -o embeddings.pdf", + "marp:html": "marp --html embeddings.md -o embeddings.html" + }, + "dependencies": { + "@marp-team/marpit": "^3.2.0", + "markdown-it-mermaid": "^0.2.5" + }, + "devDependencies": { + "@marp-team/marp-cli": "^4.2.3" + } +} diff --git a/examples/tool_registrations/registry.yaml b/examples/tool_registrations/registry.yaml new file mode 100644 index 00000000..ef345d1c --- /dev/null +++ b/examples/tool_registrations/registry.yaml @@ -0,0 +1,83 @@ +registry: + SQLite: + ref: "" + arxiv-mcp-server: + ref: "" + aws-documentation: + ref: "" + brave: + ref: "" + context7: + ref: "" + desktop-commander: + ref: "" + dockerhub: + ref: "" + duckduckgo: + ref: "" + elevenlabs: + ref: "" + everart: + ref: "" + fetch: + ref: "" + filesystem: + ref: "" + gemini-api-docs: + ref: "" + github-official: + ref: "" + gitmcp: + ref: "" + google-maps: + ref: "" + google-maps-comprehensive: + ref: "" + hugging-face: + ref: "" + linkedin-mcp-server: + ref: "" + markitdown: + ref: "" + maven-tools-mcp: + ref: "" + mcp-hackernews: + ref: "" + memory: + ref: "" + microsoft-learn: + ref: "" + minecraft-wiki: + ref: "" + node-code-sandbox: + ref: "" + notion-remote: + ref: "" + openapi: + ref: "" + openapi-schema: + ref: "" + openbnb-airbnb: + ref: "" + openweather: + ref: "" + playwright: + ref: "" + resend: + ref: "" + rust-mcp-filesystem: + ref: "" + semgrep: + ref: "" + sequentialthinking: + ref: "" + slack: + ref: "" + stripe: + ref: "" + time: + ref: "" + wikipedia-mcp: + ref: "" + youtube_transcript: + ref: "" diff --git a/examples/tool_registrations/run.sh b/examples/tool_registrations/run.sh new file mode 100755 index 00000000..e6e97040 --- /dev/null +++ b/examples/tool_registrations/run.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +# List of servers to extract tool registrations from +SERVERS=( + "github-official" + "gitmcp" + "slack" + "fetch" + "duckduckgo" + "brave" + "context7" + "dockerhub" + 
"playwright" + "wikipedia-mcp" + "SQLite" + "notion-remote" + "rust-mcp-filesystem" + "arxiv-mcp-server" + "google-maps" + "google-maps-comprehensive" + "hugging-face" + "linkedin-mcp-server" + "desktop-commander" + "openbnb-airbnb" + "youtube_transcript" + "time" + "sequentialthinking" + "semgrep" + "resend" + "papersearch" + "openweather" + "openapi-schema" + "openapi" + "node-code-sandbox" + "minecraft-wiki" + "microsoft-learn" + "memory" + "mcp-hackernews" + "maven-tools-mcp" + "markitdown" + "gemini-api-docs" + "filesystem" + "everart" + "elevenlabs" + "stripe" +) + +# Common configuration +CATALOG="$HOME/.docker/mcp/catalogs/docker-mcp.yaml" +CONFIG="./config.yaml" + +# Loop through each server and extract tool registrations +for SERVER in "${SERVERS[@]}"; do + OUTPUT=./tool-json/"${SERVER}.json" + echo "Extracting tools from ${SERVER}..." + + go run main.go \ + -catalog "${CATALOG}" \ + -server "${SERVER}" \ + -config "${CONFIG}" \ + -output "${OUTPUT}" + + if [ $? -eq 0 ]; then + echo "✓ Successfully extracted tools from ${SERVER} to ${OUTPUT}" + else + echo "✗ Failed to extract tools from ${SERVER}" + fi + echo "" +done + +echo "Done! Extracted tool registrations from ${#SERVERS[@]} servers." 
diff --git a/pkg/gateway/codemode.go b/pkg/gateway/codemode.go new file mode 100644 index 00000000..61c6c117 --- /dev/null +++ b/pkg/gateway/codemode.go @@ -0,0 +1,191 @@ +package gateway + +import ( + "context" + "encoding/json" + "fmt" + "strings" + + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/catalog" + "github.com/docker/mcp-gateway/pkg/codemode" +) + +// serverToolSetAdapter adapts a gateway server to the codemode.ToolSet interface +type serverToolSetAdapter struct { + gateway *Gateway + serverName string + serverConfig *catalog.ServerConfig + session *mcp.ServerSession +} + +func (a *serverToolSetAdapter) Tools(ctx context.Context) ([]*codemode.ToolWithHandler, error) { + // Get a client for this server + clientConfig := &clientConfig{ + serverSession: a.session, + server: a.gateway.mcpServer, + } + + client, err := a.gateway.clientPool.AcquireClient(ctx, a.serverConfig, clientConfig) + if err != nil { + return nil, fmt.Errorf("failed to acquire client for server %s: %w", a.serverName, err) + } + + // List tools from the server + listResult, err := client.Session().ListTools(ctx, &mcp.ListToolsParams{}) + if err != nil { + return nil, fmt.Errorf("failed to list tools from server %s: %w", a.serverName, err) + } + + // Convert MCP tools to ToolWithHandler + var result []*codemode.ToolWithHandler + for _, tool := range listResult.Tools { + // Create a handler that calls the tool on the remote server + handler := func(tool *mcp.Tool) mcp.ToolHandler { + return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Forward the tool call to the actual server + return client.Session().CallTool(ctx, &mcp.CallToolParams{ + Name: tool.Name, + Arguments: req.Params.Arguments, + }) + } + }(tool) + + result = append(result, &codemode.ToolWithHandler{ + Tool: tool, + Handler: handler, + }) + } + + return result, nil +} + +func addCodemodeHandler(g *Gateway) mcp.ToolHandler { + return func(ctx 
context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + Servers []string `json:"servers"` + Name string `json:"name"` + } + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if len(params.Servers) == 0 { + return nil, fmt.Errorf("servers parameter is required and must not be empty") + } + + if params.Name == "" { + return nil, fmt.Errorf("name parameter is required") + } + + // Validate that all requested servers exist + for _, serverName := range params.Servers { + if _, _, found := g.configuration.Find(serverName); !found { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("Error: Server '%s' not found in configuration. 
Use mcp-find to search for available servers.", serverName), + }}, + }, nil + } + } + + // Create a tool set adapter for each server + var toolSets []codemode.ToolSet + for _, serverName := range params.Servers { + serverConfig, _, _ := g.configuration.Find(serverName) + toolSets = append(toolSets, &serverToolSetAdapter{ + gateway: g, + serverName: serverName, + serverConfig: serverConfig, + session: req.Session, + }) + } + + // Wrap the tool sets with codemode + wrappedToolSet := codemode.Wrap(toolSets) + + // Get the generated tool from the wrapped toolset + tools, err := wrappedToolSet.Tools(ctx) + if err != nil { + return nil, fmt.Errorf("failed to create code-mode tools: %w", err) + } + + // Use the first tool (the JavaScript execution tool with all servers' tools available) + if len(tools) == 0 { + return nil, fmt.Errorf("no tools generated from wrapped toolset") + } + + customTool := tools[0] + toolName := fmt.Sprintf("code-mode-%s", params.Name) + + // Customize the tool name and description + customTool.Tool.Name = toolName + + // Add the tool to the gateway's MCP server + g.mcpServer.AddTool(customTool.Tool, customTool.Handler) + + // Track the tool registration for capabilities and mcp-exec + g.capabilitiesMu.Lock() + g.toolRegistrations[toolName] = ToolRegistration{ + ServerName: "code-mode", + Tool: customTool.Tool, + Handler: customTool.Handler, + } + g.capabilitiesMu.Unlock() + + // Build detailed response with tool information + var responseText strings.Builder + responseText.WriteString(fmt.Sprintf("Successfully created code-mode tool '%s'\n\n", toolName)) + + // Tool description + responseText.WriteString("## Tool Details\n") + responseText.WriteString(fmt.Sprintf("**Name:** %s\n", toolName)) + responseText.WriteString(fmt.Sprintf("**Description:** %s\n\n", customTool.Tool.Description)) + + // Input schema information + responseText.WriteString("## Input Schema\n") + if customTool.Tool.InputSchema != nil { + schemaJSON, err := 
json.MarshalIndent(customTool.Tool.InputSchema, "", " ") + if err == nil { + responseText.WriteString("```json\n") + responseText.WriteString(string(schemaJSON)) + responseText.WriteString("\n```\n\n") + } + } + + // Available servers + responseText.WriteString("## Available Servers\n") + responseText.WriteString(fmt.Sprintf("This tool has access to tools from: %s\n\n", strings.Join(params.Servers, ", "))) + + // Usage instructions + responseText.WriteString("## How to Use\n") + responseText.WriteString("You can call this tool using the **mcp-exec** tool:\n") + responseText.WriteString("```json\n") + responseText.WriteString("{\n") + responseText.WriteString(fmt.Sprintf(" \"name\": \"%s\",\n", toolName)) + responseText.WriteString(" \"arguments\": {\n") + responseText.WriteString(" \"script\": \"\"\n") + responseText.WriteString(" }\n") + responseText.WriteString("}\n") + responseText.WriteString("```\n\n") + responseText.WriteString("The tool is now available in your session and can be executed via mcp-exec.") + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: responseText.String(), + }}, + }, nil + } +} diff --git a/pkg/gateway/config.go b/pkg/gateway/config.go index 68f746b9..3c60d557 100644 --- a/pkg/gateway/config.go +++ b/pkg/gateway/config.go @@ -37,4 +37,5 @@ type Options struct { DynamicTools bool ToolNamePrefix bool LogFilePath string + UseEmbeddings bool } diff --git a/pkg/gateway/configset.go b/pkg/gateway/configset.go new file mode 100644 index 00000000..250873e0 --- /dev/null +++ b/pkg/gateway/configset.go @@ -0,0 +1,155 @@ +package gateway + +import ( + "context" + "encoding/json" + "fmt" + "strings" + + "github.com/google/jsonschema-go/jsonschema" + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/log" + "github.com/docker/mcp-gateway/pkg/oci" +) + +type configValue struct { + Server string `json:"server"` + Config map[string]any `json:"config"` +} + +func configSetHandler(g 
*Gateway) mcp.ToolHandler { + return func(_ context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params configValue + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.Server == "" { + return nil, fmt.Errorf("server parameter is required") + } + + if params.Config == nil { + return nil, fmt.Errorf("config parameter is required") + } + + serverName := strings.TrimSpace(params.Server) + canonicalServerName := oci.CanonicalizeServerName(serverName) + + // Check if server exists in catalog + serverConfig, _, serverExists := g.configuration.Find(serverName) + + if !serverExists { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("Error: Server '%s' not found in catalog. 
Use mcp-find to search for available servers.", serverName), + }}, + }, nil + } + + // Validate config against server's schema if schema exists + if serverConfig != nil && len(serverConfig.Spec.Config) > 0 { + var validationErrors []string + var schemaInfo strings.Builder + + schemaInfo.WriteString("Server config schema:\n") + + for _, configItem := range serverConfig.Spec.Config { + // Config items should be schema objects + schemaMap, ok := configItem.(map[string]any) + if !ok { + continue + } + + // Get the name field - this identifies which config to validate + configName, ok := schemaMap["name"].(string) + if !ok || configName == "" { + continue + } + + // Add schema to info + schemaBytes, _ := json.MarshalIndent(schemaMap, " ", " ") + schemaInfo.WriteString(fmt.Sprintf("\n%s:\n %s\n", configName, string(schemaBytes))) + + // Convert the schema map to a jsonschema.Schema for validation + schemaBytes, err := json.Marshal(schemaMap) + if err != nil { + validationErrors = append(validationErrors, fmt.Sprintf("%s: invalid schema definition", configName)) + continue + } + + var schema jsonschema.Schema + if err := json.Unmarshal(schemaBytes, &schema); err != nil { + validationErrors = append(validationErrors, fmt.Sprintf("%s: invalid schema definition", configName)) + continue + } + + // Resolve the schema + resolved, err := schema.Resolve(nil) + if err != nil { + validationErrors = append(validationErrors, fmt.Sprintf("%s: schema resolution failed", configName)) + continue + } + + // Validate the config value against the schema + if err := resolved.Validate(params.Config); err != nil { + validationErrors = append(validationErrors, fmt.Sprintf("%s: %s", configName, err.Error())) + } + } + + // If validation failed, return error with schema + if len(validationErrors) > 0 { + errorMessage := fmt.Sprintf("Config validation failed for server '%s':\n\n", serverName) + for _, errMsg := range validationErrors { + errorMessage += fmt.Sprintf(" - %s\n", errMsg) + } + 
errorMessage += "\n" + schemaInfo.String() + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: errorMessage, + }}, + IsError: true, + }, nil + } + } + + // Store old config for comparison + oldConfig := g.configuration.config[canonicalServerName] + oldConfigJSON, _ := json.MarshalIndent(oldConfig, "", " ") + + // Set the configuration + g.configuration.config[canonicalServerName] = params.Config + + // Format new config for display + newConfigJSON, _ := json.MarshalIndent(params.Config, "", " ") + + // Log the configuration change + log.Log(fmt.Sprintf(" - Set config for server '%s': %s", serverName, string(newConfigJSON))) + + var resultMessage string + if oldConfig != nil { + resultMessage = fmt.Sprintf("Successfully updated config for server '%s':\n\nOld config:\n%s\n\nNew config:\n%s", + serverName, string(oldConfigJSON), string(newConfigJSON)) + } else { + resultMessage = fmt.Sprintf("Successfully set config for server '%s':\n\n%s", serverName, string(newConfigJSON)) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: resultMessage, + }}, + }, nil + } +} diff --git a/pkg/gateway/createprofile.go b/pkg/gateway/createprofile.go new file mode 100644 index 00000000..305b6f5b --- /dev/null +++ b/pkg/gateway/createprofile.go @@ -0,0 +1,193 @@ +package gateway + +import ( + "context" + "database/sql" + "encoding/json" + "errors" + "fmt" + + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/db" + "github.com/docker/mcp-gateway/pkg/log" + "github.com/docker/mcp-gateway/pkg/oci" + "github.com/docker/mcp-gateway/pkg/workingset" +) + +func createProfileHandler(g *Gateway) mcp.ToolHandler { + return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + Name string `json:"name"` + } + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := 
json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.Name == "" { + return nil, fmt.Errorf("name parameter is required") + } + + profileName := params.Name + + // Create DAO and OCI service + dao, err := db.New() + if err != nil { + return nil, fmt.Errorf("failed to create database client: %w", err) + } + + ociService := oci.NewService() + + // Build the working set from current gateway state + servers := make([]workingset.Server, 0, len(g.configuration.serverNames)) + for _, serverName := range g.configuration.serverNames { + catalogServer, found := g.configuration.servers[serverName] + if !found { + log.Logf("Warning: server %s not found in catalog, skipping", serverName) + continue + } + + // Determine server type based on whether it has an image + serverType := workingset.ServerTypeImage + if catalogServer.Image == "" { + // Skip servers without images for now (registry servers) + log.Logf("Warning: server %s has no image, skipping", serverName) + continue + } + + // Get config for this server + serverConfig := g.configuration.config[serverName] + if serverConfig == nil { + serverConfig = make(map[string]any) + } + + // Get tools for this server + var serverTools []string + if g.configuration.tools.ServerTools != nil { + serverTools = g.configuration.tools.ServerTools[serverName] + } + + // Create server entry + server := workingset.Server{ + Type: serverType, + Image: catalogServer.Image, + Config: serverConfig, + Secrets: "default", + Tools: serverTools, + Snapshot: &workingset.ServerSnapshot{ + Server: catalogServer, + }, + } + + servers = append(servers, server) + } + + if len(servers) == 0 { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: "No servers with images found in current gateway state. 
Cannot create profile.", + }}, + IsError: true, + }, nil + } + + // Add default secrets + secrets := make(map[string]workingset.Secret) + secrets["default"] = workingset.Secret{ + Provider: workingset.SecretProviderDockerDesktop, + } + + // Check if profile already exists + existingProfile, err := dao.GetWorkingSet(ctx, profileName) + isUpdate := false + profileID := profileName + + if err != nil { + if !errors.Is(err, sql.ErrNoRows) { + return nil, fmt.Errorf("failed to check for existing profile: %w", err) + } + // Profile doesn't exist, we'll create it + } else { + // Profile exists, we'll update it + isUpdate = true + profileID = existingProfile.ID + } + + // Create working set + ws := workingset.WorkingSet{ + Version: workingset.CurrentWorkingSetVersion, + ID: profileID, + Name: profileName, + Servers: servers, + Secrets: secrets, + } + + // Ensure snapshots are resolved + if err := ws.EnsureSnapshotsResolved(ctx, ociService); err != nil { + return nil, fmt.Errorf("failed to resolve snapshots: %w", err) + } + + // Validate the working set + if err := ws.Validate(); err != nil { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("Profile validation failed: %v", err), + }}, + IsError: true, + }, nil + } + + // Create or update the profile + if isUpdate { + if err := dao.UpdateWorkingSet(ctx, ws.ToDb()); err != nil { + return nil, fmt.Errorf("failed to update profile: %w", err) + } + log.Logf("Updated profile %s with %d servers", profileID, len(servers)) + } else { + if err := dao.CreateWorkingSet(ctx, ws.ToDb()); err != nil { + return nil, fmt.Errorf("failed to create profile: %w", err) + } + log.Logf("Created profile %s with %d servers", profileID, len(servers)) + } + + // Build success message + var resultMessage string + if isUpdate { + resultMessage = fmt.Sprintf("Successfully updated profile '%s' (ID: %s) with %d servers:\n", profileName, profileID, len(servers)) + } else { + resultMessage = 
fmt.Sprintf("Successfully created profile '%s' (ID: %s) with %d servers:\n", profileName, profileID, len(servers)) + } + + // List the servers in the profile + for i, server := range servers { + serverName := server.Snapshot.Server.Name + resultMessage += fmt.Sprintf("\n%d. %s", i+1, serverName) + if server.Image != "" { + resultMessage += fmt.Sprintf(" (image: %s)", server.Image) + } + if len(server.Tools) > 0 { + resultMessage += fmt.Sprintf(" - %d tools", len(server.Tools)) + } + if len(server.Config) > 0 { + resultMessage += " - configured" + } + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: resultMessage, + }}, + }, nil + } +} diff --git a/pkg/gateway/dynamic_mcps.go b/pkg/gateway/dynamic_mcps.go index a7384b87..6dad0342 100644 --- a/pkg/gateway/dynamic_mcps.go +++ b/pkg/gateway/dynamic_mcps.go @@ -2,13 +2,8 @@ package gateway import ( "context" - "encoding/json" "fmt" - "io" - "net/http" "os" - "slices" - "strings" "time" "github.com/google/jsonschema-go/jsonschema" @@ -17,18 +12,17 @@ import ( "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/metric" - "github.com/docker/mcp-gateway/pkg/catalog" - "github.com/docker/mcp-gateway/pkg/codemode" - "github.com/docker/mcp-gateway/pkg/log" - "github.com/docker/mcp-gateway/pkg/oci" "github.com/docker/mcp-gateway/pkg/telemetry" ) // mcpFindTool implements a tool for finding MCP servers in the catalog -func (g *Gateway) createMcpFindTool(configuration Configuration) *ToolRegistration { +func (g *Gateway) createMcpFindTool(_ Configuration, handler mcp.ToolHandler) *ToolRegistration { tool := &mcp.Tool{ - Name: "mcp-find", - Description: "Find MCP servers in the current catalog by name, title, or description. Returns matching servers with their details.", + Name: "mcp-find", + Description: `Find MCP servers in the current catalog by name, title, or description. 
+If the user is looking for new capabilities, use this tool to search the MCP catalog for servers that should potentially be enabled. +This will not enable the server but will return information about servers that could be enabled. +If we find an mcp server, it can be added with the mcp-add tool, and configured with mcp-config-set.`, InputSchema: &jsonschema.Schema{ Type: "object", Properties: map[string]*jsonschema.Schema{ @@ -45,180 +39,89 @@ func (g *Gateway) createMcpFindTool(configuration Configuration) *ToolRegistrati }, } - handler := func(_ context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { - // Parse parameters - var params struct { - Query string `json:"query"` - Limit int `json:"limit"` - } - - if req.Params.Arguments == nil { - return nil, fmt.Errorf("missing arguments") - } - - paramsBytes, err := json.Marshal(req.Params.Arguments) - if err != nil { - return nil, fmt.Errorf("failed to marshal arguments: %w", err) - } - - if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { - return nil, fmt.Errorf("failed to parse arguments: %w", err) - } - - if params.Query == "" { - return nil, fmt.Errorf("query parameter is required") - } - - if params.Limit <= 0 { - params.Limit = 10 - } - - // Search through the catalog servers - query := strings.ToLower(strings.TrimSpace(params.Query)) - var matches []ServerMatch - - for serverName, server := range configuration.servers { - match := false - score := 0 - - // Check server name (exact match gets higher score) - serverNameLower := strings.ToLower(serverName) - if serverNameLower == query { - match = true - score = 100 - } else if strings.Contains(serverNameLower, query) { - match = true - score = 50 - } - - // Check server title - if server.Title != "" { - titleLower := strings.ToLower(server.Title) - if titleLower == query { - match = true - score = maxInt(score, 97) - } else if strings.Contains(titleLower, query) { - match = true - score = maxInt(score, 47) - } - } - - // Check server 
description - if server.Description != "" { - descriptionLower := strings.ToLower(server.Description) - if descriptionLower == query { - match = true - score = maxInt(score, 95) - } else if strings.Contains(descriptionLower, query) { - match = true - score = maxInt(score, 45) - } - } - - // Check if it has tools that might match - for _, tool := range server.Tools { - toolNameLower := strings.ToLower(tool.Name) - toolDescLower := strings.ToLower(tool.Description) - - if toolNameLower == query { - match = true - score = maxInt(score, 90) - } else if strings.Contains(toolNameLower, query) { - match = true - score = maxInt(score, 40) - } else if strings.Contains(toolDescLower, query) { - match = true - score = maxInt(score, 30) - } - } - - // Check image name - if server.Image != "" { - imageLower := strings.ToLower(server.Image) - if strings.Contains(imageLower, query) { - match = true - score = maxInt(score, 20) - } - } - - if match { - matches = append(matches, ServerMatch{ - Name: serverName, - Server: server, - Score: score, - }) - } - } - - // Sort matches by score (higher scores first) - for i := range len(matches) - 1 { - for j := i + 1; j < len(matches); j++ { - if matches[i].Score < matches[j].Score { - matches[i], matches[j] = matches[j], matches[i] - } - } - } - - // Limit results - if len(matches) > params.Limit { - matches = matches[:params.Limit] - } - - // Format results - var results []map[string]any - for _, match := range matches { - serverInfo := map[string]any{ - "name": match.Name, - } - - if match.Server.Description != "" { - serverInfo["description"] = match.Server.Description - } - - if len(match.Server.Secrets) > 0 { - var secrets []string - for _, secret := range match.Server.Secrets { - secrets = append(secrets, secret.Name) - } - serverInfo["required_secrets"] = secrets - } - - if len(match.Server.Config) > 0 { - serverInfo["config_schema"] = match.Server.Config - } - - serverInfo["long_lived"] = match.Server.LongLived - - results = 
append(results, serverInfo) - } + return &ToolRegistration{ + Tool: tool, + Handler: withToolTelemetry("mcp-find", handler), + } +} - response := map[string]any{ - "query": params.Query, - "total_matches": len(results), - "servers": results, - } +func (g *Gateway) createMcpAddTool(clientConfig *clientConfig) *ToolRegistration { + tool := &mcp.Tool{ + Name: "mcp-add", + Description: `Add a new MCP server to the session. +The server must exist in the catalog.`, + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "Name of the MCP server to add to the registry (must exist in catalog)", + }, + "activate": { + Type: "boolean", + Description: "Activate all of the server's tools in the current session", + }, + }, + Required: []string{"name"}, + }, + } - responseBytes, err := json.Marshal(response) - if err != nil { - return nil, fmt.Errorf("failed to marshal response: %w", err) - } + return &ToolRegistration{ + Tool: tool, + Handler: withToolTelemetry("mcp-add", addServerHandler(g, clientConfig)), + } +} - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{Text: string(responseBytes)}}, - }, nil +// mcpConfigSetTool implements a tool for setting configuration values for MCP servers +func (g *Gateway) createMcpConfigSetTool(_ *clientConfig) *ToolRegistration { + tool := &mcp.Tool{ + Name: "mcp-config-set", + Description: `Set configuration for an MCP server. +The config object will be validated against the server's config schema. If validation fails, the error message will include the correct schema.`, + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "server": { + Type: "string", + Description: "Name of the MCP server to configure", + }, + "config": { + Type: "object", + Description: "Configuration object for the server. 
This will be validated against the server's config schema.", + }, + }, + Required: []string{"server", "config"}, + }, } return &ToolRegistration{ Tool: tool, - Handler: withToolTelemetry("mcp-find", handler), + Handler: withToolTelemetry("mcp-config-set", configSetHandler(g)), } } -// ServerMatch represents a search result -type ServerMatch struct { - Name string - Server catalog.Server - Score int +func (g *Gateway) createMcpCreateProfileTool(_ *clientConfig) *ToolRegistration { + tool := &mcp.Tool{ + Name: "mcp-create-profile", + Description: `Create or update a profile with the current gateway state. +A profile is a snapshot of all currently enabled servers and their configurations. +If a profile with the given name already exists, it will be updated with the current state.`, + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "name": { + Type: "string", + Description: "Name of the profile to create or update", + }, + }, + Required: []string{"name"}, + }, + } + + return &ToolRegistration{ + Tool: tool, + Handler: withToolTelemetry("mcp-create-profile", createProfileHandler(g)), + } } func (g *Gateway) createCodeModeTool(_ *clientConfig) *ToolRegistration { @@ -247,185 +150,10 @@ mcp-add, we don't have to activate the tools. 
Required: []string{"servers", "name"}, }, } - handler := func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { - // Parse parameters - var params struct { - Servers []string `json:"servers"` - Name string `json:"name"` - } - - if req.Params.Arguments == nil { - return nil, fmt.Errorf("missing arguments") - } - - paramsBytes, err := json.Marshal(req.Params.Arguments) - if err != nil { - return nil, fmt.Errorf("failed to marshal arguments: %w", err) - } - - if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { - return nil, fmt.Errorf("failed to parse arguments: %w", err) - } - - if len(params.Servers) == 0 { - return nil, fmt.Errorf("servers parameter is required and must not be empty") - } - - if params.Name == "" { - return nil, fmt.Errorf("name parameter is required") - } - - // Validate that all requested servers exist - for _, serverName := range params.Servers { - if _, _, found := g.configuration.Find(serverName); !found { - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{ - Text: fmt.Sprintf("Error: Server '%s' not found in configuration. 
Use mcp-find to search for available servers.", serverName), - }}, - }, nil - } - } - - // Create a tool set adapter for each server - var toolSets []codemode.ToolSet - for _, serverName := range params.Servers { - serverConfig, _, _ := g.configuration.Find(serverName) - toolSets = append(toolSets, &serverToolSetAdapter{ - gateway: g, - serverName: serverName, - serverConfig: serverConfig, - session: req.Session, - }) - } - - // Wrap the tool sets with codemode - wrappedToolSet := codemode.Wrap(toolSets) - - // Get the generated tool from the wrapped toolset - tools, err := wrappedToolSet.Tools(ctx) - if err != nil { - return nil, fmt.Errorf("failed to create code-mode tools: %w", err) - } - - // Use the first tool (the JavaScript execution tool with all servers' tools available) - if len(tools) == 0 { - return nil, fmt.Errorf("no tools generated from wrapped toolset") - } - - customTool := tools[0] - toolName := fmt.Sprintf("code-mode-%s", params.Name) - - // Customize the tool name and description - customTool.Tool.Name = toolName - - // Add the tool to the gateway's MCP server - g.mcpServer.AddTool(customTool.Tool, customTool.Handler) - - // Track the tool registration for capabilities and mcp-exec - g.capabilitiesMu.Lock() - g.toolRegistrations[toolName] = ToolRegistration{ - ServerName: "code-mode", - Tool: customTool.Tool, - Handler: customTool.Handler, - } - g.capabilitiesMu.Unlock() - - // Build detailed response with tool information - var responseText strings.Builder - responseText.WriteString(fmt.Sprintf("Successfully created code-mode tool '%s'\n\n", toolName)) - - // Tool description - responseText.WriteString("## Tool Details\n") - responseText.WriteString(fmt.Sprintf("**Name:** %s\n", toolName)) - responseText.WriteString(fmt.Sprintf("**Description:** %s\n\n", customTool.Tool.Description)) - - // Input schema information - responseText.WriteString("## Input Schema\n") - if customTool.Tool.InputSchema != nil { - schemaJSON, err := 
json.MarshalIndent(customTool.Tool.InputSchema, "", " ") - if err == nil { - responseText.WriteString("```json\n") - responseText.WriteString(string(schemaJSON)) - responseText.WriteString("\n```\n\n") - } - } - - // Available servers - responseText.WriteString("## Available Servers\n") - responseText.WriteString(fmt.Sprintf("This tool has access to tools from: %s\n\n", strings.Join(params.Servers, ", "))) - - // Usage instructions - responseText.WriteString("## How to Use\n") - responseText.WriteString("You can call this tool using the **mcp-exec** tool:\n") - responseText.WriteString("```json\n") - responseText.WriteString("{\n") - responseText.WriteString(fmt.Sprintf(" \"name\": \"%s\",\n", toolName)) - responseText.WriteString(" \"arguments\": {\n") - responseText.WriteString(" \"script\": \"\"\n") - responseText.WriteString(" }\n") - responseText.WriteString("}\n") - responseText.WriteString("```\n\n") - responseText.WriteString("The tool is now available in your session and can be executed via mcp-exec.") - - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{ - Text: responseText.String(), - }}, - }, nil - } return &ToolRegistration{ Tool: tool, - Handler: withToolTelemetry("code-mode", handler), - } -} - -// serverToolSetAdapter adapts a gateway server to the codemode.ToolSet interface -type serverToolSetAdapter struct { - gateway *Gateway - serverName string - serverConfig *catalog.ServerConfig - session *mcp.ServerSession -} - -func (a *serverToolSetAdapter) Tools(ctx context.Context) ([]*codemode.ToolWithHandler, error) { - // Get a client for this server - clientConfig := &clientConfig{ - serverSession: a.session, - server: a.gateway.mcpServer, - } - - client, err := a.gateway.clientPool.AcquireClient(ctx, a.serverConfig, clientConfig) - if err != nil { - return nil, fmt.Errorf("failed to acquire client for server %s: %w", a.serverName, err) - } - - // List tools from the server - listResult, err := client.Session().ListTools(ctx, 
&mcp.ListToolsParams{}) - if err != nil { - return nil, fmt.Errorf("failed to list tools from server %s: %w", a.serverName, err) + Handler: withToolTelemetry("code-mode", addCodemodeHandler(g)), } - - // Convert MCP tools to ToolWithHandler - var result []*codemode.ToolWithHandler - for _, tool := range listResult.Tools { - // Create a handler that calls the tool on the remote server - handler := func(tool *mcp.Tool) mcp.ToolHandler { - return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { - // Forward the tool call to the actual server - return client.Session().CallTool(ctx, &mcp.CallToolParams{ - Name: tool.Name, - Arguments: req.Params.Arguments, - }) - } - }(tool) - - result = append(result, &codemode.ToolWithHandler{ - Tool: tool, - Handler: handler, - }) - } - - return result, nil } // mcpRemoveTool implements a tool for removing servers from the registry @@ -445,58 +173,9 @@ func (g *Gateway) createMcpRemoveTool() *ToolRegistration { }, } - handler := func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { - // Parse parameters - var params struct { - Name string `json:"name"` - } - - if req.Params.Arguments == nil { - return nil, fmt.Errorf("missing arguments") - } - - paramsBytes, err := json.Marshal(req.Params.Arguments) - if err != nil { - return nil, fmt.Errorf("failed to marshal arguments: %w", err) - } - - if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { - return nil, fmt.Errorf("failed to parse arguments: %w", err) - } - - if params.Name == "" { - return nil, fmt.Errorf("name parameter is required") - } - - serverName := strings.TrimSpace(params.Name) - - // Remove the server from the current serverNames - updatedServerNames := slices.DeleteFunc(slices.Clone(g.configuration.serverNames), func(name string) bool { - return name == serverName - }) - - // Update the current configuration state - g.configuration.serverNames = updatedServerNames - - // Stop OAuth provider if this is an 
OAuth server - if g.McpOAuthDcrEnabled { - g.stopProvider(serverName) - } - - if err := g.removeServerConfiguration(ctx, serverName); err != nil { - return nil, fmt.Errorf("failed to remove server configuration: %w", err) - } - - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{ - Text: fmt.Sprintf("Successfully removed server '%s'.", serverName), - }}, - }, nil - } - return &ToolRegistration{ Tool: tool, - Handler: withToolTelemetry("mcp-remove", handler), + Handler: withToolTelemetry("mcp-remove", removeServerHandler(g)), } } @@ -517,316 +196,9 @@ func (g *Gateway) createMcpRegistryImportTool(configuration Configuration, _ *cl }, } - handler := func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { - // Parse parameters - var params struct { - URL string `json:"url"` - } - - if req.Params.Arguments == nil { - return nil, fmt.Errorf("missing arguments") - } - - paramsBytes, err := json.Marshal(req.Params.Arguments) - if err != nil { - return nil, fmt.Errorf("failed to marshal arguments: %w", err) - } - - if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { - return nil, fmt.Errorf("failed to parse arguments: %w", err) - } - - if params.URL == "" { - return nil, fmt.Errorf("url parameter is required") - } - - registryURL := strings.TrimSpace(params.URL) - - // Validate URL scheme - if !strings.HasPrefix(registryURL, "http://") && !strings.HasPrefix(registryURL, "https://") { - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{ - Text: fmt.Sprintf("Error: URL must start with http:// or https://, got: %s", registryURL), - }}, - }, nil - } - - // Fetch servers from the URL - servers, err := g.readServersFromURL(ctx, registryURL) - if err != nil { - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{ - Text: fmt.Sprintf("Error fetching servers from URL %s: %v", registryURL, err), - }}, - }, nil - } - - if len(servers) == 0 { - return &mcp.CallToolResult{ - Content: 
[]mcp.Content{&mcp.TextContent{ - Text: fmt.Sprintf("No servers found at URL: %s", registryURL), - }}, - }, nil - } - - // Add the imported servers to the current configuration and build detailed summary - var importedServerNames []string - var serverSummaries []string - - for serverName, server := range servers { - if _, exists := configuration.servers[serverName]; exists { - log.Log(fmt.Sprintf("Warning: server '%s' from URL %s overwrites existing server", serverName, registryURL)) - } - configuration.servers[serverName] = server - importedServerNames = append(importedServerNames, serverName) - - // Build detailed summary for this server - summary := fmt.Sprintf("• %s", serverName) - - if server.Description != "" { - summary += fmt.Sprintf("\n Description: %s", server.Description) - } - - if server.Image != "" { - summary += fmt.Sprintf("\n Image: %s", server.Image) - } - - // List required secrets - if len(server.Secrets) > 0 { - var secretNames []string - for _, secret := range server.Secrets { - secretNames = append(secretNames, secret.Name) - } - summary += fmt.Sprintf("\n Required Secrets: %s", strings.Join(secretNames, ", ")) - summary += "\n ⚠️ Configure these secrets before using this server" - } - - // List configuration schemas available - if len(server.Config) > 0 { - summary += fmt.Sprintf("\n Configuration Schemas: %d available", len(server.Config)) - summary += "\n ℹ️ Use mcp-config-set to configure optional settings" - } - - if server.LongLived { - summary += "\n 🔄 Long-lived server (stays running)" - } - - serverSummaries = append(serverSummaries, summary) - } - - // Create comprehensive result message - resultText := fmt.Sprintf("Successfully imported %d servers from %s\n\n", len(importedServerNames), registryURL) - resultText += strings.Join(serverSummaries, "\n\n") - - if len(importedServerNames) > 0 { - resultText += fmt.Sprintf("\n\n✅ Servers ready to use: %s", strings.Join(importedServerNames, ", ")) - } - - return &mcp.CallToolResult{ - 
Content: []mcp.Content{&mcp.TextContent{ - Text: resultText, - }}, - }, nil - } - return &ToolRegistration{ Tool: tool, - Handler: withToolTelemetry("mcp-registry-import", handler), - } -} - -// readServersFromURL fetches and parses server definitions from a URL -// -//nolint:unused -func (g *Gateway) readServersFromURL(ctx context.Context, url string) (map[string]catalog.Server, error) { - servers := make(map[string]catalog.Server) - - log.Log(fmt.Sprintf(" - Reading servers from URL: %s", url)) - - // Create HTTP request with context - req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) - if err != nil { - return nil, fmt.Errorf("failed to create HTTP request: %w", err) - } - - // Set a reasonable user agent - req.Header.Set("User-Agent", "docker-mcp-gateway/1.0.0") - - // Make the HTTP request - client := &http.Client{} - resp, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to fetch URL: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("HTTP request failed with status %d: %s", resp.StatusCode, resp.Status) - } - - // Read the response body - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read response body: %w", err) - } - - // Try to parse as oci.ServerDetail (the new structure) - var serverDetail oci.ServerDetail - if err := json.Unmarshal(body, &serverDetail); err == nil && serverDetail.Name != "" { - // Successfully parsed as ServerDetail - convert to catalog.Server - server := serverDetail.ToCatalogServer() - - serverName := serverDetail.Name - servers[serverName] = server - log.Log(fmt.Sprintf(" - Added server '%s' from URL %s", serverName, url)) - return servers, nil - } - - return nil, fmt.Errorf("unable to parse response as OCI catalog or direct catalog format") -} - -type configValue struct { - Server string `json:"server"` - Key string `json:"key"` - Value any `json:"value"` -} - -// formatConfigValue formats 
a config value for display, handling arrays, objects, and primitives -func formatConfigValue(value any) string { - if value == nil { - return "null" - } - - // Try to format as JSON for complex types - switch v := value.(type) { - case string: - return fmt.Sprintf("%q", v) - case []any: - // Format array with proper JSON - jsonBytes, err := json.Marshal(v) - if err != nil { - return fmt.Sprintf("%v", v) - } - return string(jsonBytes) - case map[string]any: - // Format object with proper JSON - jsonBytes, err := json.Marshal(v) - if err != nil { - return fmt.Sprintf("%v", v) - } - return string(jsonBytes) - default: - // For numbers, booleans, etc. - return fmt.Sprintf("%v", v) - } -} - -// mcpConfigSetTool implements a tool for setting configuration values for MCP servers -func (g *Gateway) createMcpConfigSetTool(_ *clientConfig) *ToolRegistration { - tool := &mcp.Tool{ - Name: "mcp-config-set", - Description: "Set configuration values for MCP servers. Creates or updates server configuration with the specified key-value pairs. Supports strings, numbers, booleans, objects, and arrays.", - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "server": { - Type: "string", - Description: "Name of the MCP server to configure", - }, - "key": { - Type: "string", - Description: "Configuration key to set. 
This is not to be prefixed by the server name.", - }, - "value": { - Types: []string{"string", "number", "boolean", "object", "array"}, - Description: "Configuration value to set (can be string, number, boolean, object, or array)", - Items: &jsonschema.Schema{Type: "object"}, - }, - }, - Required: []string{"server", "key", "value"}, - }, - } - - handler := func(_ context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { - // Parse parameters - var params configValue - - if req.Params.Arguments == nil { - return nil, fmt.Errorf("missing arguments") - } - - paramsBytes, err := json.Marshal(req.Params.Arguments) - if err != nil { - return nil, fmt.Errorf("failed to marshal arguments: %w", err) - } - - if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { - return nil, fmt.Errorf("failed to parse arguments: %w", err) - } - - if params.Server == "" { - return nil, fmt.Errorf("server parameter is required") - } - - if params.Key == "" { - return nil, fmt.Errorf("key parameter is required") - } - - serverName := strings.TrimSpace(params.Server) - configKey := strings.TrimSpace(params.Key) - - // Decode JSON-encoded values (e.g., arrays passed as strings) - finalValue := params.Value - if strValue, ok := params.Value.(string); ok { - // Try to JSON decode the string value - var decoded any - if err := json.Unmarshal([]byte(strValue), &decoded); err == nil { - // Successfully decoded - use the decoded value - finalValue = decoded - } - // If decoding fails, keep the original string value - } - - // Check if server exists in catalog (optional check - we can configure servers that don't exist yet) - _, _, serverExists := g.configuration.Find(serverName) - - // Initialize the server's config map if it doesn't exist - if g.configuration.config[serverName] == nil { - g.configuration.config[serverName] = make(map[string]any) - } - - // Set the configuration value - oldValue := g.configuration.config[serverName][configKey] - 
g.configuration.config[serverName][configKey] = finalValue - - // Format the value for display - valueStr := formatConfigValue(finalValue) - oldValueStr := formatConfigValue(oldValue) - - // Log the configuration change - log.Log(fmt.Sprintf(" - Set config for server '%s': %s = %s", serverName, configKey, valueStr)) - - var resultMessage string - if oldValue != nil { - resultMessage = fmt.Sprintf("Successfully updated config for server '%s': %s = %s (was: %s)", serverName, configKey, valueStr, oldValueStr) - } else { - resultMessage = fmt.Sprintf("Successfully set config for server '%s': %s = %s", serverName, configKey, valueStr) - } - - if !serverExists { - resultMessage += fmt.Sprintf(" (Note: server '%s' is not in the current catalog)", serverName) - } - - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{ - Text: resultMessage, - }}, - }, nil - } - - return &ToolRegistration{ - Tool: tool, - Handler: withToolTelemetry("mcp-config-set", handler), + Handler: withToolTelemetry("mcp-registry-import", registryImportHandler(g, configuration)), } } @@ -844,92 +216,17 @@ func (g *Gateway) createMcpExecTool() *ToolRegistration { Description: "Name of the tool to execute", }, "arguments": { - Types: []string{"string", "number", "boolean", "object", "array", "null"}, - Description: "Arguments to pass to the tool (can be any valid JSON value)", + Type: "object", + Description: "Arguments to use for the tool call.", }, }, Required: []string{"name"}, }, } - handler := func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { - // Parse parameters - var params struct { - Name string `json:"name"` - Arguments json.RawMessage `json:"arguments"` - } - - if req.Params.Arguments == nil { - return nil, fmt.Errorf("missing arguments") - } - - paramsBytes, err := json.Marshal(req.Params.Arguments) - if err != nil { - return nil, fmt.Errorf("failed to marshal arguments: %w", err) - } - - if err := json.Unmarshal(paramsBytes, ¶ms); err != 
nil { - return nil, fmt.Errorf("failed to parse arguments: %w", err) - } - - if params.Name == "" { - return nil, fmt.Errorf("name parameter is required") - } - - toolName := strings.TrimSpace(params.Name) - - // Look up the tool in current tool registrations - g.capabilitiesMu.RLock() - toolReg, found := g.toolRegistrations[toolName] - g.capabilitiesMu.RUnlock() - - if !found { - return &mcp.CallToolResult{ - Content: []mcp.Content{&mcp.TextContent{ - Text: fmt.Sprintf("Error: Tool '%s' not found in current session. Make sure the server providing this tool is added to the session.", toolName), - }}, - }, nil - } - - // Handle the case where arguments might be a JSON-encoded string - // This happens when the schema previously specified Type: "string" - var toolArguments json.RawMessage - if len(params.Arguments) > 0 { - // Try to unmarshal as a string first (for backward compatibility) - var argString string - if err := json.Unmarshal(params.Arguments, &argString); err == nil { - // It was a JSON string, use the unescaped content - toolArguments = json.RawMessage(argString) - } else { - // It's already a proper JSON object/value - toolArguments = params.Arguments - } - } - - // Create a new CallToolRequest with the provided arguments - log.Logf("calling tool %s with %s", toolName, toolArguments) - toolCallRequest := &mcp.CallToolRequest{ - Session: req.Session, - Params: &mcp.CallToolParamsRaw{ - Meta: req.Params.Meta, - Name: toolName, - Arguments: toolArguments, - }, - Extra: req.Extra, - } - - // Execute the tool using its registered handler - result, err := toolReg.Handler(ctx, toolCallRequest) - if err != nil { - return nil, fmt.Errorf("tool execution failed: %w", err) - } - - return result, nil - } - return &ToolRegistration{ Tool: tool, - Handler: withToolTelemetry("mcp-exec", handler), + Handler: withToolTelemetry("mcp-exec", addMcpExecHandler(g)), } } @@ -953,14 +250,6 @@ func (g *Gateway) _createMcpCatalogTool() *ToolRegistration { } } -// maxInt returns 
the maximum of two integers -func maxInt(a, b int) int { - if a > b { - return a - } - return b -} - // withToolTelemetry wraps a tool handler with telemetry instrumentation func withToolTelemetry(toolName string, handler mcp.ToolHandler) mcp.ToolHandler { return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { diff --git a/pkg/gateway/embeddings/README.md b/pkg/gateway/embeddings/README.md new file mode 100644 index 00000000..72c9179e --- /dev/null +++ b/pkg/gateway/embeddings/README.md @@ -0,0 +1,185 @@ +# Embeddings Package + +This package provides a Go client for the vector-db MCP server running in a Docker container. It's a translation of the Clojure namespace from `test/embeddings/clj/vector_db_process.clj`. + +## Overview + +The embeddings package provides: + +1. **Container Management** - Automatically starts and manages the vector-db Docker container +2. **MCP Client** - Connects to the vector database via the official Go MCP SDK +3. **Vector Operations** - High-level functions for working with vector collections and embeddings + +## Features + +- Start/stop vector DB container automatically +- MCP protocol communication via stdio +- Collection management (create, delete, list) +- Vector operations (add, delete, search) +- Cosine distance similarity search +- Metadata support for vectors +- Full type safety with Go + +## Usage + +### Basic Example + +```go +package main + +import ( + "context" + "log" + + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" +) + +func main() { + ctx := context.Background() + + // Create client (this starts the container) + // The dimension parameter specifies the vector dimension (1536 for OpenAI embeddings) + client, err := embeddings.NewVectorDBClient(ctx, "./data", 1536, nil) + if err != nil { + log.Fatal(err) + } + defer client.Close() + + // Create a collection + _, err = client.CreateCollection(ctx, "my-vectors") + if err != nil { + log.Fatal(err) + } + + // Add a vector (1536 
dimensions for OpenAI embeddings) + vector := make([]float64, 1536) + for i := range vector { + vector[i] = 0.1 // Your actual embedding values here + } + + metadata := map[string]interface{}{ + "text": "This is my document", + "source": "example.txt", + } + + _, err = client.AddVector(ctx, "my-vectors", vector, metadata) + if err != nil { + log.Fatal(err) + } + + // Search for similar vectors + results, err := client.SearchVectors(ctx, vector, &embeddings.SearchOptions{ + CollectionName: "my-vectors", + Limit: 10, + }) + if err != nil { + log.Fatal(err) + } + + for _, result := range results { + log.Printf("Match: ID=%d, Distance=%f, Metadata=%v\n", + result.ID, result.Distance, result.Metadata) + } +} +``` + +### Collection Operations + +```go +// List all collections +collections, err := client.ListCollections(ctx) + +// Delete a collection +_, err = client.DeleteCollection(ctx, "my-vectors") +``` + +### Vector Operations + +```go +// Add vector with metadata +metadata := map[string]interface{}{ + "title": "My Document", + "category": "research", +} +result, err := client.AddVector(ctx, "collection-name", vector, metadata) + +// Search with options +results, err := client.SearchVectors(ctx, queryVector, &embeddings.SearchOptions{ + CollectionName: "my-vectors", // Search in specific collection + Limit: 20, // Return top 20 results +}) + +// Search across multiple collections (exclude some) +results, err := client.SearchVectors(ctx, queryVector, &embeddings.SearchOptions{ + ExcludeCollections: []string{"test-data"}, + Limit: 10, +}) + +// Delete a vector by ID +_, err = client.DeleteVector(ctx, vectorID) +``` + +### Advanced: Direct Tool Access + +```go +// List available MCP tools +tools, err := client.ListTools(ctx) + +// Call any tool directly +result, err := client.CallTool(ctx, "tool-name", map[string]interface{}{ + "param1": "value1", + "param2": 123, +}) +``` + +## Key Differences from Clojure Version + +1. 
**Simplified API**: Uses `CommandTransport` instead of manual pipe management +2. **Automatic Initialization**: MCP initialization happens during `Connect()` +3. **Strong Typing**: Uses Go structs instead of dynamic maps +4. **Error Handling**: Explicit error returns instead of Clojure's exception model +5. **Concurrency**: Uses `sync.Mutex` instead of Clojure's core.async channels + +## Vector Database Details + +- **Image**: `jimclark106/vector-db:latest` +- **Vector Dimension**: Configurable via the `dimension` parameter (default: 1536 for OpenAI embeddings) + - Pass `0` or negative value to use default (1536) + - Common dimensions: 1536 (OpenAI), 768 (sentence transformers), 384 (MiniLM) +- **Database**: SQLite with vec extension +- **Transport**: stdio (JSON-RPC over stdin/stdout) + +## Requirements + +- Docker daemon running +- Go 1.24+ +- The official MCP Go SDK (`github.com/modelcontextprotocol/go-sdk/mcp`) + +## Architecture + +``` +┌─────────────────┐ +│ Your Go App │ +└────────┬────────┘ + │ + ▼ +┌─────────────────┐ +│ VectorDBClient │ (this package) +└────────┬────────┘ + │ + ▼ +┌─────────────────┐ +│ MCP Client │ (go-sdk/mcp) +└────────┬────────┘ + │ stdio + ▼ +┌─────────────────┐ +│ Docker Container│ (jimclark106/vector-db) +└─────────────────┘ +``` + +## See Also + +- Original Clojure implementation: `test/embeddings/clj/vector_db_process.clj` +- MCP Go SDK: https://github.com/modelcontextprotocol/go-sdk +- Example usage: `example_test.go` diff --git a/pkg/gateway/embeddings/client.go b/pkg/gateway/embeddings/client.go new file mode 100644 index 00000000..efa5837f --- /dev/null +++ b/pkg/gateway/embeddings/client.go @@ -0,0 +1,358 @@ +package embeddings + +import ( + "context" + "encoding/json" + "fmt" + "os/exec" + "sync" + "time" + + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/log" +) + +// VectorDBClient wraps the MCP client connection to the vector DB server +type VectorDBClient struct { + cmd *exec.Cmd + 
client *mcp.Client + session *mcp.ClientSession + containerName string + logFunc func(string) + mu sync.Mutex +} + +// NewVectorDBClient creates a new MCP client and starts the vector DB container. +// The dataDir parameter specifies where the vector database will store its data. +// The dimension parameter specifies the vector dimension (default 1536 for OpenAI embeddings). +// The logFunc parameter is optional and can be used to log MCP messages. +func NewVectorDBClient(ctx context.Context, dataDir string, dimension int, logFunc func(string)) (*VectorDBClient, error) { + // Use default dimension if not specified + if dimension <= 0 { + dimension = 1536 + } + + // Generate a unique container name + containerName := fmt.Sprintf("vector-db-%d", time.Now().UnixNano()) + + // Create the docker command to run the vector-db container + cmd := exec.CommandContext(ctx, + "docker", "run", "-i", "--rm", + "--name", containerName, + "--platform", "linux/amd64", + "-v", fmt.Sprintf("%s:/data", dataDir), + "-e", "DB_PATH=/data/vectors.db", + "-e", fmt.Sprintf("VECTOR_DIMENSION=%d", dimension), + "jimclark106/vector-db:latest", + ) + + client := &VectorDBClient{ + cmd: cmd, + containerName: containerName, + logFunc: logFunc, + } + + // Create MCP client with notification handlers + mcpClient := mcp.NewClient( + &mcp.Implementation{ + Name: "vector-db-client", + Version: "1.0.0", + }, + &mcp.ClientOptions{ + LoggingMessageHandler: func(_ context.Context, req *mcp.LoggingMessageRequest) { + if client.logFunc != nil { + msg := fmt.Sprintf("LOG: %s - %s", req.Params.Level, req.Params.Data) + client.logFunc(msg) + } + }, + }, + ) + + // Use CommandTransport which handles all the stdio plumbing + transport := &mcp.CommandTransport{Command: cmd} + + // Connect to the MCP server (this starts the command) + session, err := mcpClient.Connect(ctx, transport, nil) + if err != nil { + return nil, fmt.Errorf("failed to connect to MCP server: %w", err) + } + + client.client = mcpClient + 
client.session = session + + return client, nil +} + +// IsAlive checks if the container process is still running +func (c *VectorDBClient) IsAlive() bool { + c.mu.Lock() + defer c.mu.Unlock() + + if c.cmd == nil || c.cmd.Process == nil { + return false + } + + // On Unix, sending signal 0 checks if process exists + err := c.cmd.Process.Signal(nil) + return err == nil +} + +// Wait waits for the container to exit and returns any error +func (c *VectorDBClient) Wait() error { + if c.cmd == nil { + return nil + } + return c.cmd.Wait() +} + +// Session returns the MCP client session +func (c *VectorDBClient) Session() *mcp.ClientSession { + c.mu.Lock() + defer c.mu.Unlock() + return c.session +} + +// ListTools lists available tools from the MCP server +func (c *VectorDBClient) ListTools(ctx context.Context) (*mcp.ListToolsResult, error) { + c.mu.Lock() + defer c.mu.Unlock() + + if c.session == nil { + return nil, fmt.Errorf("session not initialized") + } + + ctx, cancel := context.WithTimeout(ctx, 5*time.Second) + defer cancel() + + result, err := c.session.ListTools(ctx, &mcp.ListToolsParams{}) + if err != nil { + return nil, fmt.Errorf("list tools request failed: %w", err) + } + + return result, nil +} + +// CallTool calls a tool on the MCP server with the given name and arguments. +// The arguments parameter accepts any type - the MCP SDK handles JSON marshaling. 
+func (c *VectorDBClient) CallTool(ctx context.Context, toolName string, arguments any) (*mcp.CallToolResult, error) { + c.mu.Lock() + defer c.mu.Unlock() + + if c.session == nil { + return nil, fmt.Errorf("session not initialized") + } + + ctx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + result, err := c.session.CallTool(ctx, &mcp.CallToolParams{ + Name: toolName, + Arguments: arguments, + }) + if err != nil { + return nil, fmt.Errorf("tool call '%s' failed: %w", toolName, err) + } + + return result, nil +} + +// Close closes the MCP client session and stops the Docker container +func (c *VectorDBClient) Close() error { + c.mu.Lock() + defer c.mu.Unlock() + + var sessionErr error + if c.session != nil { + sessionErr = c.session.Close() + } + + log.Log("close the DBClient") + // Stop the Docker container using docker stop + // This properly signals the container to shut down + if c.containerName != "" { + log.Logf("Stopping container: %s", c.containerName) + stopCmd := exec.Command("docker", "stop", "-t", "2", c.containerName) + if err := stopCmd.Run(); err != nil { + // Container might already be stopped or removed - that's fine + log.Logf("Container %s stop result: %v (this is expected if already stopped)", c.containerName, err) + } + // Clear the container name so we don't try to stop it again + c.containerName = "" + } + + // Wait for the docker run process to exit if it hasn't already + // The --rm flag will automatically remove the container after it stops + if c.cmd != nil { + log.Log("Waiting for docker run process to exit") + // Wait will reap the process and clean up resources + // Ignore "wait was already called" or "no child processes" errors + waitErr := c.cmd.Wait() + if waitErr != nil && waitErr.Error() != "exec: Wait was already called" { + log.Logf("Docker run process exited with: %v", waitErr) + } + c.cmd = nil + } + + log.Log("DBClient closed") + return sessionErr +} + +// 
================================================== +// Vector DB Tool Operations +// ================================================== + +// Collection represents a vector collection +type Collection struct { + Name string `json:"name"` +} + +// SearchResult represents a single search result +type SearchResult struct { + ID int64 `json:"id"` + Collection string `json:"collection"` + Distance float64 `json:"distance"` + Metadata map[string]any `json:"metadata"` + VectorLength int `json:"vector_length"` +} + +// CreateCollection creates a new vector collection +func (c *VectorDBClient) CreateCollection(ctx context.Context, collectionName string) (*mcp.CallToolResult, error) { + return c.CallTool(ctx, "create_collection", map[string]any{ + "name": collectionName, + }) +} + +// DeleteCollection deletes a collection and all its vectors +func (c *VectorDBClient) DeleteCollection(ctx context.Context, collectionName string) (*mcp.CallToolResult, error) { + return c.CallTool(ctx, "delete_collection", map[string]any{ + "name": collectionName, + }) +} + +// ListCollections lists all vector collections in the database. +// Returns a slice of collection names. 
+func (c *VectorDBClient) ListCollections(ctx context.Context) ([]string, error) { + result, err := c.CallTool(ctx, "list_collections", map[string]any{}) + if err != nil { + return nil, err + } + + if result.IsError { + return nil, fmt.Errorf("tool returned error: %s", result.Content) + } + + // Parse the result content + if len(result.Content) == 0 { + return nil, fmt.Errorf("empty response from list_collections") + } + + // Extract text from content + var textContent string + for _, content := range result.Content { + if tc, ok := content.(*mcp.TextContent); ok { + textContent = tc.Text + break + } + } + + if textContent == "" { + return nil, fmt.Errorf("no text content in response") + } + + // Parse the JSON response + var collections []string + if err := json.Unmarshal([]byte(textContent), &collections); err != nil { + return nil, fmt.Errorf("failed to parse collections response: %w", err) + } + + return collections, nil +} + +// AddVector adds a vector to a collection (creates collection if it doesn't exist). +// The vector must be a slice of 1536 float64 numbers. +// Metadata is optional. 
+func (c *VectorDBClient) AddVector(ctx context.Context, collectionName string, vector []float64, metadata map[string]any) (*mcp.CallToolResult, error) { + args := map[string]any{ + "collection_name": collectionName, + "vector": vector, + } + + if metadata != nil { + args["metadata"] = metadata + } + + return c.CallTool(ctx, "add_vector", args) +} + +// DeleteVector deletes a vector by its ID +func (c *VectorDBClient) DeleteVector(ctx context.Context, vectorID int64) (*mcp.CallToolResult, error) { + return c.CallTool(ctx, "delete_vector", map[string]any{ + "id": vectorID, + }) +} + +// SearchOptions contains options for vector search +type SearchOptions struct { + CollectionName string // Search only within this collection + ExcludeCollections []string // Collections to exclude from search + Limit int // Maximum number of results (default 10) +} + +// SearchArgs combines search options with the vector for the search tool call +type SearchArgs struct { + Vector []float64 `json:"vector"` + CollectionName string `json:"collection_name,omitempty"` + ExcludeCollections []string `json:"exclude_collections,omitempty"` + Limit int `json:"limit,omitempty"` +} + +// SearchVectors searches for similar vectors using cosine distance. +// The vector must be a slice of 1536 float64 numbers. +// Returns a slice of search results. 
+func (c *VectorDBClient) SearchVectors(ctx context.Context, vector []float64, options *SearchOptions) ([]SearchResult, error) { + args := SearchArgs{ + Vector: vector, + } + + if options != nil { + args.CollectionName = options.CollectionName + args.ExcludeCollections = options.ExcludeCollections + args.Limit = options.Limit + } + + result, err := c.CallTool(ctx, "search", args) + if err != nil { + return nil, err + } + + if result.IsError { + return nil, fmt.Errorf("tool returned error: %s", result.Content) + } + + // Parse the result content + if len(result.Content) == 0 { + return nil, fmt.Errorf("empty response from search") + } + + // Extract text from content + var textContent string + for _, content := range result.Content { + if tc, ok := content.(*mcp.TextContent); ok { + textContent = tc.Text + break + } + } + + if textContent == "" { + return nil, fmt.Errorf("no text content in response") + } + + // Parse the JSON response + var results []SearchResult + if err := json.Unmarshal([]byte(textContent), &results); err != nil { + return nil, fmt.Errorf("failed to parse search response: %w", err) + } + + return results, nil +} diff --git a/pkg/gateway/embeddings/client_test.go b/pkg/gateway/embeddings/client_test.go new file mode 100644 index 00000000..4027e5f0 --- /dev/null +++ b/pkg/gateway/embeddings/client_test.go @@ -0,0 +1,151 @@ +package embeddings_test + +import ( + "context" + "os/exec" + "testing" + "time" + + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" +) + +// TestCloseStopsContainer verifies that Close() stops the Docker container +func TestCloseStopsContainer(t *testing.T) { + if testing.Short() { + t.Skip("Skipping long-running test in short mode") + } + + ctx := context.Background() + + // Create a temporary data directory for the test + tmpDir := t.TempDir() + + // Count containers before starting + countCmd := exec.Command("docker", "ps", "-q", "--filter", "ancestor=jimclark106/vector-db:latest") + beforeOutput, err := 
countCmd.Output() + if err != nil { + t.Fatalf("Failed to check docker containers: %v", err) + } + containersBefore := len(string(beforeOutput)) + + // Create client (starts container) + client, err := embeddings.NewVectorDBClient(ctx, tmpDir, 1536, func(msg string) { + t.Log(msg) + }) + if err != nil { + t.Fatalf("Failed to create client: %v", err) + } + + // Give the container a moment to start + time.Sleep(1 * time.Second) + + // Verify container is running by checking docker ps + countCmd = exec.Command("docker", "ps", "-q", "--filter", "ancestor=jimclark106/vector-db:latest") + afterStartOutput, err := countCmd.Output() + if err != nil { + t.Fatalf("Failed to check docker containers after start: %v", err) + } + containersAfterStart := len(string(afterStartOutput)) + + if containersAfterStart <= containersBefore { + t.Skip("Container failed to start - image may not be available") + } + + t.Logf("Container started successfully (before: %d, after: %d)", containersBefore, containersAfterStart) + + // Close the client (should stop the container) + if err := client.Close(); err != nil { + t.Errorf("Close() returned error: %v", err) + } + + // Give docker a moment to clean up (the --rm flag should auto-remove) + time.Sleep(1 * time.Second) + + // Verify container is stopped and removed + countCmd = exec.Command("docker", "ps", "-a", "-q", "--filter", "ancestor=jimclark106/vector-db:latest") + afterCloseOutput, err := countCmd.Output() + if err != nil { + t.Fatalf("Failed to check docker containers after close: %v", err) + } + containersAfterClose := len(string(afterCloseOutput)) + + if containersAfterClose > containersBefore { + t.Errorf("Container not cleaned up after Close(). 
Before: %d, After close: %d", containersBefore, containersAfterClose) + // Show the containers that are still running + showCmd := exec.Command("docker", "ps", "-a", "--filter", "ancestor=jimclark106/vector-db:latest") + output, _ := showCmd.Output() + t.Logf("Remaining containers:\n%s", string(output)) + } else { + t.Logf("Container successfully stopped and removed (before: %d, after: %d)", containersBefore, containersAfterClose) + } +} + +// TestCloseIdempotent verifies that calling Close() multiple times is safe +func TestCloseIdempotent(t *testing.T) { + if testing.Short() { + t.Skip("Skipping long-running test in short mode") + } + + ctx := context.Background() + tmpDir := t.TempDir() + + client, err := embeddings.NewVectorDBClient(ctx, tmpDir, 1536, nil) + if err != nil { + t.Fatalf("Failed to create client: %v", err) + } + + // Close multiple times should not panic or error + if err := client.Close(); err != nil { + t.Errorf("First Close() returned error: %v", err) + } + + if err := client.Close(); err != nil { + t.Errorf("Second Close() returned error: %v", err) + } + + if err := client.Close(); err != nil { + t.Errorf("Third Close() returned error: %v", err) + } +} + +// TestDimensionParameter verifies that different dimension values work correctly +func TestDimensionParameter(t *testing.T) { + if testing.Short() { + t.Skip("Skipping long-running test in short mode") + } + + testCases := []struct { + name string + dimension int + expected int // expected dimension after normalization + }{ + {"Default 1536", 1536, 1536}, + {"Custom 768", 768, 768}, + {"Custom 384", 384, 384}, + {"Zero defaults to 1536", 0, 1536}, + {"Negative defaults to 1536", -1, 1536}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + client, err := embeddings.NewVectorDBClient(ctx, tmpDir, tc.dimension, nil) + if err != nil { + t.Fatalf("Failed to create client with dimension %d: %v", tc.dimension, err) 
+ } + defer client.Close() + + // Give container a moment to start + time.Sleep(1 * time.Second) + + // Verify container is running + if !client.IsAlive() { + t.Skip("Container failed to start") + } + + t.Logf("Successfully created client with dimension %d (expected: %d)", tc.dimension, tc.expected) + }) + } +} diff --git a/pkg/gateway/embeddings/example_test.go b/pkg/gateway/embeddings/example_test.go new file mode 100644 index 00000000..2d931e2d --- /dev/null +++ b/pkg/gateway/embeddings/example_test.go @@ -0,0 +1,138 @@ +package embeddings_test + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" +) + +// Example demonstrates how to use the vector DB client +func Example() { + ctx := context.Background() + + // Create a client which starts the vector DB container + client, err := embeddings.NewVectorDBClient(ctx, "./data", 1536, func(msg string) { + fmt.Println(msg) + }) + if err != nil { + log.Fatalf("Failed to create client: %v", err) + } + defer client.Close() + + // Check if container is alive + if !client.IsAlive() { + log.Fatal("Container is not running") + } + + // List available tools (connection is already initialized) + toolsResult, err := client.ListTools(ctx) + if err != nil { + log.Fatalf("Failed to list tools: %v", err) + } + fmt.Printf("Available tools: %d\n", len(toolsResult.Tools)) + + // Create a collection + _, err = client.CreateCollection(ctx, "my-collection") + if err != nil { + log.Fatalf("Failed to create collection: %v", err) + } + + // List collections + collections, err := client.ListCollections(ctx) + if err != nil { + log.Fatalf("Failed to list collections: %v", err) + } + fmt.Printf("Collections: %v\n", collections) + + // Add a vector (1536 dimensions) + sampleVector := make([]float64, 1536) + for i := range sampleVector { + sampleVector[i] = 0.1 + } + metadata := map[string]any{ + "name": "test-doc", + } + _, err = client.AddVector(ctx, "my-collection", sampleVector, metadata) 
+ if err != nil { + log.Fatalf("Failed to add vector: %v", err) + } + + // Search for similar vectors + results, err := client.SearchVectors(ctx, sampleVector, &embeddings.SearchOptions{ + CollectionName: "my-collection", + Limit: 5, + }) + if err != nil { + log.Fatalf("Failed to search vectors: %v", err) + } + fmt.Printf("Search results: %d\n", len(results)) + for _, result := range results { + fmt.Printf(" ID: %d, Distance: %f, Collection: %s\n", + result.ID, result.Distance, result.Collection) + } + + // Delete a vector by ID + if len(results) > 0 { + _, err = client.DeleteVector(ctx, results[0].ID) + if err != nil { + log.Fatalf("Failed to delete vector: %v", err) + } + } + + // Delete a collection + _, err = client.DeleteCollection(ctx, "my-collection") + if err != nil { + log.Fatalf("Failed to delete collection: %v", err) + } +} + +// Example_withTimeout demonstrates usage with context timeouts +func Example_withTimeout() { + // Create a context with timeout + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + // Create client with the timeout context + client, err := embeddings.NewVectorDBClient(ctx, "./data", 1536, nil) + if err != nil { + log.Fatalf("Failed to create client: %v", err) + } + defer client.Close() + + // Perform operations (connection is already initialized) + collections, err := client.ListCollections(ctx) + if err != nil { + log.Fatalf("Failed to list collections: %v", err) + } + fmt.Printf("Collections: %v\n", collections) +} + +// Example_longRunning demonstrates waiting for container completion +func Example_longRunning() { + ctx := context.Background() + + client, err := embeddings.NewVectorDBClient(ctx, "./data", 1536, nil) + if err != nil { + log.Fatalf("Failed to create client: %v", err) + } + + // In a separate goroutine, wait for container to exit + go func() { + if err := client.Wait(); err != nil { + log.Printf("Container exited with error: %v", err) + } else { + log.Println("Container 
 exited successfully") + } + }() + + // Do work with the client (already initialized)... + // For example: client.ListCollections(ctx), client.SearchVectors(ctx, ...), etc. + + // When done, close the client (which stops the container) + if err := client.Close(); err != nil { + log.Printf("Failed to close client: %v", err) + } +} diff --git a/pkg/gateway/embeddings/oci.go b/pkg/gateway/embeddings/oci.go new file mode 100644 index 00000000..5e9d9438 --- /dev/null +++ b/pkg/gateway/embeddings/oci.go @@ -0,0 +1,387 @@ +package embeddings + +import ( + "archive/tar" + "bytes" + "context" + "fmt" + "io" + "os" + "path/filepath" + + "github.com/google/go-containerregistry/pkg/authn" + "github.com/google/go-containerregistry/pkg/name" + "github.com/google/go-containerregistry/pkg/v1/empty" + "github.com/google/go-containerregistry/pkg/v1/mutate" + "github.com/google/go-containerregistry/pkg/v1/remote" + "github.com/google/go-containerregistry/pkg/v1/tarball" + + "github.com/docker/mcp-gateway/pkg/log" + "github.com/docker/mcp-gateway/pkg/user" +) + +const ( + embeddingsImageRef = "jimclark106/embeddings:latest" + vectorDBFileName = "vectors.db" +) + +// Pull downloads the embeddings OCI artifact, extracts it to a temp directory, +// and copies the vectors.db directory to ~/.docker/mcp if it doesn't already exist. 
+// +// Example usage: +// +// go run ./examples/embeddings/pull.go +func Pull(ctx context.Context) error { + // Get the home directory to determine the target path + homeDir, err := user.HomeDir() + if err != nil { + return fmt.Errorf("failed to get home directory: %w", err) + } + + mcpDir := filepath.Join(homeDir, ".docker", "mcp") + targetPath := filepath.Join(mcpDir, vectorDBFileName) + + // Check if vectors.db already exists + if _, err := os.Stat(targetPath); err == nil { + log.Logf("Vector database already exists at %s, skipping download", targetPath) + return nil + } + + log.Logf("Downloading embeddings from %s", embeddingsImageRef) + + // Parse the image reference + ref, err := name.ParseReference(embeddingsImageRef) + if err != nil { + return fmt.Errorf("failed to parse image reference: %w", err) + } + + // Pull the image + img, err := remote.Image(ref, remote.WithAuthFromKeychain(authn.DefaultKeychain), remote.WithContext(ctx)) + if err != nil { + return fmt.Errorf("failed to pull image: %w", err) + } + + // Create a temporary directory for extraction + tmpDir, err := os.MkdirTemp("", "embeddings-extract-*") + if err != nil { + return fmt.Errorf("failed to create temp directory: %w", err) + } + defer os.RemoveAll(tmpDir) + + log.Logf("Extracting image to temporary directory: %s", tmpDir) + + // Get all layers + layers, err := img.Layers() + if err != nil { + return fmt.Errorf("failed to get image layers: %w", err) + } + + // Extract each layer + for i, layer := range layers { + if err := extractLayer(layer, tmpDir); err != nil { + return fmt.Errorf("failed to extract layer %d: %w", i, err) + } + } + + // Verify that vectors.db directory exists in the extracted content + extractedVectorDB := filepath.Join(tmpDir, vectorDBFileName) + if _, err := os.Stat(extractedVectorDB); os.IsNotExist(err) { + return fmt.Errorf("vectors.db directory not found in extracted image") + } + + // Ensure the target directory exists + if err := os.MkdirAll(mcpDir, 0o755); err != 
 nil { + return fmt.Errorf("failed to create mcp directory: %w", err) + } + + // Copy vectors.db directory to ~/.docker/mcp + log.Logf("Copying vector.db to %s", targetPath) + if err := copyDir(extractedVectorDB, targetPath); err != nil { + return fmt.Errorf("failed to copy vector.db directory: %w", err) + } + + log.Logf("Successfully installed vector database at %s", targetPath) + return nil +} + +// extractLayer extracts a single layer (tar archive) to the destination directory +func extractLayer(layer interface{ Uncompressed() (io.ReadCloser, error) }, destDir string) error { + rc, err := layer.Uncompressed() + if err != nil { + return fmt.Errorf("failed to get layer content: %w", err) + } + defer rc.Close() + + tarReader := tar.NewReader(rc) + + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + return fmt.Errorf("failed to read tar header: %w", err) + } + + // Prevent zip slip vulnerability by validating the target path + // Reject absolute paths + if filepath.IsAbs(header.Name) { + return fmt.Errorf("invalid tar entry path (absolute path not allowed): %s", header.Name) + } + + target := filepath.Join(destDir, header.Name) + + // Resolve any previously-extracted symbolic links in the target path + // This prevents symlink chaining attacks where a symlink could be used + // to escape the destination directory + resolvedTarget, err := filepath.EvalSymlinks(target) + if err != nil { + // If EvalSymlinks fails (e.g., path doesn't exist yet), fall back to Clean + // This is expected for new files/dirs that haven't been created yet + resolvedTarget = filepath.Clean(target) + } + + cleanedDestDir := filepath.Clean(destDir) + + // Use filepath.Rel to check if resolved target is within destDir + // If the relative path starts with "..", it's trying to escape + relPath, err := filepath.Rel(cleanedDestDir, resolvedTarget) + if err != nil || len(relPath) == 0 || (relPath[0] == '.' 
&& len(relPath) > 1 && relPath[1] == '.') { + return fmt.Errorf("invalid tar entry path (potential path traversal): %s", header.Name) + } + + target = filepath.Clean(target) + + switch header.Typeflag { + case tar.TypeDir: + if err := os.MkdirAll(target, os.FileMode(header.Mode)); err != nil { + return fmt.Errorf("failed to create directory: %w", err) + } + + case tar.TypeReg: + // Ensure parent directory exists + if err := os.MkdirAll(filepath.Dir(target), 0o755); err != nil { + return fmt.Errorf("failed to create parent directory: %w", err) + } + + file, err := os.OpenFile(target, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.FileMode(header.Mode)) + if err != nil { + return fmt.Errorf("failed to create file: %w", err) + } + + if _, err := io.Copy(file, tarReader); err != nil { + file.Close() + return fmt.Errorf("failed to write file: %w", err) + } + file.Close() + + case tar.TypeSymlink: + // Handle symlinks - validate the link target to prevent symlink attacks + // Reject absolute symlink targets + if filepath.IsAbs(header.Linkname) { + return fmt.Errorf("invalid symlink target (absolute path not allowed): %s -> %s", header.Name, header.Linkname) + } + + // Calculate where the symlink target would resolve to + // The symlink is created at 'target', and points to 'header.Linkname' + linkTargetPath := filepath.Join(filepath.Dir(target), header.Linkname) + + // Resolve any previously-extracted symbolic links in the target path + // This prevents symlink chaining attacks + resolvedLinkTarget, err := filepath.EvalSymlinks(linkTargetPath) + if err != nil { + // If EvalSymlinks fails, fall back to Clean (target doesn't exist yet) + resolvedLinkTarget = filepath.Clean(linkTargetPath) + } + + // Ensure the symlink target is within the destination directory + relLinkPath, err := filepath.Rel(cleanedDestDir, resolvedLinkTarget) + if err != nil || len(relLinkPath) == 0 || (relLinkPath[0] == '.' 
&& len(relLinkPath) > 1 && relLinkPath[1] == '.') { + return fmt.Errorf("invalid symlink target (potential path traversal): %s -> %s", header.Name, header.Linkname) + } + + if err := os.Symlink(header.Linkname, target); err != nil { + return fmt.Errorf("failed to create symlink: %w", err) + } + + default: + // Skip other types (block devices, etc.) + log.Logf("Skipping unsupported tar entry type %d: %s", header.Typeflag, header.Name) + } + } + + return nil +} + +// copyDir recursively copies a directory from src to dst +func copyDir(src, dst string) error { + return filepath.Walk(src, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Calculate the relative path + relPath, err := filepath.Rel(src, path) + if err != nil { + return fmt.Errorf("failed to get relative path: %w", err) + } + + targetPath := filepath.Join(dst, relPath) + + if info.IsDir() { + // Create directory + return os.MkdirAll(targetPath, info.Mode()) + } + + // Copy file + return copyFile(path, targetPath, info.Mode()) + }) +} + +// copyFile copies a single file from src to dst with the specified mode +func copyFile(src, dst string, mode os.FileMode) error { + sourceFile, err := os.Open(src) + if err != nil { + return fmt.Errorf("failed to open source file: %w", err) + } + defer sourceFile.Close() + + // Ensure parent directory exists + if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil { + return fmt.Errorf("failed to create parent directory: %w", err) + } + + destFile, err := os.OpenFile(dst, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, mode) + if err != nil { + return fmt.Errorf("failed to create destination file: %w", err) + } + defer destFile.Close() + + if _, err := io.Copy(destFile, sourceFile); err != nil { + return fmt.Errorf("failed to copy file content: %w", err) + } + + return nil +} + +// Push creates an OCI artifact containing the vector database directory and pushes it to the specified OCI reference. 
+// The directory will always be named "vectors.db" in the OCI artifact regardless of the source directory name. +// +// Example usage: +// +// go run ./examples/embeddings/push.go ~/.docker/mcp/vectors.db jimclark106/embeddings:v1.0 +func Push(ctx context.Context, vectorDBPath string, ociRef string) error { + log.Logf("Pushing vector database from %s to %s", vectorDBPath, ociRef) + + // Verify that the source directory exists + if _, err := os.Stat(vectorDBPath); os.IsNotExist(err) { + return fmt.Errorf("vectors.db directory not found at %s", vectorDBPath) + } + + // Parse the OCI reference + ref, err := name.ParseReference(ociRef) + if err != nil { + return fmt.Errorf("failed to parse OCI reference: %w", err) + } + + // Create a tar archive from the vector.db directory + log.Logf("Creating tar archive from %s", vectorDBPath) + tarBuffer, err := createTarFromDirectory(vectorDBPath) + if err != nil { + return fmt.Errorf("failed to create tar archive: %w", err) + } + + // Create a layer from the tar archive + layer, err := tarball.LayerFromOpener(func() (io.ReadCloser, error) { + return io.NopCloser(bytes.NewReader(tarBuffer.Bytes())), nil + }) + if err != nil { + return fmt.Errorf("failed to create layer from tar: %w", err) + } + + // Start with an empty image + img := empty.Image + + // Add the layer to the image + img, err = mutate.AppendLayers(img, layer) + if err != nil { + return fmt.Errorf("failed to append layer to image: %w", err) + } + + // Push the image to the registry + log.Logf("Pushing image to %s", ociRef) + if err := remote.Write(ref, img, remote.WithAuthFromKeychain(authn.DefaultKeychain), remote.WithContext(ctx)); err != nil { + return fmt.Errorf("failed to push image: %w", err) + } + + log.Logf("Successfully pushed vector database to %s", ociRef) + return nil +} + +// createTarFromDirectory creates a tar archive from the specified directory +func createTarFromDirectory(srcDir string) (*bytes.Buffer, error) { + var buf bytes.Buffer + tw := 
tar.NewWriter(&buf) + defer tw.Close() + + // Walk the directory tree and add files to the tar archive + err := filepath.Walk(srcDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Get the relative path from the source directory + relPath, err := filepath.Rel(srcDir, path) + if err != nil { + return fmt.Errorf("failed to get relative path: %w", err) + } + + // Create tar header + header, err := tar.FileInfoHeader(info, "") + if err != nil { + return fmt.Errorf("failed to create tar header: %w", err) + } + + // Always use vectorDBFileName as the root directory name in the archive + // This ensures consistency regardless of the source directory name + if relPath == "." { + header.Name = vectorDBFileName + } else { + header.Name = filepath.Join(vectorDBFileName, relPath) + } + + // Handle symlinks + if info.Mode()&os.ModeSymlink != 0 { + linkTarget, err := os.Readlink(path) + if err != nil { + return fmt.Errorf("failed to read symlink: %w", err) + } + header.Linkname = linkTarget + } + + // Write the header + if err := tw.WriteHeader(header); err != nil { + return fmt.Errorf("failed to write tar header: %w", err) + } + + // If it's a regular file, write the content + if info.Mode().IsRegular() { + file, err := os.Open(path) + if err != nil { + return fmt.Errorf("failed to open file: %w", err) + } + defer file.Close() + + if _, err := io.Copy(tw, file); err != nil { + return fmt.Errorf("failed to write file to tar: %w", err) + } + } + + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to walk directory: %w", err) + } + + return &buf, nil +} diff --git a/pkg/gateway/embeddings/oci_test.go b/pkg/gateway/embeddings/oci_test.go new file mode 100644 index 00000000..f222d756 --- /dev/null +++ b/pkg/gateway/embeddings/oci_test.go @@ -0,0 +1,280 @@ +package embeddings + +import ( + "archive/tar" + "bytes" + "io" + "path/filepath" + "testing" +) + +// TestExtractLayerPathTraversalPrevention tests that the 
extractLayer function +// properly prevents path traversal attacks (zip slip vulnerability) +func TestExtractLayerPathTraversalPrevention(t *testing.T) { + tests := []struct { + name string + tarEntries []tarEntry + shouldError bool + description string + }{ + { + name: "legitimate nested path", + tarEntries: []tarEntry{ + {name: "vectors.db/data.db", content: "legitimate content", isDir: false}, + }, + shouldError: false, + description: "should allow legitimate nested paths", + }, + { + name: "path traversal with ..", + tarEntries: []tarEntry{ + {name: "../../etc/passwd", content: "malicious content", isDir: false}, + }, + shouldError: true, + description: "should reject paths with .. that escape destination", + }, + { + name: "absolute path", + tarEntries: []tarEntry{ + {name: "/etc/passwd", content: "malicious content", isDir: false}, + }, + shouldError: true, + description: "should reject absolute paths that escape destination", + }, + { + name: "path traversal in middle", + tarEntries: []tarEntry{ + {name: "vectors.db/../../etc/passwd", content: "malicious content", isDir: false}, + }, + shouldError: true, + description: "should reject paths with .. in the middle that escape", + }, + { + name: "legitimate .. that stays within destination", + tarEntries: []tarEntry{ + {name: "vectors.db/subdir", content: "", isDir: true}, + {name: "vectors.db/subdir/../file.db", content: "legitimate content", isDir: false}, + }, + shouldError: false, + description: "should allow .. 
if it resolves within destination", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create a temporary directory for extraction + destDir := t.TempDir() + + // Create a tar archive with the test entries + tarBuffer := createTestTar(t, tt.tarEntries) + + // Create a mock layer + layer := &mockLayer{data: tarBuffer.Bytes()} + + // Try to extract the layer + err := extractLayer(layer, destDir) + + if tt.shouldError { + if err == nil { + t.Errorf("%s: expected error but got none", tt.description) + } + } else { + if err != nil { + t.Errorf("%s: unexpected error: %v", tt.description, err) + } + } + }) + } +} + +// tarEntry represents a single entry in a tar archive for testing +type tarEntry struct { + name string + content string + isDir bool +} + +// createTestTar creates a tar archive with the given entries +func createTestTar(t *testing.T, entries []tarEntry) *bytes.Buffer { + t.Helper() + + var buf bytes.Buffer + tw := tar.NewWriter(&buf) + defer tw.Close() + + for _, entry := range entries { + var header *tar.Header + if entry.isDir { + header = &tar.Header{ + Name: entry.name, + Mode: 0o755, + Typeflag: tar.TypeDir, + } + } else { + header = &tar.Header{ + Name: entry.name, + Mode: 0o644, + Size: int64(len(entry.content)), + Typeflag: tar.TypeReg, + } + } + + if err := tw.WriteHeader(header); err != nil { + t.Fatalf("failed to write tar header: %v", err) + } + + if !entry.isDir { + if _, err := tw.Write([]byte(entry.content)); err != nil { + t.Fatalf("failed to write tar content: %v", err) + } + } + } + + return &buf +} + +// mockLayer implements the interface required by extractLayer +type mockLayer struct { + data []byte +} + +func (m *mockLayer) Uncompressed() (io.ReadCloser, error) { + return io.NopCloser(bytes.NewReader(m.data)), nil +} + +// TestExtractLayerSymlinkSafety tests that symlinks are handled safely +func TestExtractLayerSymlinkSafety(t *testing.T) { + tests := []struct { + name string + symlinkName string + 
symlinkDest string + shouldError bool + description string + }{ + { + name: "legitimate relative symlink", + symlinkName: "vectors.db/link", + symlinkDest: "data.db", + shouldError: false, + description: "should allow relative symlinks within destination", + }, + { + name: "absolute symlink target", + symlinkName: "vectors.db/link", + symlinkDest: "/etc/passwd", + shouldError: true, + description: "should reject absolute symlink targets", + }, + { + name: "symlink escaping with ..", + symlinkName: "vectors.db/link", + symlinkDest: "../../etc/passwd", + shouldError: true, + description: "should reject symlinks that escape destination directory", + }, + { + name: "symlink to parent that stays within", + symlinkName: "vectors.db/subdir/link", + symlinkDest: "../data.db", + shouldError: false, + description: "should allow .. if it resolves within destination", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + destDir := t.TempDir() + + // Create a tar with a directory and a symlink + var buf bytes.Buffer + tw := tar.NewWriter(&buf) + + // Add the parent directory first + dirHeader := &tar.Header{ + Name: "vectors.db/", + Mode: 0o755, + Typeflag: tar.TypeDir, + } + if err := tw.WriteHeader(dirHeader); err != nil { + t.Fatalf("failed to write directory header: %v", err) + } + + // Add subdirectory if needed + if filepath.Dir(tt.symlinkName) != "vectors.db" { + subdirHeader := &tar.Header{ + Name: filepath.Dir(tt.symlinkName) + "/", + Mode: 0o755, + Typeflag: tar.TypeDir, + } + if err := tw.WriteHeader(subdirHeader); err != nil { + t.Fatalf("failed to write subdirectory header: %v", err) + } + } + + // Add the symlink + header := &tar.Header{ + Name: tt.symlinkName, + Linkname: tt.symlinkDest, + Typeflag: tar.TypeSymlink, + } + if err := tw.WriteHeader(header); err != nil { + t.Fatalf("failed to write symlink header: %v", err) + } + + tw.Close() + + layer := &mockLayer{data: buf.Bytes()} + + // Extract and check result + err := 
extractLayer(layer, destDir) + + if tt.shouldError { + if err == nil { + t.Errorf("%s: expected error but got none", tt.description) + } + } else { + if err != nil { + t.Errorf("%s: unexpected error: %v", tt.description, err) + } + } + }) + } +} + +// TestExtractLayerSymlinkChaining tests protection against symlink chaining attacks +func TestExtractLayerSymlinkChaining(t *testing.T) { + destDir := t.TempDir() + + // Create a malicious tar with symlink chaining: + // 1. vectors.db/link -> .. (points outside destDir to parent directory) + // 2. vectors.db/escape -> link/.. (chains through the symlink to escape further) + var buf bytes.Buffer + tw := tar.NewWriter(&buf) + + // Add directory + if err := tw.WriteHeader(&tar.Header{Name: "vectors.db/", Mode: 0o755, Typeflag: tar.TypeDir}); err != nil { + t.Fatalf("failed to write header: %v", err) + } + + // Add first symlink that points outside: vectors.db/link -> ../.. + // This creates: destDir/vectors.db/link -> ../.. which resolves to parent of destDir + if err := tw.WriteHeader(&tar.Header{ + Name: "vectors.db/link", + Linkname: "../..", + Typeflag: tar.TypeSymlink, + }); err != nil { + t.Fatalf("failed to write symlink header: %v", err) + } + + tw.Close() + + layer := &mockLayer{data: buf.Bytes()} + + // This should fail because the symlink escapes the destination directory + err := extractLayer(layer, destDir) + if err == nil { + t.Error("Expected error for symlink chaining attack, but extraction succeeded") + } else { + t.Logf("Symlink chaining attack correctly blocked: %v", err) + } +} diff --git a/pkg/gateway/findmcps.go b/pkg/gateway/findmcps.go new file mode 100644 index 00000000..730c896f --- /dev/null +++ b/pkg/gateway/findmcps.go @@ -0,0 +1,319 @@ +package gateway + +import ( + "context" + "encoding/json" + "fmt" + "strings" + + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/catalog" + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" + 
"github.com/docker/mcp-gateway/pkg/log" +) + +// maxInt returns the maximum of two integers +func maxInt(a, b int) int { + if a > b { + return a + } + return b +} + +// ServerMatch represents a search result +type ServerMatch struct { + Name string + Server catalog.Server + Score int +} + +func keywordStrategy(configuration Configuration) mcp.ToolHandler { + return func(_ context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + Query string `json:"query"` + Limit int `json:"limit"` + } + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, ¶ms); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.Query == "" { + return nil, fmt.Errorf("query parameter is required") + } + + if params.Limit <= 0 { + params.Limit = 10 + } + + // Search through the catalog servers + query := strings.ToLower(strings.TrimSpace(params.Query)) + var matches []ServerMatch + + for serverName, server := range configuration.servers { + match := false + score := 0 + + // Check server name (exact match gets higher score) + serverNameLower := strings.ToLower(serverName) + if serverNameLower == query { + match = true + score = 100 + } else if strings.Contains(serverNameLower, query) { + match = true + score = 50 + } + + // Check server title + if server.Title != "" { + titleLower := strings.ToLower(server.Title) + if titleLower == query { + match = true + score = maxInt(score, 97) + } else if strings.Contains(titleLower, query) { + match = true + score = maxInt(score, 47) + } + } + + // Check server description + if server.Description != "" { + descriptionLower := strings.ToLower(server.Description) + if descriptionLower == query { + match = true + score = maxInt(score, 95) + } 
else if strings.Contains(descriptionLower, query) { + match = true + score = maxInt(score, 45) + } + } + + // Check if it has tools that might match + for _, tool := range server.Tools { + toolNameLower := strings.ToLower(tool.Name) + toolDescLower := strings.ToLower(tool.Description) + + if toolNameLower == query { + match = true + score = maxInt(score, 90) + } else if strings.Contains(toolNameLower, query) { + match = true + score = maxInt(score, 40) + } else if strings.Contains(toolDescLower, query) { + match = true + score = maxInt(score, 30) + } + } + + // Check image name + if server.Image != "" { + imageLower := strings.ToLower(server.Image) + if strings.Contains(imageLower, query) { + match = true + score = maxInt(score, 20) + } + } + + if match { + matches = append(matches, ServerMatch{ + Name: serverName, + Server: server, + Score: score, + }) + } + } + + // Sort matches by score (higher scores first) + for i := range len(matches) - 1 { + for j := i + 1; j < len(matches); j++ { + if matches[i].Score < matches[j].Score { + matches[i], matches[j] = matches[j], matches[i] + } + } + } + + // Limit results + if len(matches) > params.Limit { + matches = matches[:params.Limit] + } + + // Format results + var results []map[string]any + for _, match := range matches { + serverInfo := map[string]any{ + "name": match.Name, + } + + if match.Server.Description != "" { + serverInfo["description"] = match.Server.Description + } + + if len(match.Server.Secrets) > 0 { + var secrets []string + for _, secret := range match.Server.Secrets { + secrets = append(secrets, secret.Name) + } + serverInfo["required_secrets"] = secrets + } + + if len(match.Server.Config) > 0 { + serverInfo["config_schema"] = match.Server.Config + } + + serverInfo["long_lived"] = match.Server.LongLived + + results = append(results, serverInfo) + } + + response := map[string]any{ + "prompt": params.Query, + "total_matches": len(results), + "servers": results, + } + + responseBytes, err := 
json.Marshal(response) + if err != nil { + return nil, fmt.Errorf("failed to marshal response: %w", err) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{Text: string(responseBytes)}}, + }, nil + } +} + +func embeddingStrategy(g *Gateway) mcp.ToolHandler { + return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + Query string `json:"query"` + Limit int `json:"limit"` + } + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, &params); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.Query == "" { + return nil, fmt.Errorf("query parameter is required") + } + + if params.Limit <= 0 { + params.Limit = 10 + } + + // Use vector similarity search to find relevant servers + results, err := g.findServersByEmbedding(ctx, params.Query, params.Limit) + if err != nil { + return nil, fmt.Errorf("failed to find servers: %w", err) + } + + response := map[string]any{ + "prompt": params.Query, + "total_matches": len(results), + "servers": results, + } + + responseBytes, err := json.Marshal(response) + if err != nil { + return nil, fmt.Errorf("failed to marshal response: %w", err) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{Text: string(responseBytes)}}, + }, nil + } +} + +// findServersByEmbedding finds relevant MCP servers using vector similarity search +func (g *Gateway) findServersByEmbedding(ctx context.Context, query string, limit int) ([]map[string]any, error) { + if g.embeddingsClient == nil { + return nil, fmt.Errorf("embeddings client not initialized") + } + + // Generate embedding for the query + queryVector, err := generateEmbedding(ctx, query) + if err != nil { + return 
nil, fmt.Errorf("failed to generate embedding: %w", err) + } + + // Search for similar servers in mcp-server-collection only + results, err := g.embeddingsClient.SearchVectors(ctx, queryVector, &embeddings.SearchOptions{ + CollectionName: "mcp-server-collection", + Limit: limit, + }) + if err != nil { + return nil, fmt.Errorf("failed to search vectors: %w", err) + } + + // Map results to servers from catalog + var servers []map[string]any + for _, result := range results { + // Extract server name from metadata + serverNameInterface, ok := result.Metadata["name"] + if !ok { + log.Logf("Warning: search result %d missing 'name' in metadata", result.ID) + continue + } + + serverName, ok := serverNameInterface.(string) + if !ok { + log.Logf("Warning: server name is not a string: %v", serverNameInterface) + continue + } + + // Look up the server in the catalog + server, _, found := g.configuration.Find(serverName) + if !found { + log.Logf("Warning: server %s not found in catalog", serverName) + continue + } + + // Build server info map (same format as mcp-find) + serverInfo := map[string]any{ + "name": serverName, + } + + if server.Spec.Description != "" { + serverInfo["description"] = server.Spec.Description + } + + if len(server.Spec.Secrets) > 0 { + var secrets []string + for _, secret := range server.Spec.Secrets { + secrets = append(secrets, secret.Name) + } + serverInfo["required_secrets"] = secrets + } + + if len(server.Spec.Config) > 0 { + serverInfo["config_schema"] = server.Spec.Config + } + + serverInfo["long_lived"] = server.Spec.LongLived + + servers = append(servers, serverInfo) + } + + return servers, nil +} diff --git a/pkg/gateway/findtools.go b/pkg/gateway/findtools.go new file mode 100644 index 00000000..a6fcb464 --- /dev/null +++ b/pkg/gateway/findtools.go @@ -0,0 +1,212 @@ +package gateway + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "os" + + "github.com/google/jsonschema-go/jsonschema" + 
"github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" + "github.com/docker/mcp-gateway/pkg/log" +) + +// generateEmbedding generates an embedding vector from text using OpenAI's API +func generateEmbedding(ctx context.Context, text string) ([]float64, error) { + apiKey := os.Getenv("OPENAI_API_KEY") + if apiKey == "" { + return nil, fmt.Errorf("OPENAI_API_KEY environment variable not set") + } + + type embeddingRequest struct { + Input string `json:"input"` + Model string `json:"model"` + } + + type embeddingResponse struct { + Data []struct { + Embedding []float64 `json:"embedding"` + } `json:"data"` + } + + reqBody, err := json.Marshal(embeddingRequest{ + Input: text, + Model: "text-embedding-3-small", + }) + if err != nil { + return nil, fmt.Errorf("failed to marshal request: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://api.openai.com/v1/embeddings", bytes.NewReader(reqBody)) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer "+apiKey) + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to make request: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("API returned status %d", resp.StatusCode) + } + + var embResp embeddingResponse + if err := json.NewDecoder(resp.Body).Decode(&embResp); err != nil { + return nil, fmt.Errorf("failed to decode response: %w", err) + } + + if len(embResp.Data) == 0 { + return nil, fmt.Errorf("no embedding returned") + } + + return embResp.Data[0].Embedding, nil +} + +// findToolsByEmbedding finds relevant tools using vector similarity search +func (g *Gateway) findToolsByEmbedding(ctx context.Context, prompt string) ([]map[string]any, error) { + if g.embeddingsClient == nil { + return nil, 
fmt.Errorf("embeddings client not initialized") + } + + // Generate embedding for the prompt + queryVector, err := generateEmbedding(ctx, prompt) + if err != nil { + return nil, fmt.Errorf("failed to generate embedding: %w", err) + } + + // Search for similar tools, excluding the mcp-server-collection + results, err := g.embeddingsClient.SearchVectors(ctx, queryVector, &embeddings.SearchOptions{ + ExcludeCollections: []string{"mcp-server-collection"}, + Limit: 5, + }) + if err != nil { + return nil, fmt.Errorf("failed to search vectors: %w", err) + } + + // Map results to tools in tools/list format + var tools []map[string]any + for _, result := range results { + // Extract tool name from metadata + toolNameInterface, ok := result.Metadata["tool"] + if !ok { + log.Logf("Warning: search result %d missing 'tool' in metadata", result.ID) + continue + } + + // Handle nested structure: metadata.tool.name + var toolName string + switch v := toolNameInterface.(type) { + case map[string]any: + if nameInterface, ok := v["name"]; ok { + toolName, _ = nameInterface.(string) + } + case string: + toolName = v + } + + if toolName == "" { + log.Logf("Warning: could not extract tool name from metadata: %v", result.Metadata) + continue + } + + // Look up the tool registration + toolReg, ok := g.toolRegistrations[toolName] + if !ok { + log.Logf("Warning: tool %s not found in registrations", toolName) + continue + } + + // Build tool map in tools/list format + toolMap := map[string]any{ + "name": toolReg.Tool.Name, + "description": toolReg.Tool.Description, + } + if toolReg.Tool.InputSchema != nil { + toolMap["inputSchema"] = toolReg.Tool.InputSchema + } + + tools = append(tools, toolMap) + } + + return tools, nil +} + +// createFindToolsTool implements a tool for finding relevant tools based on a user's task description +func (g *Gateway) createFindToolsTool(_ *clientConfig) *ToolRegistration { + tool := &mcp.Tool{ + Name: "find-tools", + Description: "Analyze a task description and 
recommend relevant MCP tools that could help accomplish it. Uses AI to intelligently match your needs to available tools.", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "prompt": { + Type: "string", + Description: "Description of the task or goal you want to accomplish. An AI will analyze this and recommend relevant tools from the available inventory.", + }, + }, + Required: []string{"prompt"}, + }, + } + + handler := func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + Prompt string `json:"prompt"` + } + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, &params); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.Prompt == "" { + return nil, fmt.Errorf("prompt parameter is required") + } + + // Use vector similarity search to find relevant tools + tools, err := g.findToolsByEmbedding(ctx, params.Prompt) + if err != nil { + return nil, fmt.Errorf("failed to find tools: %w", err) + } + + // Format response in tools/list format + response := map[string]any{ + "tools": tools, + } + + responseJSON, err := json.MarshalIndent(response, "", " ") + if err != nil { + return nil, fmt.Errorf("failed to marshal response: %w", err) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: string(responseJSON), + }}, + }, nil + } + + return &ToolRegistration{ + ServerName: "", // Internal tool + Tool: tool, + Handler: handler, + } +} diff --git a/pkg/gateway/mcpadd.go b/pkg/gateway/mcpadd.go index ed8ce2ad..2d76e238 100644 --- a/pkg/gateway/mcpadd.go +++ b/pkg/gateway/mcpadd.go @@ -21,28 +21,8 @@ import ( "github.com/docker/mcp-gateway/pkg/oci" ) -// mcpAddTool 
implements a tool for adding new servers to the registry -func (g *Gateway) createMcpAddTool(clientConfig *clientConfig) *ToolRegistration { - tool := &mcp.Tool{ - Name: "mcp-add", - Description: "Add a new MCP server to the session. The server must exist in the catalog.", - InputSchema: &jsonschema.Schema{ - Type: "object", - Properties: map[string]*jsonschema.Schema{ - "name": { - Type: "string", - Description: "Name of the MCP server to add to the registry (must exist in catalog)", - }, - "activate": { - Type: "boolean", - Description: "Activate all of the server's tools in the current session", - }, - }, - Required: []string{"name"}, - }, - } - - handler := func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { +func addServerHandler(g *Gateway, clientConfig *clientConfig) mcp.ToolHandler { + return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { // Parse parameters var params struct { Name string `json:"name"` @@ -302,13 +282,61 @@ func (g *Gateway) createMcpAddTool(clientConfig *clientConfig) *ToolRegistration }}, }, nil } +} + +func removeServerHandler(g *Gateway) mcp.ToolHandler { + return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + Name string `json:"name"` + } - return &ToolRegistration{ - Tool: tool, - Handler: withToolTelemetry("mcp-add", handler), + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, &params); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.Name == "" { + return nil, fmt.Errorf("name parameter is required") + } + + serverName := strings.TrimSpace(params.Name) + + // Remove the server from the current serverNames + updatedServerNames 
:= slices.DeleteFunc(slices.Clone(g.configuration.serverNames), func(name string) bool { + return name == serverName + }) + + // Update the current configuration state + g.configuration.serverNames = updatedServerNames + + // Stop OAuth provider if this is an OAuth server + if g.McpOAuthDcrEnabled { + g.stopProvider(serverName) + } + + if err := g.removeServerConfiguration(ctx, serverName); err != nil { + return nil, fmt.Errorf("failed to remove server configuration: %w", err) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("Successfully removed server '%s'.", serverName), + }}, + }, nil + } } +// mcpAddTool implements a tool for adding new servers to the registry + // shortenURL creates a shortened URL using Bitly's API // It returns the shortened URL or an error if the request fails func shortenURL(ctx context.Context, longURL string) (string, error) { diff --git a/pkg/gateway/mcpexec.go b/pkg/gateway/mcpexec.go new file mode 100644 index 00000000..0eda7ffb --- /dev/null +++ b/pkg/gateway/mcpexec.go @@ -0,0 +1,89 @@ +package gateway + +import ( + "context" + "encoding/json" + "fmt" + "strings" + + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/log" +) + +func addMcpExecHandler(g *Gateway) mcp.ToolHandler { + return func(ctx context.Context, req *mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + Name string `json:"name"` + Arguments json.RawMessage `json:"arguments"` + } + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, &params); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.Name == "" { + return nil, fmt.Errorf("name parameter is required") + } + + toolName := 
strings.TrimSpace(params.Name) + + // Look up the tool in current tool registrations + g.capabilitiesMu.RLock() + toolReg, found := g.toolRegistrations[toolName] + g.capabilitiesMu.RUnlock() + + if !found { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("Error: Tool '%s' not found in current session. Make sure the server providing this tool is added to the session.", toolName), + }}, + }, nil + } + + // Handle the case where arguments might be a JSON-encoded string + // This happens when the schema previously specified Type: "string" + var toolArguments json.RawMessage + if len(params.Arguments) > 0 { + // Try to unmarshal as a string first (for backward compatibility) + var argString string + if err := json.Unmarshal(params.Arguments, &argString); err == nil { + // It was a JSON string, use the unescaped content + toolArguments = json.RawMessage(argString) + } else { + // It's already a proper JSON object/value + toolArguments = params.Arguments + } + } + + // Create a new CallToolRequest with the provided arguments + log.Logf("calling tool %s with %s", toolName, toolArguments) + toolCallRequest := &mcp.CallToolRequest{ + Session: req.Session, + Params: &mcp.CallToolParamsRaw{ + Meta: req.Params.Meta, + Name: toolName, + Arguments: toolArguments, + }, + Extra: req.Extra, + } + + // Execute the tool using its registered handler + result, err := toolReg.Handler(ctx, toolCallRequest) + if err != nil { + return nil, fmt.Errorf("tool execution failed: %w", err) + } + + return result, nil + } +} diff --git a/pkg/gateway/registry.go b/pkg/gateway/registry.go new file mode 100644 index 00000000..8b3bfd9a --- /dev/null +++ b/pkg/gateway/registry.go @@ -0,0 +1,181 @@ +package gateway + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/modelcontextprotocol/go-sdk/mcp" + + "github.com/docker/mcp-gateway/pkg/catalog" + "github.com/docker/mcp-gateway/pkg/log" + 
"github.com/docker/mcp-gateway/pkg/oci" +) + +// readServersFromURL fetches and parses server definitions from a URL +// +//nolint:unused // TODO: This function will be used when registry import feature is enabled +func (g *Gateway) readServersFromURL(ctx context.Context, url string) (map[string]catalog.Server, error) { + servers := make(map[string]catalog.Server) + + log.Log(fmt.Sprintf(" - Reading servers from URL: %s", url)) + + // Create HTTP request with context + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("failed to create HTTP request: %w", err) + } + + // Set a reasonable user agent + req.Header.Set("User-Agent", "docker-mcp-gateway/1.0.0") + + // Make the HTTP request + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to fetch URL: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("HTTP request failed with status %d: %s", resp.StatusCode, resp.Status) + } + + // Read the response body + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %w", err) + } + + // Try to parse as oci.ServerDetail (the new structure) + var serverDetail oci.ServerDetail + if err := json.Unmarshal(body, &serverDetail); err == nil && serverDetail.Name != "" { + // Successfully parsed as ServerDetail - convert to catalog.Server + server := serverDetail.ToCatalogServer() + + serverName := serverDetail.Name + servers[serverName] = server + log.Log(fmt.Sprintf(" - Added server '%s' from URL %s", serverName, url)) + return servers, nil + } + + return nil, fmt.Errorf("unable to parse response as OCI catalog or direct catalog format") +} + +//nolint:unused // TODO: This handler will be used when registry import feature is enabled +func registryImportHandler(g *Gateway, configuration Configuration) mcp.ToolHandler { + return func(ctx context.Context, req 
*mcp.CallToolRequest) (*mcp.CallToolResult, error) { + // Parse parameters + var params struct { + URL string `json:"url"` + } + + if req.Params.Arguments == nil { + return nil, fmt.Errorf("missing arguments") + } + + paramsBytes, err := json.Marshal(req.Params.Arguments) + if err != nil { + return nil, fmt.Errorf("failed to marshal arguments: %w", err) + } + + if err := json.Unmarshal(paramsBytes, &params); err != nil { + return nil, fmt.Errorf("failed to parse arguments: %w", err) + } + + if params.URL == "" { + return nil, fmt.Errorf("url parameter is required") + } + + registryURL := strings.TrimSpace(params.URL) + + // Validate URL scheme + if !strings.HasPrefix(registryURL, "http://") && !strings.HasPrefix(registryURL, "https://") { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("Error: URL must start with http:// or https://, got: %s", registryURL), + }}, + }, nil + } + + // Fetch servers from the URL + servers, err := g.readServersFromURL(ctx, registryURL) + if err != nil { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("Error fetching servers from URL %s: %v", registryURL, err), + }}, + }, nil + } + + if len(servers) == 0 { + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: fmt.Sprintf("No servers found at URL: %s", registryURL), + }}, + }, nil + } + + // Add the imported servers to the current configuration and build detailed summary + var importedServerNames []string + var serverSummaries []string + + for serverName, server := range servers { + if _, exists := configuration.servers[serverName]; exists { + log.Log(fmt.Sprintf("Warning: server '%s' from URL %s overwrites existing server", serverName, registryURL)) + } + configuration.servers[serverName] = server + importedServerNames = append(importedServerNames, serverName) + + // Build detailed summary for this server + summary := fmt.Sprintf("• %s", serverName) + + if server.Description 
!= "" { + summary += fmt.Sprintf("\n Description: %s", server.Description) + } + + if server.Image != "" { + summary += fmt.Sprintf("\n Image: %s", server.Image) + } + + // List required secrets + if len(server.Secrets) > 0 { + var secretNames []string + for _, secret := range server.Secrets { + secretNames = append(secretNames, secret.Name) + } + summary += fmt.Sprintf("\n Required Secrets: %s", strings.Join(secretNames, ", ")) + summary += "\n ⚠️ Configure these secrets before using this server" + } + + // List configuration schemas available + if len(server.Config) > 0 { + summary += fmt.Sprintf("\n Configuration Schemas: %d available", len(server.Config)) + summary += "\n ℹ️ Use mcp-config-set to configure optional settings" + } + + if server.LongLived { + summary += "\n 🔄 Long-lived server (stays running)" + } + + serverSummaries = append(serverSummaries, summary) + } + + // Create comprehensive result message + resultText := fmt.Sprintf("Successfully imported %d servers from %s\n\n", len(importedServerNames), registryURL) + resultText += strings.Join(serverSummaries, "\n\n") + + if len(importedServerNames) > 0 { + resultText += fmt.Sprintf("\n\n✅ Servers ready to use: %s", strings.Join(importedServerNames, ", ")) + } + + return &mcp.CallToolResult{ + Content: []mcp.Content{&mcp.TextContent{ + Text: resultText, + }}, + }, nil + } +} diff --git a/pkg/gateway/reload.go b/pkg/gateway/reload.go index 99685fe1..ea578f93 100644 --- a/pkg/gateway/reload.go +++ b/pkg/gateway/reload.go @@ -82,54 +82,66 @@ func (g *Gateway) reloadConfiguration(ctx context.Context, configuration Configu log.Log("- Adding internal tools (dynamic-tools feature enabled)") // Add mcp-find tool - mcpFindTool := g.createMcpFindTool(configuration) + var handler mcp.ToolHandler + if g.embeddingsClient != nil { + handler = embeddingStrategy(g) + } else { + handler = keywordStrategy(configuration) + } + log.Log(" > mcp-find: tool for finding MCP servers in the catalog") + mcpFindTool := 
g.createMcpFindTool(configuration, handler) g.mcpServer.AddTool(mcpFindTool.Tool, mcpFindTool.Handler) g.toolRegistrations[mcpFindTool.Tool.Name] = *mcpFindTool // Add mcp-add tool + log.Log(" > mcp-add: tool for adding MCP servers to the registry") mcpAddTool := g.createMcpAddTool(clientConfig) g.mcpServer.AddTool(mcpAddTool.Tool, mcpAddTool.Handler) g.toolRegistrations[mcpAddTool.Tool.Name] = *mcpAddTool // Add mcp-remove tool + log.Log(" > mcp-remove: tool for removing MCP servers from the registry") mcpRemoveTool := g.createMcpRemoveTool() g.mcpServer.AddTool(mcpRemoveTool.Tool, mcpRemoveTool.Handler) g.toolRegistrations[mcpRemoveTool.Tool.Name] = *mcpRemoveTool // Add codemode + log.Log(" > code-mode: write code that calls other MCPs directly") codeModeTool := g.createCodeModeTool(clientConfig) g.mcpServer.AddTool(codeModeTool.Tool, codeModeTool.Handler) g.toolRegistrations[codeModeTool.Tool.Name] = *codeModeTool // Add mcp-exec tool + log.Log(" > mcp-exec: execute tools that exist in the current session") mcpExecTool := g.createMcpExecTool() g.mcpServer.AddTool(mcpExecTool.Tool, mcpExecTool.Handler) g.toolRegistrations[mcpExecTool.Tool.Name] = *mcpExecTool // Add mcp-config-set tool + log.Log(" > mcp-config-set: tool for setting configuration values for MCP servers") mcpConfigSetTool := g.createMcpConfigSetTool(clientConfig) g.mcpServer.AddTool(mcpConfigSetTool.Tool, mcpConfigSetTool.Handler) g.toolRegistrations[mcpConfigSetTool.Tool.Name] = *mcpConfigSetTool - log.Log(" > mcp-find: tool for finding MCP servers in the catalog") - log.Log(" > mcp-add: tool for adding MCP servers to the registry") - log.Log(" > mcp-remove: tool for removing MCP servers from the registry") - log.Log(" > mcp-config-set: tool for setting configuration values for MCP servers") - log.Log(" > code-mode: write code that calls other MCPs directly") - log.Log(" > mcp-exec: execute tools that exist in the current session") + // Add mcp-create-profile tool + log.Log(" > 
mcp-create-profile: tool for creating or updating profiles with current gateway state") + mcpCreateProfileTool := g.createMcpCreateProfileTool(clientConfig) + g.mcpServer.AddTool(mcpCreateProfileTool.Tool, mcpCreateProfileTool.Handler) + g.toolRegistrations[mcpCreateProfileTool.Tool.Name] = *mcpCreateProfileTool + + // Add find-tools tool only if embeddings client is configured + if g.embeddingsClient != nil { + log.Log(" > find-tools: AI-powered tool recommendation based on task description") + findToolsTool := g.createFindToolsTool(clientConfig) + g.mcpServer.AddTool(findToolsTool.Tool, findToolsTool.Handler) + g.toolRegistrations[findToolsTool.Tool.Name] = *findToolsTool + } // Add mcp-registry-import tool // mcpRegistryImportTool := g.createMcpRegistryImportTool(configuration, clientConfig) // g.mcpServer.AddTool(mcpRegistryImportTool.Tool, mcpRegistryImportTool.Handler) // g.toolRegistrations[mcpRegistryImportTool.Tool.Name] = *mcpRegistryImportTool - // Add mcp-session-name tool - // mcpSessionNameTool := g.createMcpSessionNameTool() - // g.mcpServer.AddTool(mcpSessionNameTool.Tool, mcpSessionNameTool.Handler) - // g.toolRegistrations[mcpSessionNameTool.Tool.Name] = *mcpSessionNameTool - // log.Log(" > mcp-registry-import: tool for importing servers from MCP registry URLs") - // log.Log(" > mcp-session-name: tool for setting session name to persist configuration") - // Add prompt prompts.AddDiscoverPrompt(g.mcpServer) log.Log(" > mcp-discover: prompt for learning about dynamic server management") diff --git a/pkg/gateway/run.go b/pkg/gateway/run.go index dae3e085..ebe3db5e 100644 --- a/pkg/gateway/run.go +++ b/pkg/gateway/run.go @@ -6,6 +6,7 @@ import ( "io" "net" "os" + "path/filepath" "strings" "sync" "time" @@ -14,12 +15,14 @@ import ( "go.opentelemetry.io/otel" "github.com/docker/mcp-gateway/pkg/docker" + "github.com/docker/mcp-gateway/pkg/gateway/embeddings" "github.com/docker/mcp-gateway/pkg/health" "github.com/docker/mcp-gateway/pkg/interceptors" 
"github.com/docker/mcp-gateway/pkg/log" "github.com/docker/mcp-gateway/pkg/oauth" "github.com/docker/mcp-gateway/pkg/oci" "github.com/docker/mcp-gateway/pkg/telemetry" + "github.com/docker/mcp-gateway/pkg/user" ) type ServerSessionCache struct { @@ -70,6 +73,9 @@ type Gateway struct { // Track all tool registrations for mcp-exec toolRegistrations map[string]ToolRegistration + // embeddings client for vector search + embeddingsClient *embeddings.VectorDBClient + // authToken stores the authentication token for SSE/streaming modes authToken string // authTokenWasGenerated indicates whether the token was auto-generated or from environment @@ -128,6 +134,35 @@ func (g *Gateway) Run(ctx context.Context) error { log.SetLogWriter(multiWriter) } + // Initialize embeddings client if feature is enabled and OPENAI_API_KEY is set + if g.UseEmbeddings { + if os.Getenv("OPENAI_API_KEY") == "" { + log.Log("Warning: use-embeddings feature is enabled but OPENAI_API_KEY is not set") + log.Log("find-tools will not support vector similarity search") + } else { + homeDir, err := user.HomeDir() + if err == nil { + // Use ~/.docker/mcp as the embeddings directory (vectors.db will be there) + embeddingsDir := filepath.Join(homeDir, ".docker", "mcp") + + log.Logf("Initializing embeddings client with data directory: %s", embeddingsDir) + embClient, err := embeddings.NewVectorDBClient(ctx, embeddingsDir, 1536, func(msg string) { + if g.Verbose { + log.Log(msg) + } + }) + if err != nil { + log.Logf("Warning: Failed to initialize embeddings client: %v", err) + log.Log("find-tools will not support vector similarity search") + } else { + g.embeddingsClient = embClient + defer embClient.Close() + log.Log("Embeddings client initialized successfully") + } + } + } + } + // Record gateway start transportMode := "stdio" if g.Port != 0 { @@ -600,3 +635,42 @@ func (g *Gateway) routeEventToProvider(event oauth.Event) { // Other events (login-start, code-received, error) - ignore } } + +// 
GetToolRegistrations returns a copy of all registered tools +// This is useful for introspection and serialization +func (g *Gateway) GetToolRegistrations() map[string]ToolRegistration { + g.capabilitiesMu.RLock() + defer g.capabilitiesMu.RUnlock() + + // Create a copy to avoid external modification + registrations := make(map[string]ToolRegistration, len(g.toolRegistrations)) + for k, v := range g.toolRegistrations { + registrations[k] = v + } + return registrations +} + +// Configurator returns the gateway's configurator +// This is useful for programmatic access to configuration +func (g *Gateway) Configurator() Configurator { + return g.configurator +} + +// SetMCPServer sets the gateway's MCP server +// This is useful when initializing the gateway programmatically +func (g *Gateway) SetMCPServer(server *mcp.Server) { + g.mcpServer = server +} + +// ReloadConfiguration reloads the gateway configuration and capabilities +// This is useful for programmatic configuration updates +func (g *Gateway) ReloadConfiguration(ctx context.Context, configuration Configuration, serverNames []string, clientConfig *clientConfig) error { + g.configuration = configuration + return g.reloadConfiguration(ctx, configuration, serverNames, clientConfig) +} + +// PullAndVerify pulls and verifies Docker images for the configured servers +// This is useful when programmatically initializing the gateway +func (g *Gateway) PullAndVerify(ctx context.Context, configuration Configuration) error { + return g.pullAndVerify(ctx, configuration) +} diff --git a/pkg/integration_test.go b/pkg/integration_test.go index 3a490c14..66e05c57 100644 --- a/pkg/integration_test.go +++ b/pkg/integration_test.go @@ -1,17 +1,25 @@ package main import ( + "bytes" + "context" + "encoding/json" "fmt" + "io" + "net/http" "os" "os/exec" "path/filepath" "strings" "testing" + "time" + "github.com/modelcontextprotocol/go-sdk/mcp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" 
"github.com/docker/mcp-gateway/cmd/docker-mcp/catalog" + mcpclient "github.com/docker/mcp-gateway/pkg/mcp" ) func thisIsAnIntegrationTest(t *testing.T) { @@ -184,3 +192,109 @@ func TestIntegrationCallToolDuckDuckDb(t *testing.T) { out := runDockerMCP(t, "tools", "call", "--gateway-arg="+strings.Join(gatewayArgs, ","), "search", "query=Docker") assert.Contains(t, out, "Found 10 search results") } + +func TestIntegrationOpenAIModels(t *testing.T) { + thisIsAnIntegrationTest(t) + + // Check for OPENAI_API_KEY + apiKey := os.Getenv("OPENAI_API_KEY") + if apiKey == "" { + t.Skip("OPENAI_API_KEY not set, skipping OpenAI integration test") + } + + // Create a test gateway client + args := []string{ + "mcp", + "gateway", + "run", + "--catalog=" + catalog.DockerCatalogURLV2, + "--servers=", + } + + c := mcpclient.NewStdioCmdClient("openai-test", "docker", os.Environ(), args...) + t.Cleanup(func() { + c.Session().Close() + }) + + initParams := &mcp.InitializeParams{ + ProtocolVersion: "2024-11-05", + ClientInfo: &mcp.Implementation{ + Name: "openai-test-client", + Version: "1.0.0", + }, + } + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + err := c.Initialize(ctx, initParams, false, nil, nil, nil) + require.NoError(t, err) + + // List available tools from the gateway + toolsResult, err := c.Session().ListTools(ctx, &mcp.ListToolsParams{}) + require.NoError(t, err) + require.NotNil(t, toolsResult) + + fmt.Printf("Found %d tools from gateway\n", len(toolsResult.Tools)) + + // Convert tools to OpenAI format + openaiTools := make([]map[string]any, 0, len(toolsResult.Tools)) + for _, tool := range toolsResult.Tools { + openaiTool := map[string]any{ + "type": "function", + "function": map[string]any{ + "name": tool.Name, + "description": tool.Description, + "parameters": tool.InputSchema, + }, + } + openaiTools = append(openaiTools, openaiTool) + } + + fmt.Printf("Converted %d tools for OpenAI\n", len(openaiTools)) + + // Make OpenAI 
API call with gpt-4.1 model + openaiURL := "https://api.openai.com/v1/chat/completions" + requestBody := map[string]any{ + "model": "gpt-4.1", + "messages": []map[string]string{ + { + "role": "user", + "content": "find an mcp server for GitHub", + }, + }, + "tools": openaiTools, + } + + requestJSON, err := json.Marshal(requestBody) + require.NoError(t, err) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, openaiURL, bytes.NewBuffer(requestJSON)) + require.NoError(t, err) + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer "+apiKey) + + httpClient := &http.Client{Timeout: 30 * time.Second} + resp, err := httpClient.Do(req) + require.NoError(t, err) + defer resp.Body.Close() + + // Read response body for better error messages + respBody, err := io.ReadAll(resp.Body) + require.NoError(t, err) + + assert.Equal(t, http.StatusOK, resp.StatusCode, "OpenAI API should return 200, got body: %s", string(respBody)) + + // Parse response + var openaiResp map[string]any + err = json.Unmarshal(respBody, &openaiResp) + require.NoError(t, err) + + // Verify we got a response with choices + choices, ok := openaiResp["choices"].([]any) + require.True(t, ok, "Response should contain choices") + require.NotEmpty(t, choices, "Should have at least one choice") + + fmt.Printf("OpenAI Response: %+v\n", openaiResp) +} diff --git a/shell.nix b/shell.nix index 77a34bd8..ef37f672 100644 --- a/shell.nix +++ b/shell.nix @@ -22,6 +22,8 @@ pkgs.mkShell { curl jq wget + + marp-cli ]; shellHook = '' diff --git a/test/embeddings/clj/dmr.clj b/test/embeddings/clj/dmr.clj new file mode 100644 index 00000000..d975f0e7 --- /dev/null +++ b/test/embeddings/clj/dmr.clj @@ -0,0 +1,135 @@ +(ns dmr + (:require + [babashka.curl :as curl] + [cheshire.core :as json] + [vector-db-process :as vec-db])) + +;; ================================================== +;; DMR +;; ================================================== + +(def url 
"localhost/exp/vDD4.40/engines/llama.cpp/v1/embeddings") +(def models-url "localhost/exp/vDD4.40/engines/llama.cpp/v1/models") +(def create-models-url "localhost/exp/vDD4.40/models/create") +(def socket-path {:raw-args ["--unix-socket" "/var/run/docker.sock"]}) +(def summary-url "localhost/exp/vDD4.40/engines/llama.cpp/v1/chat/completions") + +(defn get-models-url [namespace name] (format "localhost/exp/vDD4.40/engines/llama.cpp/v1/models/%s/%s" namespace name)) + +(defn check + "check the http response" + [status response] + (when (not (= status (:status response))) + (println (format "%s not equal %s - %s" status (:status response) response)) + (throw (ex-info "failed" response))) + response) + +(defn dmr-embeddings + "Stub function for /exp/vDD4.40/engines/llama.cpp/v1/embeddings endpoint." + [embedding-model request] + (curl/post + url + (merge + socket-path + (update + {:body {:model embedding-model} + :headers {"Content-Type" "application/json"} + :throw false} + :body (comp json/generate-string merge) request)))) + +(defn dmr-completion + "Stub function for /exp/vDD4.40/engines/llama.cpp/v1/chat/completions endpoint."
+ [summary-model request] + (curl/post + summary-url + (merge + socket-path + (update + {:body {:model summary-model} + :headers {"Content-Type" "application/json"} + :throw false} + :body (comp json/generate-string merge) request)))) + +(defn dmr-models [] + (curl/get + models-url + (merge + socket-path + {:throw false}))) + +(defn dmr-get-model [namespace name] + (curl/get + (get-models-url namespace name) + (merge + socket-path + {:throw false}))) + +(defn dmr-create-model [s] + (curl/post + create-models-url + (merge + socket-path + {:throw false + :body (json/generate-string {:from s})}))) + +;; ================================================== +;; OpenAI +;; ================================================== +(defn gpt-embeddings + [request] + (curl/post + "https://api.openai.com/v1/embeddings" + (update + {:body {:model "text-embedding-3-small"} + :headers {"Content-Type" "application/json" + "Authorization" (format "Bearer %s" (System/getenv "OPENAI_API_KEY"))} + :throw false} + :body (comp json/generate-string merge) request))) + +(defn gpt-completion + [request] + (curl/post + "https://api.openai.com/v1/chat/completions" + (update + {:body {:model "gpt-4.1"} + :headers {"Content-Type" "application/json" + "Authorization" (format "Bearer %s" (System/getenv "OPENAI_API_KEY"))} + :throw false} + :body (comp json/generate-string merge) request))) + +;; ================================================== +;; LLM Ops that could work with either OpenAI or DMR +;; ================================================== +(defn create-embedding [embedding-fn s] + (-> + ((comp (partial check 200) embedding-fn) {:input s}) + :body + (json/parse-string keyword) + :data + first + :embedding)) + +(defn summarize-tool [completion-fn s] + (-> + ((comp (partial check 200) completion-fn) + {:messages + [{:role "user" + :content (format + "Summarize the following content thoroughly but remove any examples or extraneous details + Do not try to explain how you summarized or that 
you're providing a summary. + Always return a summary. Do not just return the input json. + Start summarizing everything coming after this: \n\n%s" s)}]}) + :body + (json/parse-string keyword) + :choices + first + :message + :content)) + +;; ================================================== +;; Vector DB OPs +;; ================================================== +(defn search [{:keys [embedding-fn connection] :as options} s] + (let [vec (create-embedding embedding-fn s)] + (vec-db/search-vectors connection vec options))) + diff --git a/test/embeddings/clj/embeddings.clj b/test/embeddings/clj/embeddings.clj new file mode 100644 index 00000000..11eb887a --- /dev/null +++ b/test/embeddings/clj/embeddings.clj @@ -0,0 +1,204 @@ +(ns embeddings + (:require + [babashka.curl :as curl] + [cheshire.core :as json] + [clj-yaml.core :as yaml] + [clojure.core.async :as async] + [tolkien.core :as tolkien] + [vector-db-process :as vec-db] + [dmr])) + +;; ================================================== +;; Perform Embeddings +;; ================================================== +(defn summarize-registration [registration] + (str + #_(format "This tool comes from %s\n%s\n" (:server_name registration) (:server_title registration)) + (format "It provides the tool %s %s - %s\n" (-> registration :tool :name) (or (-> registration :tool :title) "") (-> registration :tool :description)) + (format "Input parameters are %s" (->> registration + :tool + :inputSchema + :properties + (map (fn [[k v]] (format "%s %s\n" (name k) (:description v)))) + (apply str))))) + +(defn embed-servers + "embed the server descriptions" + [{:keys [embedding-fn summarize-fn connection]} collection servers] + (println "> embed collection" (:name collection)) + (async/go + (async/ embed server" (-> server :name) " -> " (count summary)) + (async/ embed collection" (:name collection)) + (async/go + (async/ embed tool" (-> tool-registration :tool :name) " -> " (count summary)) + (async/ tool-registration + 
(update :tool dissoc :outputSchema) + (json/generate-string)) + json))) + +(def servers + ["github-official" "gitmcp" "slack" "fetch" "duckduckgo" + "brave" "context7" "dockerhub" "playwright" "wikipedia-mcp" "SQLite" "notion-remote" "rust-mcp-filesystem" "arxiv-mcp-server" "google-maps" "google-maps-comprehensive" "hugging-face" "linkedin-mcp-server" "desktop-commander" + "openbnb-airbnb" + "youtube_transcript" + "time" + "sequentialthinking" + "semgrep" + "resend" + "papersearch" + "openweather" + "openapi-schema" + "openapi" + "node-code-sandbox" + "minecraft-wiki" + "microsoft-learn" + "memory" + "mcp-hackernews" + "maven-tools-mcp" + "markitdown" + "gemini-api-docs" + "filesystem" + "everart" + "stripe" + "elevenlabs"]) + +(def fetch (memoize (fn [url] (try (:body (curl/get url)) (catch Throwable _ ""))))) + +(defn filter-names [coll] (->> coll (map :name))) + +(defn read-catalog [] + (->> (slurp "/Users/slim/.docker/mcp/catalogs/docker-mcp.yaml") + (yaml/parse-string) + :registry + (map (fn [[k v]] (assoc (select-keys v [:title :description :type :readme :toolsUrl]) :name (name k)))) + #_(map (fn [m] (update m :readme fetch))) + (map (fn [m] (update m :toolsUrl (comp filter-names (fn [s] (json/parse-string s keyword)) fetch)))) + (map #(assoc % :tokens ((comp (partial tolkien/count-tokens "text-embedding-3-small") json/generate-string) %))))) + +(defn cleanup-vectors [{:keys [connection]}] + (async/go + (doseq [item (async/> catalog + (filter #(< 8191 (:tokens %))) + (map #(select-keys % [:name :tokens]))) + (time + (async/ " + (-> + (vals (json/parse-string (slurp (format "/Users/slim/docker/mcp-gateway/examples/tool_registrations/tool-json/%s.json" s)) keyword)) + (json/generate-string) + (count)))) + + ;; all tools should have less than 2048 tokens in the data being embedded - should be empty + (->> + (for [s servers] + (for [tool (vals (json/parse-string (slurp (format "/Users/slim/docker/mcp-gateway/examples/tool_registrations/tool-json/%s.json" s)) 
keyword))] + [s (-> tool :tool :name) (tolkien/count-tokens "text-embedding-3-small" (json-with-token-check tool))])) + (apply concat) + (filter (fn [[_ _ n]] (< 2048 n))))) + + diff --git a/test/embeddings/clj/vector_db_process.clj b/test/embeddings/clj/vector_db_process.clj new file mode 100644 index 00000000..18a1ad9c --- /dev/null +++ b/test/embeddings/clj/vector_db_process.clj @@ -0,0 +1,253 @@ +(ns vector-db-process + (:require + [babashka.process :as process] + [cheshire.core :as json] + [clojure.core.async :as async] + [clojure.java.io :as io] + [lsp4clj.io-server :as io-server] + [lsp4clj.server :as server] + [lsp4clj.io-chan :as io-chan])) + +;; Start the vector DB docker container as a background process +;; and return the process handle with stdin/stdout/stderr streams +(defn start-vector-db + "Start the jimclark106/vector-db Docker container with interactive streams. + Returns a map with :process, :in (stdin stream), :out (stdout stream), and :err (stderr stream)" + [{:keys [dimension db name]}] + (let [cmd ["docker" "run" "-i" "--rm" + "--name" name + "--platform" "linux/amd64" + "-v" "./data:/data" + "-e" (format "DB_PATH=/data/%s" db) + "-e" (format "VECTOR_DIMENSION=%s" dimension) + "jimclark106/vector-db:latest"] + proc (process/process cmd {:in :stream + :out :stream + :err :stream})] + {:process proc + :in (:in proc) + :out (:out proc) + :err (:err proc)})) + +(defn stop-container + "Stop the container by destroying the process" + [{:keys [process]}] + (process/destroy process)) + +(defn container-alive? + "Check if the container process is still alive" + [{:keys [process]}] + (process/alive? process)) + +(defn wait-for-container + "Wait for the container to exit and return the exit code" + [{:keys [process]}] + @process) + +(declare mcp-initialize) +(defn vector-db-stdio-server + "Create a stdio-server using the Docker container's stdin/stdout streams. 
+ First starts the vector-db container, then creates a server reading from + the container's stdout and writing to its stdin. + Returns a map with :server, :container, and :join (future that completes when server exits)." + ([] (vector-db-stdio-server {:dimension 1536 :db "vectors.db"})) + ([opts] + (let [log-ch (or (:log-ch opts) (async/chan)) + trace-ch (or (:trace-ch opts) (async/chan)) + container (start-vector-db opts) + + ;; Debug: spawn a thread to monitor stderr + _ (async/thread + (let [reader (io/reader (:err container))] + (loop [] + (when-let [_ (.readLine reader)] + (recur))))) + + ;; Use keyword instead of csk/->kebab-case-keyword to keep keys as-is + ;; The lsp4clj server expects :id, :jsonrpc, :method, :result, etc. + mcp-in-factory (fn [in opts] + (io-chan/mcp-input-stream->input-chan in (assoc opts + :keyword-function keyword + :log-ch log-ch))) + srv (io-server/server (merge {:trace-level "verbose"} + opts + {:in (:out container) + :out (:in container) + :log-ch log-ch + ;:trace-ch trace-ch + :in-chan-factory mcp-in-factory + :out-chan-factory io-chan/mcp-output-stream->output-chan})) + join (server/start srv nil)] + ;; Spawn a thread to print log messages (first 20 chars only) + (async/go-loop [] + (when-let [log-msg (async/ response :content first :text (json/parse-string keyword) :collections) + (catch Exception e + {:error (str "Failed to parse collections response: " (.getMessage e))})))))) + +(defn add-vector + "Add a vector to a collection (creates collection if it doesn't exist). + vector must be a sequence of 1536 numbers. + metadata is an optional map." 
+ [server-container collection-name vector & [metadata]] + (mcp-call-tool server-container "add_vector" + (cond-> {:collection_name collection-name + :vector vector} + metadata (assoc :metadata metadata)))) + +(defn delete-vector + "Delete a vector by its ID" + [server-container vector-id] + (mcp-call-tool server-container "delete_vector" {:id vector-id})) + +(defn search-vectors + "Search for similar vectors using cosine distance. + vector must be a sequence of 1536 numbers. + Options: + - :collection_name - search only within this collection + - :exclude_collections - vector of collection names to exclude + - :limit - maximum number of results (default 10) + Returns a go channel that will emit the parsed search results." + [server-container vector & [options]] + (async/go + (let [response (async/ response :content first :text (json/parse-string keyword)) + (catch Exception e + {:error (str "Failed to parse search response: " (.getMessage e))})))))) + +(comment + ;; Start the container + (def db (start-vector-db {:name "vectors" :dimension 1536 :db "vectors.db"})) + + ;; Access the raw streams + (:in db) ; stdin stream + (:out db) ; stdout stream + (:err db) ; stderr stream + + ;; Check if it's running + (container-alive? 
db) + + ;; Stop the container when done + (stop-container db) + + ;; Or wait for it to exit naturally + (wait-for-container db) + + ;; Create a stdio-server using the container's streams + (def server-container (vector-db-stdio-server {:name "vectors" :dimension 1536 :db "vectors.db"})) + (:server server-container) ; The stdio-server + (:container server-container) ; The container info + + ;; List available tools + (def list-ch (mcp-list-tools server-container)) + ;; Wait for the tools list + (async/ {} }: + +pkgs.mkShell { + buildInputs = with pkgs; [ + # Go toolchain + go + + # Task runner (go-task) + go-task + + # Additional useful Go development tools + gopls # Go Language Server + golangci-lint # Go linter + delve # Go debugger + gotools # Additional Go tools (goimports, etc.) + gofumpt + + # Git for version control + git + + ko + + # Common development utilities + curl + jq + wget + + clojure + ]; + + shellHook = '' + echo "🚀 Go development environment loaded!" + echo "" + echo "Available tools:" + echo " • Go $(go version | cut -d' ' -f3)" + echo " • Task $(task --version)" + echo " • gopls (Go Language Server)" + echo " • golangci-lint" + echo " • delve (Go debugger)" + echo "" + echo "Getting started:" + echo " • Initialize a new Go module: go mod init " + echo " • Create a Taskfile.yml for task automation" + echo " • Run 'task --list' to see available tasks" + echo "" + + # Set up Go environment variables + export GOPATH="$HOME/go" + export GOBIN="$GOPATH/bin" + export PATH="$GOBIN:$PATH" + + # Create GOPATH directories if they don't exist + mkdir -p "$GOPATH"/{bin,src,pkg} + + echo "Environment variables set:" + echo " • GOPATH=$GOPATH" + echo " • GOBIN=$GOBIN" + echo "" + ''; + + # Set environment variables + GOROOT = "${pkgs.go}/share/go"; +}