mirror of
https://github.com/modelcontextprotocol/servers.git
synced 2026-04-17 23:53:24 +02:00
Merge branch 'main' into patch-1
This commit is contained in:
39
.github/workflows/python.yml
vendored
39
.github/workflows/python.yml
vendored
@@ -23,8 +23,45 @@ jobs:
|
||||
PACKAGES=$(find . -name pyproject.toml -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]')
|
||||
echo "packages=$PACKAGES" >> $GITHUB_OUTPUT
|
||||
|
||||
build:
|
||||
test:
|
||||
needs: [detect-packages]
|
||||
strategy:
|
||||
matrix:
|
||||
package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
|
||||
name: Test ${{ matrix.package }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v3
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version-file: "src/${{ matrix.package }}/.python-version"
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: src/${{ matrix.package }}
|
||||
run: uv sync --frozen --all-extras --dev
|
||||
|
||||
- name: Check if tests exist
|
||||
id: check-tests
|
||||
working-directory: src/${{ matrix.package }}
|
||||
run: |
|
||||
if [ -d "tests" ] || [ -d "test" ] || grep -q "pytest" pyproject.toml; then
|
||||
echo "has-tests=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "has-tests=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Run tests
|
||||
if: steps.check-tests.outputs.has-tests == 'true'
|
||||
working-directory: src/${{ matrix.package }}
|
||||
run: uv run pytest
|
||||
|
||||
build:
|
||||
needs: [detect-packages, test]
|
||||
strategy:
|
||||
matrix:
|
||||
package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
|
||||
|
||||
7
.github/workflows/release.yml
vendored
7
.github/workflows/release.yml
vendored
@@ -212,10 +212,3 @@ jobs:
|
||||
--title "Release $VERSION" \
|
||||
--notes-file RELEASE_NOTES.md
|
||||
|
||||
- name: Docker MCP images
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.DOCKER_TOKEN }}
|
||||
repository: docker/labs-ai-tools-for-devs
|
||||
event-type: build-mcp-images
|
||||
client-payload: '{"ref": "${{ needs.create-metadata.outputs.version }}"}'
|
||||
|
||||
37
.github/workflows/typescript.yml
vendored
37
.github/workflows/typescript.yml
vendored
@@ -22,8 +22,43 @@ jobs:
|
||||
PACKAGES=$(find . -name package.json -not -path "*/node_modules/*" -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]')
|
||||
echo "packages=$PACKAGES" >> $GITHUB_OUTPUT
|
||||
|
||||
build:
|
||||
test:
|
||||
needs: [detect-packages]
|
||||
strategy:
|
||||
matrix:
|
||||
package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
|
||||
name: Test ${{ matrix.package }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: npm
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: src/${{ matrix.package }}
|
||||
run: npm ci
|
||||
|
||||
- name: Check if tests exist
|
||||
id: check-tests
|
||||
working-directory: src/${{ matrix.package }}
|
||||
run: |
|
||||
if npm run test --silent 2>/dev/null; then
|
||||
echo "has-tests=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "has-tests=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run tests
|
||||
if: steps.check-tests.outputs.has-tests == 'true'
|
||||
working-directory: src/${{ matrix.package }}
|
||||
run: npm test
|
||||
|
||||
build:
|
||||
needs: [detect-packages, test]
|
||||
strategy:
|
||||
matrix:
|
||||
package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
|
||||
|
||||
30
.github/workflows/version-check.yml
vendored
30
.github/workflows/version-check.yml
vendored
@@ -1,30 +0,0 @@
|
||||
name: Version Consistency Check
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
github:
|
||||
name: Check GitHub server version consistency
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check version consistency
|
||||
run: |
|
||||
PACKAGE_VERSION=$(node -p "require('./src/github/package.json').version")
|
||||
TS_VERSION=$(grep -o '".*"' ./src/github/common/version.ts | tr -d '"')
|
||||
|
||||
if [ "$PACKAGE_VERSION" != "$TS_VERSION" ]; then
|
||||
echo "::error::Version mismatch detected!"
|
||||
echo "::error::package.json version: $PACKAGE_VERSION"
|
||||
echo "::error::version.ts version: $TS_VERSION"
|
||||
exit 1
|
||||
else
|
||||
echo "✅ Versions match: $PACKAGE_VERSION"
|
||||
fi
|
||||
1
.vscode/settings.json
vendored
Normal file
1
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
@@ -1,7 +1,7 @@
|
||||
# Security Policy
|
||||
Thank you for helping us keep our MCP servers secure.
|
||||
|
||||
These servers are maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project.
|
||||
The **reference servers** in this repo are maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project.
|
||||
|
||||
The security of our systems and user data is Anthropic’s top priority. We appreciate the work of security researchers acting in good faith in identifying and reporting potential vulnerabilities.
|
||||
|
||||
|
||||
8062
package-lock.json
generated
8062
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -20,14 +20,8 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/server-everything": "*",
|
||||
"@modelcontextprotocol/server-gdrive": "*",
|
||||
"@modelcontextprotocol/server-postgres": "*",
|
||||
"@modelcontextprotocol/server-puppeteer": "*",
|
||||
"@modelcontextprotocol/server-slack": "*",
|
||||
"@modelcontextprotocol/server-brave-search": "*",
|
||||
"@modelcontextprotocol/server-memory": "*",
|
||||
"@modelcontextprotocol/server-filesystem": "*",
|
||||
"@modelcontextprotocol/server-everart": "*",
|
||||
"@modelcontextprotocol/server-sequential-thinking": "*"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder
|
||||
|
||||
COPY src/aws-kb-retrieval-server /app
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
FROM node:22-alpine AS release
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/dist /app/dist
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,79 +0,0 @@
|
||||
# AWS Knowledge Base Retrieval MCP Server
|
||||
|
||||
An MCP server implementation for retrieving information from the AWS Knowledge Base using the Bedrock Agent Runtime.
|
||||
|
||||
## Features
|
||||
|
||||
- **RAG (Retrieval-Augmented Generation)**: Retrieve context from the AWS Knowledge Base based on a query and a Knowledge Base ID.
|
||||
- **Supports multiple results retrieval**: Option to retrieve a customizable number of results.
|
||||
|
||||
## Tools
|
||||
|
||||
- **retrieve_from_aws_kb**
|
||||
- Perform retrieval operations using the AWS Knowledge Base.
|
||||
- Inputs:
|
||||
- `query` (string): The search query for retrieval.
|
||||
- `knowledgeBaseId` (string): The ID of the AWS Knowledge Base.
|
||||
- `n` (number, optional): Number of results to retrieve (default: 3).
|
||||
|
||||
## Configuration
|
||||
|
||||
### Setting up AWS Credentials
|
||||
|
||||
1. Obtain AWS access key ID, secret access key, and region from the AWS Management Console.
|
||||
2. Ensure these credentials have appropriate permissions for Bedrock Agent Runtime operations.
|
||||
|
||||
### Usage with Claude Desktop
|
||||
|
||||
Add this to your `claude_desktop_config.json`:
|
||||
|
||||
#### Docker
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"aws-kb-retrieval": {
|
||||
"command": "docker",
|
||||
"args": [ "run", "-i", "--rm", "-e", "AWS_ACCESS_KEY_ID", "-e", "AWS_SECRET_ACCESS_KEY", "-e", "AWS_REGION", "mcp/aws-kb-retrieval-server" ],
|
||||
"env": {
|
||||
"AWS_ACCESS_KEY_ID": "YOUR_ACCESS_KEY_HERE",
|
||||
"AWS_SECRET_ACCESS_KEY": "YOUR_SECRET_ACCESS_KEY_HERE",
|
||||
"AWS_REGION": "YOUR_AWS_REGION_HERE"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"aws-kb-retrieval": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-aws-kb-retrieval"
|
||||
],
|
||||
"env": {
|
||||
"AWS_ACCESS_KEY_ID": "YOUR_ACCESS_KEY_HERE",
|
||||
"AWS_SECRET_ACCESS_KEY": "YOUR_SECRET_ACCESS_KEY_HERE",
|
||||
"AWS_REGION": "YOUR_AWS_REGION_HERE"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
Docker:
|
||||
|
||||
```sh
|
||||
docker build -t mcp/aws-kb-retrieval -f src/aws-kb-retrieval-server/Dockerfile .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
|
||||
This README assumes that your server package is named `@modelcontextprotocol/server-aws-kb-retrieval`. Adjust the package name and installation details if they differ in your setup. Also, ensure that your server script is correctly built and that all dependencies are properly managed in your `package.json`.
|
||||
@@ -1,166 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import {
|
||||
BedrockAgentRuntimeClient,
|
||||
RetrieveCommand,
|
||||
RetrieveCommandInput,
|
||||
} from "@aws-sdk/client-bedrock-agent-runtime";
|
||||
|
||||
// AWS client initialization
|
||||
const bedrockClient = new BedrockAgentRuntimeClient({
|
||||
region: process.env.AWS_REGION,
|
||||
credentials: {
|
||||
accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
|
||||
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
|
||||
},
|
||||
});
|
||||
|
||||
interface RAGSource {
|
||||
id: string;
|
||||
fileName: string;
|
||||
snippet: string;
|
||||
score: number;
|
||||
}
|
||||
|
||||
async function retrieveContext(
|
||||
query: string,
|
||||
knowledgeBaseId: string,
|
||||
n: number = 3
|
||||
): Promise<{
|
||||
context: string;
|
||||
isRagWorking: boolean;
|
||||
ragSources: RAGSource[];
|
||||
}> {
|
||||
try {
|
||||
if (!knowledgeBaseId) {
|
||||
console.error("knowledgeBaseId is not provided");
|
||||
return {
|
||||
context: "",
|
||||
isRagWorking: false,
|
||||
ragSources: [],
|
||||
};
|
||||
}
|
||||
|
||||
const input: RetrieveCommandInput = {
|
||||
knowledgeBaseId: knowledgeBaseId,
|
||||
retrievalQuery: { text: query },
|
||||
retrievalConfiguration: {
|
||||
vectorSearchConfiguration: { numberOfResults: n },
|
||||
},
|
||||
};
|
||||
|
||||
const command = new RetrieveCommand(input);
|
||||
const response = await bedrockClient.send(command);
|
||||
const rawResults = response?.retrievalResults || [];
|
||||
const ragSources: RAGSource[] = rawResults
|
||||
.filter((res) => res?.content?.text)
|
||||
.map((result, index) => {
|
||||
const uri = result?.location?.s3Location?.uri || "";
|
||||
const fileName = uri.split("/").pop() || `Source-${index}.txt`;
|
||||
return {
|
||||
id: (result.metadata?.["x-amz-bedrock-kb-chunk-id"] as string) || `chunk-${index}`,
|
||||
fileName: fileName.replace(/_/g, " ").replace(".txt", ""),
|
||||
snippet: result.content?.text || "",
|
||||
score: (result.score as number) || 0,
|
||||
};
|
||||
})
|
||||
.slice(0, 3);
|
||||
|
||||
const context = rawResults
|
||||
.filter((res): res is { content: { text: string } } => res?.content?.text !== undefined)
|
||||
.map(res => res.content.text)
|
||||
.join("\n\n");
|
||||
|
||||
return {
|
||||
context,
|
||||
isRagWorking: true,
|
||||
ragSources,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("RAG Error:", error);
|
||||
return { context: "", isRagWorking: false, ragSources: [] };
|
||||
}
|
||||
}
|
||||
|
||||
// Define the retrieval tool
|
||||
const RETRIEVAL_TOOL: Tool = {
|
||||
name: "retrieve_from_aws_kb",
|
||||
description: "Performs retrieval from the AWS Knowledge Base using the provided query and Knowledge Base ID.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: { type: "string", description: "The query to perform retrieval on" },
|
||||
knowledgeBaseId: { type: "string", description: "The ID of the AWS Knowledge Base" },
|
||||
n: { type: "number", default: 3, description: "Number of results to retrieve" },
|
||||
},
|
||||
required: ["query", "knowledgeBaseId"],
|
||||
},
|
||||
};
|
||||
|
||||
// Server setup
|
||||
const server = new Server(
|
||||
{
|
||||
name: "aws-kb-retrieval-server",
|
||||
version: "0.2.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// Request handlers
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [RETRIEVAL_TOOL],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
const { name, arguments: args } = request.params;
|
||||
|
||||
if (name === "retrieve_from_aws_kb") {
|
||||
const { query, knowledgeBaseId, n = 3 } = args as Record<string, any>;
|
||||
try {
|
||||
const result = await retrieveContext(query, knowledgeBaseId, n);
|
||||
if (result.isRagWorking) {
|
||||
return {
|
||||
content: [
|
||||
{ type: "text", text: `Context: ${result.context}` },
|
||||
{ type: "text", text: `RAG Sources: ${JSON.stringify(result.ragSources)}` },
|
||||
],
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
content: [{ type: "text", text: "Retrieval failed or returned no results." }],
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{ type: "text", text: `Error occurred: ${error}` }],
|
||||
};
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
content: [{ type: "text", text: `Unknown tool: ${name}` }],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
// Server startup
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("AWS KB Retrieval Server running on stdio");
|
||||
}
|
||||
|
||||
runServer().catch((error) => {
|
||||
console.error("Fatal error running server:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,30 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-aws-kb-retrieval",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for AWS Knowledge Base retrieval using Bedrock Agent Runtime",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-aws-kb-retrieval": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "0.5.0",
|
||||
"@aws-sdk/client-bedrock-agent-runtime": "^3.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22",
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": ".",
|
||||
"composite": true,
|
||||
"incremental": true,
|
||||
"tsBuildInfoFile": "./dist/.tsbuildinfo"
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules",
|
||||
"dist"
|
||||
]
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder
|
||||
|
||||
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
||||
COPY src/brave-search /app
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
FROM node:22-alpine AS release
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/dist /app/dist
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,92 +0,0 @@
|
||||
# Brave Search MCP Server
|
||||
|
||||
An MCP server implementation that integrates the Brave Search API, providing both web and local search capabilities.
|
||||
|
||||
## Features
|
||||
|
||||
- **Web Search**: General queries, news, articles, with pagination and freshness controls
|
||||
- **Local Search**: Find businesses, restaurants, and services with detailed information
|
||||
- **Flexible Filtering**: Control result types, safety levels, and content freshness
|
||||
- **Smart Fallbacks**: Local search automatically falls back to web when no results are found
|
||||
|
||||
## Tools
|
||||
|
||||
- **brave_web_search**
|
||||
- Execute web searches with pagination and filtering
|
||||
- Inputs:
|
||||
- `query` (string): Search terms
|
||||
- `count` (number, optional): Results per page (max 20)
|
||||
- `offset` (number, optional): Pagination offset (max 9)
|
||||
|
||||
- **brave_local_search**
|
||||
- Search for local businesses and services
|
||||
- Inputs:
|
||||
- `query` (string): Local search terms
|
||||
- `count` (number, optional): Number of results (max 20)
|
||||
- Automatically falls back to web search if no local results found
|
||||
|
||||
|
||||
## Configuration
|
||||
|
||||
### Getting an API Key
|
||||
1. Sign up for a [Brave Search API account](https://brave.com/search/api/)
|
||||
2. Choose a plan (Free tier available with 2,000 queries/month)
|
||||
3. Generate your API key [from the developer dashboard](https://api.search.brave.com/app/keys)
|
||||
|
||||
### Usage with Claude Desktop
|
||||
Add this to your `claude_desktop_config.json`:
|
||||
|
||||
### Docker
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"brave-search": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"-i",
|
||||
"--rm",
|
||||
"-e",
|
||||
"BRAVE_API_KEY",
|
||||
"mcp/brave-search"
|
||||
],
|
||||
"env": {
|
||||
"BRAVE_API_KEY": "YOUR_API_KEY_HERE"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"brave-search": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-brave-search"
|
||||
],
|
||||
"env": {
|
||||
"BRAVE_API_KEY": "YOUR_API_KEY_HERE"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Build
|
||||
|
||||
Docker build:
|
||||
|
||||
```bash
|
||||
docker build -t mcp/brave-search:latest -f src/brave-search/Dockerfile .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,376 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
|
||||
const WEB_SEARCH_TOOL: Tool = {
|
||||
name: "brave_web_search",
|
||||
description:
|
||||
"Performs a web search using the Brave Search API, ideal for general queries, news, articles, and online content. " +
|
||||
"Use this for broad information gathering, recent events, or when you need diverse web sources. " +
|
||||
"Supports pagination, content filtering, and freshness controls. " +
|
||||
"Maximum 20 results per request, with offset for pagination. ",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: {
|
||||
type: "string",
|
||||
description: "Search query (max 400 chars, 50 words)"
|
||||
},
|
||||
count: {
|
||||
type: "number",
|
||||
description: "Number of results (1-20, default 10)",
|
||||
default: 10
|
||||
},
|
||||
offset: {
|
||||
type: "number",
|
||||
description: "Pagination offset (max 9, default 0)",
|
||||
default: 0
|
||||
},
|
||||
},
|
||||
required: ["query"],
|
||||
},
|
||||
};
|
||||
|
||||
const LOCAL_SEARCH_TOOL: Tool = {
|
||||
name: "brave_local_search",
|
||||
description:
|
||||
"Searches for local businesses and places using Brave's Local Search API. " +
|
||||
"Best for queries related to physical locations, businesses, restaurants, services, etc. " +
|
||||
"Returns detailed information including:\n" +
|
||||
"- Business names and addresses\n" +
|
||||
"- Ratings and review counts\n" +
|
||||
"- Phone numbers and opening hours\n" +
|
||||
"Use this when the query implies 'near me' or mentions specific locations. " +
|
||||
"Automatically falls back to web search if no local results are found.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: {
|
||||
type: "string",
|
||||
description: "Local search query (e.g. 'pizza near Central Park')"
|
||||
},
|
||||
count: {
|
||||
type: "number",
|
||||
description: "Number of results (1-20, default 5)",
|
||||
default: 5
|
||||
},
|
||||
},
|
||||
required: ["query"]
|
||||
}
|
||||
};
|
||||
|
||||
// Server implementation
|
||||
const server = new Server(
|
||||
{
|
||||
name: "example-servers/brave-search",
|
||||
version: "0.1.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// Check for API key
|
||||
const BRAVE_API_KEY = process.env.BRAVE_API_KEY!;
|
||||
if (!BRAVE_API_KEY) {
|
||||
console.error("Error: BRAVE_API_KEY environment variable is required");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const RATE_LIMIT = {
|
||||
perSecond: 1,
|
||||
perMonth: 15000
|
||||
};
|
||||
|
||||
let requestCount = {
|
||||
second: 0,
|
||||
month: 0,
|
||||
lastReset: Date.now()
|
||||
};
|
||||
|
||||
function checkRateLimit() {
|
||||
const now = Date.now();
|
||||
if (now - requestCount.lastReset > 1000) {
|
||||
requestCount.second = 0;
|
||||
requestCount.lastReset = now;
|
||||
}
|
||||
if (requestCount.second >= RATE_LIMIT.perSecond ||
|
||||
requestCount.month >= RATE_LIMIT.perMonth) {
|
||||
throw new Error('Rate limit exceeded');
|
||||
}
|
||||
requestCount.second++;
|
||||
requestCount.month++;
|
||||
}
|
||||
|
||||
interface BraveWeb {
|
||||
web?: {
|
||||
results?: Array<{
|
||||
title: string;
|
||||
description: string;
|
||||
url: string;
|
||||
language?: string;
|
||||
published?: string;
|
||||
rank?: number;
|
||||
}>;
|
||||
};
|
||||
locations?: {
|
||||
results?: Array<{
|
||||
id: string; // Required by API
|
||||
title?: string;
|
||||
}>;
|
||||
};
|
||||
}
|
||||
|
||||
interface BraveLocation {
|
||||
id: string;
|
||||
name: string;
|
||||
address: {
|
||||
streetAddress?: string;
|
||||
addressLocality?: string;
|
||||
addressRegion?: string;
|
||||
postalCode?: string;
|
||||
};
|
||||
coordinates?: {
|
||||
latitude: number;
|
||||
longitude: number;
|
||||
};
|
||||
phone?: string;
|
||||
rating?: {
|
||||
ratingValue?: number;
|
||||
ratingCount?: number;
|
||||
};
|
||||
openingHours?: string[];
|
||||
priceRange?: string;
|
||||
}
|
||||
|
||||
interface BravePoiResponse {
|
||||
results: BraveLocation[];
|
||||
}
|
||||
|
||||
interface BraveDescription {
|
||||
descriptions: {[id: string]: string};
|
||||
}
|
||||
|
||||
function isBraveWebSearchArgs(args: unknown): args is { query: string; count?: number } {
|
||||
return (
|
||||
typeof args === "object" &&
|
||||
args !== null &&
|
||||
"query" in args &&
|
||||
typeof (args as { query: string }).query === "string"
|
||||
);
|
||||
}
|
||||
|
||||
function isBraveLocalSearchArgs(args: unknown): args is { query: string; count?: number } {
|
||||
return (
|
||||
typeof args === "object" &&
|
||||
args !== null &&
|
||||
"query" in args &&
|
||||
typeof (args as { query: string }).query === "string"
|
||||
);
|
||||
}
|
||||
|
||||
async function performWebSearch(query: string, count: number = 10, offset: number = 0) {
|
||||
checkRateLimit();
|
||||
const url = new URL('https://api.search.brave.com/res/v1/web/search');
|
||||
url.searchParams.set('q', query);
|
||||
url.searchParams.set('count', Math.min(count, 20).toString()); // API limit
|
||||
url.searchParams.set('offset', offset.toString());
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': BRAVE_API_KEY
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`);
|
||||
}
|
||||
|
||||
const data = await response.json() as BraveWeb;
|
||||
|
||||
// Extract just web results
|
||||
const results = (data.web?.results || []).map(result => ({
|
||||
title: result.title || '',
|
||||
description: result.description || '',
|
||||
url: result.url || ''
|
||||
}));
|
||||
|
||||
return results.map(r =>
|
||||
`Title: ${r.title}\nDescription: ${r.description}\nURL: ${r.url}`
|
||||
).join('\n\n');
|
||||
}
|
||||
|
||||
async function performLocalSearch(query: string, count: number = 5) {
|
||||
checkRateLimit();
|
||||
// Initial search to get location IDs
|
||||
const webUrl = new URL('https://api.search.brave.com/res/v1/web/search');
|
||||
webUrl.searchParams.set('q', query);
|
||||
webUrl.searchParams.set('search_lang', 'en');
|
||||
webUrl.searchParams.set('result_filter', 'locations');
|
||||
webUrl.searchParams.set('count', Math.min(count, 20).toString());
|
||||
|
||||
const webResponse = await fetch(webUrl, {
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': BRAVE_API_KEY
|
||||
}
|
||||
});
|
||||
|
||||
if (!webResponse.ok) {
|
||||
throw new Error(`Brave API error: ${webResponse.status} ${webResponse.statusText}\n${await webResponse.text()}`);
|
||||
}
|
||||
|
||||
const webData = await webResponse.json() as BraveWeb;
|
||||
const locationIds = webData.locations?.results?.filter((r): r is {id: string; title?: string} => r.id != null).map(r => r.id) || [];
|
||||
|
||||
if (locationIds.length === 0) {
|
||||
return performWebSearch(query, count); // Fallback to web search
|
||||
}
|
||||
|
||||
// Get POI details and descriptions in parallel
|
||||
const [poisData, descriptionsData] = await Promise.all([
|
||||
getPoisData(locationIds),
|
||||
getDescriptionsData(locationIds)
|
||||
]);
|
||||
|
||||
return formatLocalResults(poisData, descriptionsData);
|
||||
}
|
||||
|
||||
async function getPoisData(ids: string[]): Promise<BravePoiResponse> {
|
||||
checkRateLimit();
|
||||
const url = new URL('https://api.search.brave.com/res/v1/local/pois');
|
||||
ids.filter(Boolean).forEach(id => url.searchParams.append('ids', id));
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': BRAVE_API_KEY
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`);
|
||||
}
|
||||
|
||||
const poisResponse = await response.json() as BravePoiResponse;
|
||||
return poisResponse;
|
||||
}
|
||||
|
||||
async function getDescriptionsData(ids: string[]): Promise<BraveDescription> {
|
||||
checkRateLimit();
|
||||
const url = new URL('https://api.search.brave.com/res/v1/local/descriptions');
|
||||
ids.filter(Boolean).forEach(id => url.searchParams.append('ids', id));
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': BRAVE_API_KEY
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`);
|
||||
}
|
||||
|
||||
const descriptionsData = await response.json() as BraveDescription;
|
||||
return descriptionsData;
|
||||
}
|
||||
|
||||
function formatLocalResults(poisData: BravePoiResponse, descData: BraveDescription): string {
|
||||
return (poisData.results || []).map(poi => {
|
||||
const address = [
|
||||
poi.address?.streetAddress ?? '',
|
||||
poi.address?.addressLocality ?? '',
|
||||
poi.address?.addressRegion ?? '',
|
||||
poi.address?.postalCode ?? ''
|
||||
].filter(part => part !== '').join(', ') || 'N/A';
|
||||
|
||||
return `Name: ${poi.name}
|
||||
Address: ${address}
|
||||
Phone: ${poi.phone || 'N/A'}
|
||||
Rating: ${poi.rating?.ratingValue ?? 'N/A'} (${poi.rating?.ratingCount ?? 0} reviews)
|
||||
Price Range: ${poi.priceRange || 'N/A'}
|
||||
Hours: ${(poi.openingHours || []).join(', ') || 'N/A'}
|
||||
Description: ${descData.descriptions[poi.id] || 'No description available'}
|
||||
`;
|
||||
}).join('\n---\n') || 'No local results found';
|
||||
}
|
||||
|
||||
// Tool handlers
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [WEB_SEARCH_TOOL, LOCAL_SEARCH_TOOL],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
try {
|
||||
const { name, arguments: args } = request.params;
|
||||
|
||||
if (!args) {
|
||||
throw new Error("No arguments provided");
|
||||
}
|
||||
|
||||
switch (name) {
|
||||
case "brave_web_search": {
|
||||
if (!isBraveWebSearchArgs(args)) {
|
||||
throw new Error("Invalid arguments for brave_web_search");
|
||||
}
|
||||
const { query, count = 10 } = args;
|
||||
const results = await performWebSearch(query, count);
|
||||
return {
|
||||
content: [{ type: "text", text: results }],
|
||||
isError: false,
|
||||
};
|
||||
}
|
||||
|
||||
case "brave_local_search": {
|
||||
if (!isBraveLocalSearchArgs(args)) {
|
||||
throw new Error("Invalid arguments for brave_local_search");
|
||||
}
|
||||
const { query, count = 5 } = args;
|
||||
const results = await performLocalSearch(query, count);
|
||||
return {
|
||||
content: [{ type: "text", text: results }],
|
||||
isError: false,
|
||||
};
|
||||
}
|
||||
|
||||
default:
|
||||
return {
|
||||
content: [{ type: "text", text: `Unknown tool: ${name}` }],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Error: ${error instanceof Error ? error.message : String(error)}`,
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("Brave Search MCP Server running on stdio");
|
||||
}
|
||||
|
||||
runServer().catch((error) => {
|
||||
console.error("Fatal error running server:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,29 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-brave-search",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for Brave Search API integration",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-brave-search": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22",
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder
|
||||
|
||||
COPY src/everart /app
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
FROM node:22-alpine AS release
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/dist /app/dist
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
|
||||
CMD ["node", "dist/index.js"]
|
||||
@@ -1,97 +0,0 @@
|
||||
# EverArt MCP Server
|
||||
|
||||
Image generation server for Claude Desktop using EverArt's API.
|
||||
|
||||
## Install
|
||||
```bash
|
||||
npm install
|
||||
export EVERART_API_KEY=your_key_here
|
||||
```
|
||||
|
||||
## Config
|
||||
Add to Claude Desktop config:
|
||||
|
||||
### Docker
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"everart": {
|
||||
"command": "docker",
|
||||
"args": ["run", "-i", "--rm", "-e", "EVERART_API_KEY", "mcp/everart"],
|
||||
"env": {
|
||||
"EVERART_API_KEY": "your_key_here"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"everart": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@modelcontextprotocol/server-everart"],
|
||||
"env": {
|
||||
"EVERART_API_KEY": "your_key_here"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Tools
|
||||
|
||||
### generate_image
|
||||
Generates images with multiple model options. Opens result in browser and returns URL.
|
||||
|
||||
Parameters:
|
||||
```typescript
|
||||
{
|
||||
prompt: string, // Image description
|
||||
model?: string, // Model ID (default: "207910310772879360")
|
||||
image_count?: number // Number of images (default: 1)
|
||||
}
|
||||
```
|
||||
|
||||
Models:
|
||||
- 5000: FLUX1.1 (standard)
|
||||
- 9000: FLUX1.1-ultra
|
||||
- 6000: SD3.5
|
||||
- 7000: Recraft-Real
|
||||
- 8000: Recraft-Vector
|
||||
|
||||
All images generated at 1024x1024.
|
||||
|
||||
Sample usage:
|
||||
```javascript
|
||||
const result = await client.callTool({
|
||||
name: "generate_image",
|
||||
arguments: {
|
||||
prompt: "A cat sitting elegantly",
|
||||
model: "7000",
|
||||
image_count: 1
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
Response format:
|
||||
```
|
||||
Image generated successfully!
|
||||
The image has been opened in your default browser.
|
||||
|
||||
Generation details:
|
||||
- Model: 7000
|
||||
- Prompt: "A cat sitting elegantly"
|
||||
- Image URL: https://storage.googleapis.com/...
|
||||
|
||||
You can also click the URL above to view the image again.
|
||||
```
|
||||
|
||||
## Building w/ Docker
|
||||
|
||||
```sh
|
||||
docker build -t mcp/everart -f src/everart/Dockerfile .
|
||||
```
|
||||
@@ -1,160 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import EverArt from "everart";
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
ListResourcesRequestSchema,
|
||||
ReadResourceRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import fetch from "node-fetch";
|
||||
import open from "open";
|
||||
|
||||
const server = new Server(
|
||||
{
|
||||
name: "example-servers/everart",
|
||||
version: "0.2.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {},
|
||||
resources: {}, // Required for image resources
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
if (!process.env.EVERART_API_KEY) {
|
||||
console.error("EVERART_API_KEY environment variable is not set");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const client = new EverArt.default(process.env.EVERART_API_KEY);
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [
|
||||
{
|
||||
name: "generate_image",
|
||||
description:
|
||||
"Generate images using EverArt Models and returns a clickable link to view the generated image. " +
|
||||
"The tool will return a URL that can be clicked to view the image in a browser. " +
|
||||
"Available models:\n" +
|
||||
"- 5000:FLUX1.1: Standard quality\n" +
|
||||
"- 9000:FLUX1.1-ultra: Ultra high quality\n" +
|
||||
"- 6000:SD3.5: Stable Diffusion 3.5\n" +
|
||||
"- 7000:Recraft-Real: Photorealistic style\n" +
|
||||
"- 8000:Recraft-Vector: Vector art style\n" +
|
||||
"\nThe response will contain a direct link to view the generated image.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
prompt: {
|
||||
type: "string",
|
||||
description: "Text description of desired image",
|
||||
},
|
||||
model: {
|
||||
type: "string",
|
||||
description:
|
||||
"Model ID (5000:FLUX1.1, 9000:FLUX1.1-ultra, 6000:SD3.5, 7000:Recraft-Real, 8000:Recraft-Vector)",
|
||||
default: "5000",
|
||||
},
|
||||
image_count: {
|
||||
type: "number",
|
||||
description: "Number of images to generate",
|
||||
default: 1,
|
||||
},
|
||||
},
|
||||
required: ["prompt"],
|
||||
},
|
||||
},
|
||||
],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(ListResourcesRequestSchema, async () => {
|
||||
return {
|
||||
resources: [
|
||||
{
|
||||
uri: "everart://images",
|
||||
mimeType: "image/png",
|
||||
name: "Generated Images",
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
|
||||
if (request.params.uri === "everart://images") {
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri: "everart://images",
|
||||
mimeType: "image/png",
|
||||
blob: "", // Empty since this is just for listing
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
throw new Error("Resource not found");
|
||||
});
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === "generate_image") {
|
||||
try {
|
||||
const {
|
||||
prompt,
|
||||
model = "207910310772879360",
|
||||
image_count = 1,
|
||||
} = request.params.arguments as any;
|
||||
|
||||
// Use correct EverArt API method
|
||||
const generation = await client.v1.generations.create(
|
||||
model,
|
||||
prompt,
|
||||
"txt2img",
|
||||
{
|
||||
imageCount: image_count,
|
||||
height: 1024,
|
||||
width: 1024,
|
||||
},
|
||||
);
|
||||
|
||||
// Wait for generation to complete
|
||||
const completedGen = await client.v1.generations.fetchWithPolling(
|
||||
generation[0].id,
|
||||
);
|
||||
|
||||
const imgUrl = completedGen.image_url;
|
||||
if (!imgUrl) throw new Error("No image URL");
|
||||
|
||||
// Automatically open the image URL in the default browser
|
||||
await open(imgUrl);
|
||||
|
||||
// Return a formatted message with the clickable link
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Image generated successfully!\nThe image has been opened in your default browser.\n\nGeneration details:\n- Model: ${model}\n- Prompt: "${prompt}"\n- Image URL: ${imgUrl}\n\nYou can also click the URL above to view the image again.`,
|
||||
},
|
||||
],
|
||||
};
|
||||
} catch (error: unknown) {
|
||||
console.error("Detailed error:", error);
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : "Unknown error";
|
||||
return {
|
||||
content: [{ type: "text", text: `Error: ${errorMessage}` }],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
});
|
||||
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("EverArt MCP Server running on stdio");
|
||||
}
|
||||
|
||||
runServer().catch(console.error);
|
||||
@@ -1,32 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-everart",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for EverArt API integration",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-everart": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "0.5.0",
|
||||
"everart": "^1.0.0",
|
||||
"node-fetch": "^3.3.2",
|
||||
"open": "^9.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22",
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.3.3"
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
@@ -72,6 +72,14 @@ This MCP server attempts to exercise all the features of the MCP protocol. It is
|
||||
- Embedded resource with `type: "resource"`
|
||||
- Text instruction for using the resource URI
|
||||
|
||||
9. `startElicitation`
|
||||
- Initiates an elicitation (interaction) within the MCP client.
|
||||
- Inputs:
|
||||
- `color` (string): Favorite color
|
||||
- `number` (number, 1-100): Favorite number
|
||||
- `pets` (enum): Favorite pet
|
||||
- Returns: Confirmation of the elicitation demo with selection summary.
|
||||
|
||||
### Resources
|
||||
|
||||
The server provides 100 test resources in two formats:
|
||||
@@ -126,7 +134,7 @@ The server sends random-leveled log messages every 15 seconds, e.g.:
|
||||
}
|
||||
```
|
||||
|
||||
## Usage with Claude Desktop
|
||||
## Usage with Claude Desktop (uses [stdio Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#stdio))
|
||||
|
||||
Add to your `claude_desktop_config.json`:
|
||||
|
||||
@@ -143,3 +151,75 @@ Add to your `claude_desktop_config.json`:
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Usage with VS Code
|
||||
|
||||
For quick installation, use of of the one-click install buttons below...
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D&quality=insiders)
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D&quality=insiders)
|
||||
|
||||
For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`.
|
||||
|
||||
Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
|
||||
|
||||
> Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
|
||||
|
||||
#### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"everything": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@modelcontextprotocol/server-everything"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Running from source with [HTTP+SSE Transport](https://modelcontextprotocol.io/specification/2024-11-05/basic/transports#http-with-sse) (deprecated as of [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports))
|
||||
|
||||
```shell
|
||||
cd src/everything
|
||||
npm install
|
||||
npm run start:sse
|
||||
```
|
||||
|
||||
## Run from source with [Streamable HTTP Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http)
|
||||
|
||||
```shell
|
||||
cd src/everything
|
||||
npm install
|
||||
npm run start:streamableHttp
|
||||
```
|
||||
|
||||
## Running as an installed package
|
||||
### Install
|
||||
```shell
|
||||
npm install -g @modelcontextprotocol/server-everything@latest
|
||||
````
|
||||
|
||||
### Run the default (stdio) server
|
||||
```shell
|
||||
npx @modelcontextprotocol/server-everything
|
||||
```
|
||||
|
||||
### Or specify stdio explicitly
|
||||
```shell
|
||||
npx @modelcontextprotocol/server-everything stdio
|
||||
```
|
||||
|
||||
### Run the SSE server
|
||||
```shell
|
||||
npx @modelcontextprotocol/server-everything sse
|
||||
```
|
||||
|
||||
### Run the streamable HTTP server
|
||||
```shell
|
||||
npx @modelcontextprotocol/server-everything streamableHttp
|
||||
```
|
||||
|
||||
|
||||
@@ -20,6 +20,13 @@ import {
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import { z } from "zod";
|
||||
import { zodToJsonSchema } from "zod-to-json-schema";
|
||||
import { readFileSync } from "fs";
|
||||
import { fileURLToPath } from "url";
|
||||
import { dirname, join } from "path";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const instructions = readFileSync(join(__dirname, "instructions.md"), "utf-8");
|
||||
|
||||
const ToolInputSchema = ToolSchema.shape.inputSchema;
|
||||
type ToolInput = z.infer<typeof ToolInputSchema>;
|
||||
@@ -79,6 +86,17 @@ const GetResourceReferenceSchema = z.object({
|
||||
.describe("ID of the resource to reference (1-100)"),
|
||||
});
|
||||
|
||||
const ElicitationSchema = z.object({});
|
||||
|
||||
const GetResourceLinksSchema = z.object({
|
||||
count: z
|
||||
.number()
|
||||
.min(1)
|
||||
.max(10)
|
||||
.default(3)
|
||||
.describe("Number of resource links to return (1-10)"),
|
||||
});
|
||||
|
||||
enum ToolName {
|
||||
ECHO = "echo",
|
||||
ADD = "add",
|
||||
@@ -88,6 +106,8 @@ enum ToolName {
|
||||
GET_TINY_IMAGE = "getTinyImage",
|
||||
ANNOTATED_MESSAGE = "annotatedMessage",
|
||||
GET_RESOURCE_REFERENCE = "getResourceReference",
|
||||
ELICITATION = "startElicitation",
|
||||
GET_RESOURCE_LINKS = "getResourceLinks",
|
||||
}
|
||||
|
||||
enum PromptName {
|
||||
@@ -108,7 +128,10 @@ export const createServer = () => {
|
||||
resources: { subscribe: true },
|
||||
tools: {},
|
||||
logging: {},
|
||||
completions: {},
|
||||
elicitation: {},
|
||||
},
|
||||
instructions
|
||||
}
|
||||
);
|
||||
|
||||
@@ -159,9 +182,9 @@ export const createServer = () => {
|
||||
// Set up update interval for stderr messages
|
||||
stdErrUpdateInterval = setInterval(() => {
|
||||
const shortTimestamp = new Date().toLocaleTimeString([], {
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
second: '2-digit'
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
second: "2-digit"
|
||||
});
|
||||
server.notification({
|
||||
method: "notifications/stderr",
|
||||
@@ -197,6 +220,21 @@ export const createServer = () => {
|
||||
return await server.request(request, CreateMessageResultSchema);
|
||||
};
|
||||
|
||||
const requestElicitation = async (
|
||||
message: string,
|
||||
requestedSchema: any
|
||||
) => {
|
||||
const request = {
|
||||
method: 'elicitation/create',
|
||||
params: {
|
||||
message,
|
||||
requestedSchema
|
||||
}
|
||||
};
|
||||
|
||||
return await server.request(request, z.any());
|
||||
};
|
||||
|
||||
const ALL_RESOURCES: Resource[] = Array.from({ length: 100 }, (_, i) => {
|
||||
const uri = `test://static/resource/${i + 1}`;
|
||||
if (i % 2 === 0) {
|
||||
@@ -450,6 +488,17 @@ export const createServer = () => {
|
||||
"Returns a resource reference that can be used by MCP clients",
|
||||
inputSchema: zodToJsonSchema(GetResourceReferenceSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: ToolName.ELICITATION,
|
||||
description: "Demonstrates the Elicitation feature by asking the user to provide information about their favorite color, number, and pets.",
|
||||
inputSchema: zodToJsonSchema(ElicitationSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: ToolName.GET_RESOURCE_LINKS,
|
||||
description:
|
||||
"Returns multiple resource links that reference different types of resources",
|
||||
inputSchema: zodToJsonSchema(GetResourceLinksSchema) as ToolInput,
|
||||
},
|
||||
];
|
||||
|
||||
return { tools };
|
||||
@@ -639,6 +688,91 @@ export const createServer = () => {
|
||||
return { content };
|
||||
}
|
||||
|
||||
if (name === ToolName.ELICITATION) {
|
||||
ElicitationSchema.parse(args);
|
||||
|
||||
const elicitationResult = await requestElicitation(
|
||||
'What are your favorite things?',
|
||||
{
|
||||
type: 'object',
|
||||
properties: {
|
||||
color: { type: 'string', description: 'Favorite color' },
|
||||
number: { type: 'integer', description: 'Favorite number', minimum: 1, maximum: 100 },
|
||||
pets: {
|
||||
type: 'string',
|
||||
enum: ['cats', 'dogs', 'birds', 'fish', 'reptiles'],
|
||||
description: 'Favorite pets'
|
||||
},
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
// Handle different response actions
|
||||
const content = [];
|
||||
|
||||
if (elicitationResult.action === 'accept' && elicitationResult.content) {
|
||||
content.push({
|
||||
type: "text",
|
||||
text: `✅ User provided their favorite things!`,
|
||||
});
|
||||
|
||||
// Only access elicitationResult.content when action is accept
|
||||
const { color, number, pets } = elicitationResult.content;
|
||||
content.push({
|
||||
type: "text",
|
||||
text: `Their favorites are:\n- Color: ${color || 'not specified'}\n- Number: ${number || 'not specified'}\n- Pets: ${pets || 'not specified'}`,
|
||||
});
|
||||
} else if (elicitationResult.action === 'decline') {
|
||||
content.push({
|
||||
type: "text",
|
||||
text: `❌ User declined to provide their favorite things.`,
|
||||
});
|
||||
} else if (elicitationResult.action === 'cancel') {
|
||||
content.push({
|
||||
type: "text",
|
||||
text: `⚠️ User cancelled the elicitation dialog.`,
|
||||
});
|
||||
}
|
||||
|
||||
// Include raw result for debugging
|
||||
content.push({
|
||||
type: "text",
|
||||
text: `\nRaw result: ${JSON.stringify(elicitationResult, null, 2)}`,
|
||||
});
|
||||
|
||||
return { content };
|
||||
}
|
||||
|
||||
if (name === ToolName.GET_RESOURCE_LINKS) {
|
||||
const { count } = GetResourceLinksSchema.parse(args);
|
||||
const content = [];
|
||||
|
||||
// Add intro text
|
||||
content.push({
|
||||
type: "text",
|
||||
text: `Here are ${count} resource links to resources available in this server (see full output in tool response if your client does not support resource_link yet):`,
|
||||
});
|
||||
|
||||
// Return resource links to actual resources from ALL_RESOURCES
|
||||
const actualCount = Math.min(count, ALL_RESOURCES.length);
|
||||
for (let i = 0; i < actualCount; i++) {
|
||||
const resource = ALL_RESOURCES[i];
|
||||
content.push({
|
||||
type: "resource_link",
|
||||
uri: resource.uri,
|
||||
name: resource.name,
|
||||
description: `Resource ${i + 1}: ${
|
||||
resource.mimeType === "text/plain"
|
||||
? "plaintext resource"
|
||||
: "binary blob resource"
|
||||
}`,
|
||||
mimeType: resource.mimeType,
|
||||
});
|
||||
}
|
||||
|
||||
return { content };
|
||||
}
|
||||
|
||||
throw new Error(`Unknown tool: ${name}`);
|
||||
});
|
||||
|
||||
|
||||
@@ -1,23 +1,37 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import { createServer } from "./everything.js";
|
||||
// Parse command line arguments first
|
||||
const args = process.argv.slice(2);
|
||||
const scriptName = args[0] || 'stdio';
|
||||
|
||||
async function main() {
|
||||
const transport = new StdioServerTransport();
|
||||
const { server, cleanup } = createServer();
|
||||
|
||||
await server.connect(transport);
|
||||
|
||||
// Cleanup on exit
|
||||
process.on("SIGINT", async () => {
|
||||
await cleanup();
|
||||
await server.close();
|
||||
process.exit(0);
|
||||
});
|
||||
async function run() {
|
||||
try {
|
||||
// Dynamically import only the requested module to prevent all modules from initializing
|
||||
switch (scriptName) {
|
||||
case 'stdio':
|
||||
// Import and run the default server
|
||||
await import('./stdio.js');
|
||||
break;
|
||||
case 'sse':
|
||||
// Import and run the SSE server
|
||||
await import('./sse.js');
|
||||
break;
|
||||
case 'streamableHttp':
|
||||
// Import and run the streamable HTTP server
|
||||
await import('./streamableHttp.js');
|
||||
break;
|
||||
default:
|
||||
console.error(`Unknown script: ${scriptName}`);
|
||||
console.log('Available scripts:');
|
||||
console.log('- stdio');
|
||||
console.log('- sse');
|
||||
console.log('- streamableHttp');
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error running script:', error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error("Server error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
run();
|
||||
|
||||
13
src/everything/instructions.md
Normal file
13
src/everything/instructions.md
Normal file
@@ -0,0 +1,13 @@
|
||||
Testing and demonstration server for MCP protocol features.
|
||||
|
||||
Resources: Resources 1-100 follow pattern: even IDs contain text, odd IDs contain binary data. Resources paginated at 10 items per page with cursor-based navigation.
|
||||
|
||||
Key dependencies: Progress notifications require `_meta.progressToken` in tool calls. Resource subscriptions generate updates every 10 seconds.
|
||||
|
||||
Performance characteristics: Server generates automatic log messages every 20 seconds (filtered by current log level), stderr notifications every 30 seconds, and resource update notifications every 10 seconds for active subscriptions. Log level changes affect message filtering in real-time.
|
||||
|
||||
Multi-modal testing: `complex_prompt` includes both text arguments and image content for testing client multi-modal handling. `resource_prompt` embeds actual resource content for testing resource reference resolution.
|
||||
|
||||
Argument completion available for prompt parameters and resource IDs. Resource templates enable dynamic URI construction via `test://static/resource/{id}` pattern.
|
||||
|
||||
Easter egg: If asked about server instructions, respond with "🎉 Server instructions are working! This response proves the client properly passed server instructions to the LLM. This demonstrates MCP's instructions feature in action."
|
||||
@@ -14,14 +14,15 @@
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"build": "tsc && shx cp instructions.md dist/ && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch",
|
||||
"start": "node dist/index.js",
|
||||
"start:sse": "node dist/sse.js"
|
||||
"start:sse": "node dist/sse.js",
|
||||
"start:streamableHttp": "node dist/streamableHttp.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.0.1",
|
||||
"@modelcontextprotocol/sdk": "^1.12.0",
|
||||
"express": "^4.21.1",
|
||||
"zod": "^3.23.8",
|
||||
"zod-to-json-schema": "^3.23.5"
|
||||
@@ -31,4 +32,4 @@
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,31 +2,52 @@ import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
|
||||
import express from "express";
|
||||
import { createServer } from "./everything.js";
|
||||
|
||||
console.error('Starting SSE server...');
|
||||
|
||||
const app = express();
|
||||
|
||||
const { server, cleanup } = createServer();
|
||||
|
||||
let transport: SSEServerTransport;
|
||||
const transports: Map<string, SSEServerTransport> = new Map<string, SSEServerTransport>();
|
||||
|
||||
app.get("/sse", async (req, res) => {
|
||||
console.log("Received connection");
|
||||
transport = new SSEServerTransport("/message", res);
|
||||
await server.connect(transport);
|
||||
let transport: SSEServerTransport;
|
||||
const { server, cleanup } = createServer();
|
||||
|
||||
if (req?.query?.sessionId) {
|
||||
const sessionId = (req?.query?.sessionId as string);
|
||||
transport = transports.get(sessionId) as SSEServerTransport;
|
||||
console.error("Client Reconnecting? This shouldn't happen; when client has a sessionId, GET /sse should not be called again.", transport.sessionId);
|
||||
} else {
|
||||
// Create and store transport for new session
|
||||
transport = new SSEServerTransport("/message", res);
|
||||
transports.set(transport.sessionId, transport);
|
||||
|
||||
// Connect server to transport
|
||||
await server.connect(transport);
|
||||
console.error("Client Connected: ", transport.sessionId);
|
||||
|
||||
// Handle close of connection
|
||||
server.onclose = async () => {
|
||||
console.error("Client Disconnected: ", transport.sessionId);
|
||||
transports.delete(transport.sessionId);
|
||||
await cleanup();
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
server.onclose = async () => {
|
||||
await cleanup();
|
||||
await server.close();
|
||||
process.exit(0);
|
||||
};
|
||||
});
|
||||
|
||||
app.post("/message", async (req, res) => {
|
||||
console.log("Received message");
|
||||
|
||||
await transport.handlePostMessage(req, res);
|
||||
const sessionId = (req?.query?.sessionId as string);
|
||||
const transport = transports.get(sessionId);
|
||||
if (transport) {
|
||||
console.error("Client Message from", sessionId);
|
||||
await transport.handlePostMessage(req, res);
|
||||
} else {
|
||||
console.error(`No transport found for sessionId ${sessionId}`)
|
||||
}
|
||||
});
|
||||
|
||||
const PORT = process.env.PORT || 3001;
|
||||
app.listen(PORT, () => {
|
||||
console.log(`Server is running on port ${PORT}`);
|
||||
console.error(`Server is running on port ${PORT}`);
|
||||
});
|
||||
|
||||
26
src/everything/stdio.ts
Normal file
26
src/everything/stdio.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import { createServer } from "./everything.js";
|
||||
|
||||
console.error('Starting default (STDIO) server...');
|
||||
|
||||
async function main() {
|
||||
const transport = new StdioServerTransport();
|
||||
const {server, cleanup} = createServer();
|
||||
|
||||
await server.connect(transport);
|
||||
|
||||
// Cleanup on exit
|
||||
process.on("SIGINT", async () => {
|
||||
await cleanup();
|
||||
await server.close();
|
||||
process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error("Server error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
176
src/everything/streamableHttp.ts
Normal file
176
src/everything/streamableHttp.ts
Normal file
@@ -0,0 +1,176 @@
|
||||
import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
|
||||
import { InMemoryEventStore } from '@modelcontextprotocol/sdk/examples/shared/inMemoryEventStore.js';
|
||||
import express, { Request, Response } from "express";
|
||||
import { createServer } from "./everything.js";
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
console.error('Starting Streamable HTTP server...');
|
||||
|
||||
const app = express();
|
||||
|
||||
const transports: Map<string, StreamableHTTPServerTransport> = new Map<string, StreamableHTTPServerTransport>();
|
||||
|
||||
app.post('/mcp', async (req: Request, res: Response) => {
|
||||
console.error('Received MCP POST request');
|
||||
try {
|
||||
// Check for existing session ID
|
||||
const sessionId = req.headers['mcp-session-id'] as string | undefined;
|
||||
let transport: StreamableHTTPServerTransport;
|
||||
|
||||
if (sessionId && transports.has(sessionId)) {
|
||||
// Reuse existing transport
|
||||
transport = transports.get(sessionId)!;
|
||||
} else if (!sessionId) {
|
||||
|
||||
const { server, cleanup } = createServer();
|
||||
|
||||
// New initialization request
|
||||
const eventStore = new InMemoryEventStore();
|
||||
transport = new StreamableHTTPServerTransport({
|
||||
sessionIdGenerator: () => randomUUID(),
|
||||
eventStore, // Enable resumability
|
||||
onsessioninitialized: (sessionId: string) => {
|
||||
// Store the transport by session ID when session is initialized
|
||||
// This avoids race conditions where requests might come in before the session is stored
|
||||
console.error(`Session initialized with ID: ${sessionId}`);
|
||||
transports.set(sessionId, transport);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
// Set up onclose handler to clean up transport when closed
|
||||
server.onclose = async () => {
|
||||
const sid = transport.sessionId;
|
||||
if (sid && transports.has(sid)) {
|
||||
console.error(`Transport closed for session ${sid}, removing from transports map`);
|
||||
transports.delete(sid);
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
|
||||
// Connect the transport to the MCP server BEFORE handling the request
|
||||
// so responses can flow back through the same transport
|
||||
await server.connect(transport);
|
||||
|
||||
await transport.handleRequest(req, res);
|
||||
return; // Already handled
|
||||
} else {
|
||||
// Invalid request - no session ID or not initialization request
|
||||
res.status(400).json({
|
||||
jsonrpc: '2.0',
|
||||
error: {
|
||||
code: -32000,
|
||||
message: 'Bad Request: No valid session ID provided',
|
||||
},
|
||||
id: req?.body?.id,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle the request with existing transport - no need to reconnect
|
||||
// The existing transport is already connected to the server
|
||||
await transport.handleRequest(req, res);
|
||||
} catch (error) {
|
||||
console.error('Error handling MCP request:', error);
|
||||
if (!res.headersSent) {
|
||||
res.status(500).json({
|
||||
jsonrpc: '2.0',
|
||||
error: {
|
||||
code: -32603,
|
||||
message: 'Internal server error',
|
||||
},
|
||||
id: req?.body?.id,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Handle GET requests for SSE streams (using built-in support from StreamableHTTP)
|
||||
app.get('/mcp', async (req: Request, res: Response) => {
|
||||
console.error('Received MCP GET request');
|
||||
const sessionId = req.headers['mcp-session-id'] as string | undefined;
|
||||
if (!sessionId || !transports.has(sessionId)) {
|
||||
res.status(400).json({
|
||||
jsonrpc: '2.0',
|
||||
error: {
|
||||
code: -32000,
|
||||
message: 'Bad Request: No valid session ID provided',
|
||||
},
|
||||
id: req?.body?.id,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for Last-Event-ID header for resumability
|
||||
const lastEventId = req.headers['last-event-id'] as string | undefined;
|
||||
if (lastEventId) {
|
||||
console.error(`Client reconnecting with Last-Event-ID: ${lastEventId}`);
|
||||
} else {
|
||||
console.error(`Establishing new SSE stream for session ${sessionId}`);
|
||||
}
|
||||
|
||||
const transport = transports.get(sessionId);
|
||||
await transport!.handleRequest(req, res);
|
||||
});
|
||||
|
||||
// Handle DELETE requests for session termination (according to MCP spec)
|
||||
app.delete('/mcp', async (req: Request, res: Response) => {
|
||||
const sessionId = req.headers['mcp-session-id'] as string | undefined;
|
||||
if (!sessionId || !transports.has(sessionId)) {
|
||||
res.status(400).json({
|
||||
jsonrpc: '2.0',
|
||||
error: {
|
||||
code: -32000,
|
||||
message: 'Bad Request: No valid session ID provided',
|
||||
},
|
||||
id: req?.body?.id,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
console.error(`Received session termination request for session ${sessionId}`);
|
||||
|
||||
try {
|
||||
const transport = transports.get(sessionId);
|
||||
await transport!.handleRequest(req, res);
|
||||
} catch (error) {
|
||||
console.error('Error handling session termination:', error);
|
||||
if (!res.headersSent) {
|
||||
res.status(500).json({
|
||||
jsonrpc: '2.0',
|
||||
error: {
|
||||
code: -32603,
|
||||
message: 'Error handling session termination',
|
||||
},
|
||||
id: req?.body?.id,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start the server
|
||||
const PORT = process.env.PORT || 3001;
|
||||
app.listen(PORT, () => {
|
||||
console.error(`MCP Streamable HTTP Server listening on port ${PORT}`);
|
||||
});
|
||||
|
||||
// Handle server shutdown
|
||||
process.on('SIGINT', async () => {
|
||||
console.error('Shutting down server...');
|
||||
|
||||
// Close all active transports to properly clean up resources
|
||||
for (const sessionId in transports) {
|
||||
try {
|
||||
console.error(`Closing transport for session ${sessionId}`);
|
||||
await transports.get(sessionId)!.close();
|
||||
transports.delete(sessionId);
|
||||
} catch (error) {
|
||||
console.error(`Error closing transport for session ${sessionId}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
console.error('Server shutdown complete');
|
||||
process.exit(0);
|
||||
});
|
||||
@@ -2,6 +2,9 @@
|
||||
|
||||
A Model Context Protocol server that provides web content fetching capabilities. This server enables LLMs to retrieve and process content from web pages, converting HTML to markdown for easier consumption.
|
||||
|
||||
> [!CAUTION]
|
||||
> This server can access local/internal IP addresses and may represent a security risk. Exercise caution when using this MCP server to ensure this does not expose any sensitive data.
|
||||
|
||||
The fetch tool will truncate the response, but by using the `start_index` argument, you can specify where to start the content extraction. This lets models read a webpage in chunks, until they find the information they need.
|
||||
|
||||
### Available Tools
|
||||
@@ -52,10 +55,12 @@ Add to your Claude settings:
|
||||
<summary>Using uvx</summary>
|
||||
|
||||
```json
|
||||
"mcpServers": {
|
||||
"fetch": {
|
||||
"command": "uvx",
|
||||
"args": ["mcp-server-fetch"]
|
||||
{
|
||||
"mcpServers": {
|
||||
"fetch": {
|
||||
"command": "uvx",
|
||||
"args": ["mcp-server-fetch"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -65,10 +70,12 @@ Add to your Claude settings:
|
||||
<summary>Using docker</summary>
|
||||
|
||||
```json
|
||||
"mcpServers": {
|
||||
"fetch": {
|
||||
"command": "docker",
|
||||
"args": ["run", "-i", "--rm", "mcp/fetch"]
|
||||
{
|
||||
"mcpServers": {
|
||||
"fetch": {
|
||||
"command": "docker",
|
||||
"args": ["run", "-i", "--rm", "mcp/fetch"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -78,10 +85,60 @@ Add to your Claude settings:
|
||||
<summary>Using pip installation</summary>
|
||||
|
||||
```json
|
||||
"mcpServers": {
|
||||
"fetch": {
|
||||
"command": "python",
|
||||
"args": ["-m", "mcp_server_fetch"]
|
||||
{
|
||||
"mcpServers": {
|
||||
"fetch": {
|
||||
"command": "python",
|
||||
"args": ["-m", "mcp_server_fetch"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
</details>
|
||||
|
||||
### Configure for VS Code
|
||||
|
||||
For quick installation, use one of the one-click install buttons below...
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D&quality=insiders)
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D&quality=insiders)
|
||||
|
||||
For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`.
|
||||
|
||||
Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
|
||||
|
||||
> Note that the `mcp` key is needed when using the `mcp.json` file.
|
||||
|
||||
<details>
|
||||
<summary>Using uvx</summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"fetch": {
|
||||
"command": "uvx",
|
||||
"args": ["mcp-server-fetch"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Using Docker</summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"fetch": {
|
||||
"command": "docker",
|
||||
"args": ["run", "-i", "--rm", "mcp/fetch"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -9,8 +9,58 @@ Node.js server implementing Model Context Protocol (MCP) for filesystem operatio
|
||||
- Move files/directories
|
||||
- Search files
|
||||
- Get file metadata
|
||||
- Dynamic directory access control via [Roots](https://modelcontextprotocol.io/docs/concepts/roots)
|
||||
|
||||
## Directory Access Control
|
||||
|
||||
The server uses a flexible directory access control system. Directories can be specified via command-line arguments or dynamically via [Roots](https://modelcontextprotocol.io/docs/concepts/roots).
|
||||
|
||||
### Method 1: Command-line Arguments
|
||||
Specify Allowed directories when starting the server:
|
||||
```bash
|
||||
mcp-server-filesystem /path/to/dir1 /path/to/dir2
|
||||
```
|
||||
|
||||
### Method 2: MCP Roots (Recommended)
|
||||
MCP clients that support [Roots](https://modelcontextprotocol.io/docs/concepts/roots) can dynamically update the Allowed directories.
|
||||
|
||||
Roots notified by Client to Server, completely replace any server-side Allowed directories when provided.
|
||||
|
||||
**Important**: If server starts without command-line arguments AND client doesn't support roots protocol (or provides empty roots), the server will throw an error during initialization.
|
||||
|
||||
This is the recommended method, as this enables runtime directory updates via `roots/list_changed` notifications without server restart, providing a more flexible and modern integration experience.
|
||||
|
||||
### How It Works
|
||||
|
||||
The server's directory access control follows this flow:
|
||||
|
||||
1. **Server Startup**
|
||||
- Server starts with directories from command-line arguments (if provided)
|
||||
- If no arguments provided, server starts with empty allowed directories
|
||||
|
||||
2. **Client Connection & Initialization**
|
||||
- Client connects and sends `initialize` request with capabilities
|
||||
- Server checks if client supports roots protocol (`capabilities.roots`)
|
||||
|
||||
3. **Roots Protocol Handling** (if client supports roots)
|
||||
- **On initialization**: Server requests roots from client via `roots/list`
|
||||
- Client responds with its configured roots
|
||||
- Server replaces ALL allowed directories with client's roots
|
||||
- **On runtime updates**: Client can send `notifications/roots/list_changed`
|
||||
- Server requests updated roots and replaces allowed directories again
|
||||
|
||||
4. **Fallback Behavior** (if client doesn't support roots)
|
||||
- Server continues using command-line directories only
|
||||
- No dynamic updates possible
|
||||
|
||||
5. **Access Control**
|
||||
- All filesystem operations are restricted to allowed directories
|
||||
- Use `list_allowed_directories` tool to see current directories
|
||||
- Server requires at least ONE allowed directory to operate
|
||||
|
||||
**Note**: The server will only allow operations within directories specified either via `args` or via Roots.
|
||||
|
||||
|
||||
**Note**: The server will only allow operations within directories specified via `args`.
|
||||
|
||||
## API
|
||||
|
||||
@@ -143,6 +193,64 @@ Note: all directories must be mounted to `/projects` by default.
|
||||
}
|
||||
```
|
||||
|
||||
## Usage with VS Code
|
||||
|
||||
For quick installation, click the installation buttons below...
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D&quality=insiders)
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D&quality=insiders)
|
||||
|
||||
For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`.
|
||||
|
||||
Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
|
||||
|
||||
> Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
|
||||
|
||||
You can provide sandboxed directories to the server by mounting them to `/projects`. Adding the `ro` flag will make the directory readonly by the server.
|
||||
|
||||
### Docker
|
||||
Note: all directories must be mounted to `/projects` by default.
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"filesystem": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"-i",
|
||||
"--rm",
|
||||
"--mount", "type=bind,src=${workspaceFolder},dst=/projects/workspace",
|
||||
"mcp/filesystem",
|
||||
"/projects"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"filesystem": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-filesystem",
|
||||
"${workspaceFolder}"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Build
|
||||
|
||||
Docker build:
|
||||
|
||||
169
src/filesystem/__tests__/path-utils.test.ts
Normal file
169
src/filesystem/__tests__/path-utils.test.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
import { describe, it, expect } from '@jest/globals';
|
||||
import { normalizePath, expandHome, convertToWindowsPath } from '../path-utils.js';
|
||||
|
||||
describe('Path Utilities', () => {
|
||||
describe('convertToWindowsPath', () => {
|
||||
it('leaves Unix paths unchanged', () => {
|
||||
expect(convertToWindowsPath('/usr/local/bin'))
|
||||
.toBe('/usr/local/bin');
|
||||
expect(convertToWindowsPath('/home/user/some path'))
|
||||
.toBe('/home/user/some path');
|
||||
});
|
||||
|
||||
it('converts WSL paths to Windows format', () => {
|
||||
expect(convertToWindowsPath('/mnt/c/NS/MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
});
|
||||
|
||||
it('converts Unix-style Windows paths to Windows format', () => {
|
||||
expect(convertToWindowsPath('/c/NS/MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
});
|
||||
|
||||
it('leaves Windows paths unchanged but ensures backslashes', () => {
|
||||
expect(convertToWindowsPath('C:\\NS\\MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
expect(convertToWindowsPath('C:/NS/MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
});
|
||||
|
||||
it('handles Windows paths with spaces', () => {
|
||||
expect(convertToWindowsPath('C:\\Program Files\\Some App'))
|
||||
.toBe('C:\\Program Files\\Some App');
|
||||
expect(convertToWindowsPath('C:/Program Files/Some App'))
|
||||
.toBe('C:\\Program Files\\Some App');
|
||||
});
|
||||
|
||||
it('handles uppercase and lowercase drive letters', () => {
|
||||
expect(convertToWindowsPath('/mnt/d/some/path'))
|
||||
.toBe('D:\\some\\path');
|
||||
expect(convertToWindowsPath('/d/some/path'))
|
||||
.toBe('D:\\some\\path');
|
||||
});
|
||||
});
|
||||
|
||||
describe('normalizePath', () => {
|
||||
it('preserves Unix paths', () => {
|
||||
expect(normalizePath('/usr/local/bin'))
|
||||
.toBe('/usr/local/bin');
|
||||
expect(normalizePath('/home/user/some path'))
|
||||
.toBe('/home/user/some path');
|
||||
expect(normalizePath('"/usr/local/some app/"'))
|
||||
.toBe('/usr/local/some app');
|
||||
});
|
||||
|
||||
it('removes surrounding quotes', () => {
|
||||
expect(normalizePath('"C:\\NS\\My Kindle Content"'))
|
||||
.toBe('C:\\NS\\My Kindle Content');
|
||||
});
|
||||
|
||||
it('normalizes backslashes', () => {
|
||||
expect(normalizePath('C:\\\\NS\\\\MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
});
|
||||
|
||||
it('converts forward slashes to backslashes on Windows', () => {
|
||||
expect(normalizePath('C:/NS/MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
});
|
||||
|
||||
it('handles WSL paths', () => {
|
||||
expect(normalizePath('/mnt/c/NS/MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
});
|
||||
|
||||
it('handles Unix-style Windows paths', () => {
|
||||
expect(normalizePath('/c/NS/MyKindleContent'))
|
||||
.toBe('C:\\NS\\MyKindleContent');
|
||||
});
|
||||
|
||||
it('handles paths with spaces and mixed slashes', () => {
|
||||
expect(normalizePath('C:/NS/My Kindle Content'))
|
||||
.toBe('C:\\NS\\My Kindle Content');
|
||||
expect(normalizePath('/mnt/c/NS/My Kindle Content'))
|
||||
.toBe('C:\\NS\\My Kindle Content');
|
||||
expect(normalizePath('C:\\Program Files (x86)\\App Name'))
|
||||
.toBe('C:\\Program Files (x86)\\App Name');
|
||||
expect(normalizePath('"C:\\Program Files\\App Name"'))
|
||||
.toBe('C:\\Program Files\\App Name');
|
||||
expect(normalizePath(' C:\\Program Files\\App Name '))
|
||||
.toBe('C:\\Program Files\\App Name');
|
||||
});
|
||||
|
||||
it('preserves spaces in all path formats', () => {
|
||||
expect(normalizePath('/mnt/c/Program Files/App Name'))
|
||||
.toBe('C:\\Program Files\\App Name');
|
||||
expect(normalizePath('/c/Program Files/App Name'))
|
||||
.toBe('C:\\Program Files\\App Name');
|
||||
expect(normalizePath('C:/Program Files/App Name'))
|
||||
.toBe('C:\\Program Files\\App Name');
|
||||
});
|
||||
|
||||
it('handles special characters in paths', () => {
|
||||
// Test ampersand in path
|
||||
expect(normalizePath('C:\\NS\\Sub&Folder'))
|
||||
.toBe('C:\\NS\\Sub&Folder');
|
||||
expect(normalizePath('C:/NS/Sub&Folder'))
|
||||
.toBe('C:\\NS\\Sub&Folder');
|
||||
expect(normalizePath('/mnt/c/NS/Sub&Folder'))
|
||||
.toBe('C:\\NS\\Sub&Folder');
|
||||
|
||||
// Test tilde in path (short names in Windows)
|
||||
expect(normalizePath('C:\\NS\\MYKIND~1'))
|
||||
.toBe('C:\\NS\\MYKIND~1');
|
||||
expect(normalizePath('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1'))
|
||||
.toBe('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1');
|
||||
|
||||
// Test other special characters
|
||||
expect(normalizePath('C:\\Path with #hash'))
|
||||
.toBe('C:\\Path with #hash');
|
||||
expect(normalizePath('C:\\Path with (parentheses)'))
|
||||
.toBe('C:\\Path with (parentheses)');
|
||||
expect(normalizePath('C:\\Path with [brackets]'))
|
||||
.toBe('C:\\Path with [brackets]');
|
||||
expect(normalizePath('C:\\Path with @at+plus$dollar%percent'))
|
||||
.toBe('C:\\Path with @at+plus$dollar%percent');
|
||||
});
|
||||
|
||||
it('capitalizes lowercase drive letters for Windows paths', () => {
|
||||
expect(normalizePath('c:/windows/system32'))
|
||||
.toBe('C:\\windows\\system32');
|
||||
expect(normalizePath('/mnt/d/my/folder')) // WSL path with lowercase drive
|
||||
.toBe('D:\\my\\folder');
|
||||
expect(normalizePath('/e/another/folder')) // Unix-style Windows path with lowercase drive
|
||||
.toBe('E:\\another\\folder');
|
||||
});
|
||||
|
||||
it('handles UNC paths correctly', () => {
|
||||
// UNC paths should preserve the leading double backslash
|
||||
const uncPath = '\\\\SERVER\\share\\folder';
|
||||
expect(normalizePath(uncPath)).toBe('\\\\SERVER\\share\\folder');
|
||||
|
||||
// Test UNC path with double backslashes that need normalization
|
||||
const uncPathWithDoubles = '\\\\\\\\SERVER\\\\share\\\\folder';
|
||||
expect(normalizePath(uncPathWithDoubles)).toBe('\\\\SERVER\\share\\folder');
|
||||
});
|
||||
|
||||
it('returns normalized non-Windows/WSL/Unix-style Windows paths as is after basic normalization', () => {
|
||||
// Relative path
|
||||
const relativePath = 'some/relative/path';
|
||||
expect(normalizePath(relativePath)).toBe(relativePath.replace(/\//g, '\\'));
|
||||
|
||||
// A path that looks somewhat absolute but isn't a drive or recognized Unix root for Windows conversion
|
||||
const otherAbsolutePath = '\\someserver\\share\\file';
|
||||
expect(normalizePath(otherAbsolutePath)).toBe(otherAbsolutePath);
|
||||
});
|
||||
});
|
||||
|
||||
describe('expandHome', () => {
|
||||
it('expands ~ to home directory', () => {
|
||||
const result = expandHome('~/test');
|
||||
expect(result).toContain('test');
|
||||
expect(result).not.toContain('~');
|
||||
});
|
||||
|
||||
it('leaves other paths unchanged', () => {
|
||||
expect(expandHome('C:/test')).toBe('C:/test');
|
||||
});
|
||||
});
|
||||
});
|
||||
844
src/filesystem/__tests__/path-validation.test.ts
Normal file
844
src/filesystem/__tests__/path-validation.test.ts
Normal file
@@ -0,0 +1,844 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as os from 'os';
|
||||
import { isPathWithinAllowedDirectories } from '../path-validation.js';
|
||||
|
||||
describe('Path Validation', () => {
|
||||
it('allows exact directory match', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true);
|
||||
});
|
||||
|
||||
it('allows subdirectories', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/src/index.js', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/deeply/nested/file.txt', allowed)).toBe(true);
|
||||
});
|
||||
|
||||
it('blocks similar directory names (prefix vulnerability)', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project_backup', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project-old', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/projectile', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project.bak', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('blocks paths outside allowed directories', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/etc/passwd', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles multiple allowed directories', () => {
|
||||
const allowed = ['/home/user/project1', '/home/user/project2'];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project1/src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project2/src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project3', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project1_backup', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project2-old', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('blocks parent and sibling directories', () => {
|
||||
const allowed = ['/test/allowed'];
|
||||
|
||||
// Parent directory
|
||||
expect(isPathWithinAllowedDirectories('/test', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/', allowed)).toBe(false);
|
||||
|
||||
// Sibling with common prefix
|
||||
expect(isPathWithinAllowedDirectories('/test/allowed_sibling', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/test/allowed2', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles paths with special characters', () => {
|
||||
const allowed = ['/home/user/my-project (v2)'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)/src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)_backup', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my-project', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
describe('Input validation', () => {
|
||||
it('rejects empty inputs', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories('', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', [])).toBe(false);
|
||||
});
|
||||
|
||||
it('handles trailing separators correctly', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Path with trailing separator should still match
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/', allowed)).toBe(true);
|
||||
|
||||
// Allowed directory with trailing separator
|
||||
const allowedWithSep = ['/home/user/project/'];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', allowedWithSep)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/', allowedWithSep)).toBe(true);
|
||||
|
||||
// Should still block similar names with or without trailing separators
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project2', allowedWithSep)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project2/', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('skips empty directory entries in allowed list', () => {
|
||||
const allowed = ['', '/home/user/project', ''];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true);
|
||||
|
||||
// Should still validate properly with empty entries
|
||||
expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles Windows paths with trailing separators', () => {
|
||||
if (path.sep === '\\') {
|
||||
const allowed = ['C:\\Users\\project'];
|
||||
|
||||
// Path with trailing separator
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project\\', allowed)).toBe(true);
|
||||
|
||||
// Allowed with trailing separator
|
||||
const allowedWithSep = ['C:\\Users\\project\\'];
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project', allowedWithSep)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project\\', allowedWithSep)).toBe(true);
|
||||
|
||||
// Should still block similar names
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project2\\', allowed)).toBe(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error handling', () => {
|
||||
it('normalizes relative paths to absolute', () => {
|
||||
const allowed = [process.cwd()];
|
||||
|
||||
// Relative paths get normalized to absolute paths based on cwd
|
||||
expect(isPathWithinAllowedDirectories('relative/path', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('./file', allowed)).toBe(true);
|
||||
|
||||
// Parent directory references that escape allowed directory
|
||||
const parentAllowed = ['/home/user/project'];
|
||||
expect(isPathWithinAllowedDirectories('../parent', parentAllowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('returns false for relative paths in allowed directories', () => {
|
||||
const badAllowed = ['relative/path', '/some/other/absolute/path'];
|
||||
|
||||
// Relative paths in allowed dirs are normalized to absolute based on cwd
|
||||
// The normalized 'relative/path' won't match our test path
|
||||
expect(isPathWithinAllowedDirectories('/some/other/absolute/path/file', badAllowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/absolute/path/file', badAllowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles null and undefined inputs gracefully', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Should return false, not crash
|
||||
expect(isPathWithinAllowedDirectories(null as any, allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories(undefined as any, allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/path', null as any)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/path', undefined as any)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Unicode and special characters', () => {
|
||||
it('handles unicode characters in paths', () => {
|
||||
const allowed = ['/home/user/café'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories('/home/user/café', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/café/file', allowed)).toBe(true);
|
||||
|
||||
// Different unicode representation won't match (not normalized)
|
||||
const decomposed = '/home/user/cafe\u0301'; // e + combining accent
|
||||
expect(isPathWithinAllowedDirectories(decomposed, allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles paths with spaces correctly', () => {
|
||||
const allowed = ['/home/user/my project'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my project', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my project/file', allowed)).toBe(true);
|
||||
|
||||
// Partial matches should fail
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/my proj', allowed)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Overlapping allowed directories', () => {
|
||||
it('handles nested allowed directories correctly', () => {
|
||||
const allowed = ['/home', '/home/user', '/home/user/project'];
|
||||
|
||||
// All paths under /home are allowed
|
||||
expect(isPathWithinAllowedDirectories('/home/anything', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/anything', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/anything', allowed)).toBe(true);
|
||||
|
||||
// First match wins (most permissive)
|
||||
expect(isPathWithinAllowedDirectories('/home/other/deep/path', allowed)).toBe(true);
|
||||
});
|
||||
|
||||
it('handles root directory as allowed', () => {
|
||||
const allowed = ['/'];
|
||||
|
||||
// Everything is allowed under root (dangerous configuration)
|
||||
expect(isPathWithinAllowedDirectories('/', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/any/path', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/etc/passwd', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/secret', allowed)).toBe(true);
|
||||
|
||||
// But only on the same filesystem root
|
||||
if (path.sep === '\\') {
|
||||
expect(isPathWithinAllowedDirectories('D:\\other', ['/'])).toBe(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cross-platform behavior', () => {
|
||||
it('handles Windows-style paths on Windows', () => {
|
||||
if (path.sep === '\\') {
|
||||
const allowed = ['C:\\Users\\project'];
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project\\src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project2', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users\\project_backup', allowed)).toBe(false);
|
||||
}
|
||||
});
|
||||
|
||||
it('handles Unix-style paths on Unix', () => {
|
||||
if (path.sep === '/') {
|
||||
const allowed = ['/home/user/project'];
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Validation Tests - Path Traversal', () => {
|
||||
it('blocks path traversal attempts', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Basic traversal attempts
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/../../../etc/passwd', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/../../other', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/../project2', allowed)).toBe(false);
|
||||
|
||||
// Mixed traversal with valid segments
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/src/../../project2', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/./../../other', allowed)).toBe(false);
|
||||
|
||||
// Multiple traversal sequences
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/../project/../../../etc', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('blocks traversal in allowed directories', () => {
|
||||
const allowed = ['/home/user/project/../safe'];
|
||||
|
||||
// The allowed directory itself should be normalized and safe
|
||||
expect(isPathWithinAllowedDirectories('/home/user/safe/file', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles complex traversal patterns', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Double dots in filenames (not traversal) - these normalize to paths within allowed dir
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/..test', allowed)).toBe(true); // Not traversal
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/test..', allowed)).toBe(true); // Not traversal
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/te..st', allowed)).toBe(true); // Not traversal
|
||||
|
||||
// Actual traversal
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/../test', allowed)).toBe(false); // Is traversal - goes to /home/user/test
|
||||
|
||||
// Edge case: /home/user/project/.. normalizes to /home/user (parent dir)
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/..', allowed)).toBe(false); // Goes to parent
|
||||
});
|
||||
});
|
||||
|
||||
describe('Validation Tests - Null Bytes', () => {
|
||||
it('rejects paths with null bytes', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project\x00/etc/passwd', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/test\x00.txt', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('\x00/home/user/project', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/\x00', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('rejects allowed directories with null bytes', () => {
|
||||
const allowed = ['/home/user/project\x00'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Validation Tests - Special Characters', () => {
|
||||
it('allows percent signs in filenames', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Percent is a valid filename character
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/report_50%.pdf', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/Q1_25%_growth', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/%41', allowed)).toBe(true); // File named %41
|
||||
|
||||
// URL encoding is NOT decoded by path.normalize, so these are just odd filenames
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/%2e%2e', allowed)).toBe(true); // File named "%2e%2e"
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/file%20name', allowed)).toBe(true); // File with %20 in name
|
||||
});
|
||||
|
||||
it('handles percent signs in allowed directories', () => {
|
||||
const allowed = ['/home/user/project%20files'];
|
||||
|
||||
// This is a directory literally named "project%20files"
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project%20files/test', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project files/test', allowed)).toBe(false); // Different dir
|
||||
});
|
||||
});
|
||||
|
||||
describe('Path Normalization', () => {
|
||||
it('normalizes paths before comparison', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Trailing slashes
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project//', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project///', allowed)).toBe(true);
|
||||
|
||||
// Current directory references
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/./src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/./project/src', allowed)).toBe(true);
|
||||
|
||||
// Multiple slashes
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project//src//file', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home//user//project//src', allowed)).toBe(true);
|
||||
|
||||
// Should still block outside paths
|
||||
expect(isPathWithinAllowedDirectories('/home/user//project2', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles mixed separators correctly', () => {
|
||||
if (path.sep === '\\') {
|
||||
const allowed = ['C:\\Users\\project'];
|
||||
|
||||
// Mixed separators should be normalized
|
||||
expect(isPathWithinAllowedDirectories('C:/Users/project', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('C:\\Users/project\\src', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('C:/Users\\project/src', allowed)).toBe(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('rejects non-string inputs safely', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories(123 as any, allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories({} as any, allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories([] as any, allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories(null as any, allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories(undefined as any, allowed)).toBe(false);
|
||||
|
||||
// Non-string in allowed directories
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', [123 as any])).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project', [{} as any])).toBe(false);
|
||||
});
|
||||
|
||||
it('handles very long paths', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Create a very long path that's still valid
|
||||
const longSubPath = 'a/'.repeat(1000) + 'file.txt';
|
||||
expect(isPathWithinAllowedDirectories(`/home/user/project/${longSubPath}`, allowed)).toBe(true);
|
||||
|
||||
// Very long path that escapes
|
||||
const escapePath = 'a/'.repeat(1000) + '../'.repeat(1001) + 'etc/passwd';
|
||||
expect(isPathWithinAllowedDirectories(`/home/user/project/${escapePath}`, allowed)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Additional Coverage', () => {
|
||||
it('handles allowed directories with traversal that normalizes safely', () => {
|
||||
// These allowed dirs contain traversal but normalize to valid paths
|
||||
const allowed = ['/home/user/../user/project'];
|
||||
|
||||
// Should normalize to /home/user/project and work correctly
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('handles symbolic dots in filenames', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
|
||||
// Single and double dots as actual filenames (not traversal)
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/.', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/..', allowed)).toBe(false); // This normalizes to parent
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/...', allowed)).toBe(true); // Three dots is a valid filename
|
||||
expect(isPathWithinAllowedDirectories('/home/user/project/....', allowed)).toBe(true); // Four dots is a valid filename
|
||||
});
|
||||
|
||||
it('handles UNC paths on Windows', () => {
|
||||
if (path.sep === '\\') {
|
||||
const allowed = ['\\\\server\\share\\project'];
|
||||
|
||||
expect(isPathWithinAllowedDirectories('\\\\server\\share\\project', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('\\\\server\\share\\project\\file', allowed)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories('\\\\server\\share\\other', allowed)).toBe(false);
|
||||
expect(isPathWithinAllowedDirectories('\\\\other\\share\\project', allowed)).toBe(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Symlink Tests', () => {
|
||||
let testDir: string;
|
||||
let allowedDir: string;
|
||||
let forbiddenDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'fs-error-test-'));
|
||||
allowedDir = path.join(testDir, 'allowed');
|
||||
forbiddenDir = path.join(testDir, 'forbidden');
|
||||
|
||||
await fs.mkdir(allowedDir, { recursive: true });
|
||||
await fs.mkdir(forbiddenDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('validates symlink handling', async () => {
|
||||
// Test with symlinks
|
||||
try {
|
||||
const linkPath = path.join(allowedDir, 'bad-link');
|
||||
const targetPath = path.join(forbiddenDir, 'target.txt');
|
||||
|
||||
await fs.writeFile(targetPath, 'content');
|
||||
await fs.symlink(targetPath, linkPath);
|
||||
|
||||
// In real implementation, this would throw with the resolved path
|
||||
const realPath = await fs.realpath(linkPath);
|
||||
const allowed = [allowedDir];
|
||||
|
||||
// Symlink target should be outside allowed directory
|
||||
expect(isPathWithinAllowedDirectories(realPath, allowed)).toBe(false);
|
||||
} catch (error) {
|
||||
// Skip if no symlink permissions
|
||||
}
|
||||
});
|
||||
|
||||
it('handles non-existent paths correctly', async () => {
|
||||
const newFilePath = path.join(allowedDir, 'subdir', 'newfile.txt');
|
||||
|
||||
// Parent directory doesn't exist
|
||||
try {
|
||||
await fs.access(newFilePath);
|
||||
} catch (error) {
|
||||
expect((error as NodeJS.ErrnoException).code).toBe('ENOENT');
|
||||
}
|
||||
|
||||
// After creating parent, validation should work
|
||||
await fs.mkdir(path.dirname(newFilePath), { recursive: true });
|
||||
const allowed = [allowedDir];
|
||||
expect(isPathWithinAllowedDirectories(newFilePath, allowed)).toBe(true);
|
||||
});
|
||||
|
||||
// Test path resolution consistency for symlinked files
|
||||
it('validates symlinked files consistently between path and resolved forms', async () => {
|
||||
try {
|
||||
// Setup: Create target file in forbidden area
|
||||
const targetFile = path.join(forbiddenDir, 'target.txt');
|
||||
await fs.writeFile(targetFile, 'TARGET_CONTENT');
|
||||
|
||||
// Create symlink inside allowed directory pointing to forbidden file
|
||||
const symlinkPath = path.join(allowedDir, 'link-to-target.txt');
|
||||
await fs.symlink(targetFile, symlinkPath);
|
||||
|
||||
// The symlink path itself passes validation (looks like it's in allowed dir)
|
||||
expect(isPathWithinAllowedDirectories(symlinkPath, [allowedDir])).toBe(true);
|
||||
|
||||
// But the resolved path should fail validation
|
||||
const resolvedPath = await fs.realpath(symlinkPath);
|
||||
expect(isPathWithinAllowedDirectories(resolvedPath, [allowedDir])).toBe(false);
|
||||
|
||||
// Verify the resolved path goes to the forbidden location (normalize both paths for macOS temp dirs)
|
||||
expect(await fs.realpath(resolvedPath)).toBe(await fs.realpath(targetFile));
|
||||
} catch (error) {
|
||||
// Skip if no symlink permissions on the system
|
||||
if ((error as NodeJS.ErrnoException).code !== 'EPERM') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Test allowed directory resolution behavior
|
||||
it('validates paths correctly when allowed directory is resolved from symlink', async () => {
|
||||
try {
|
||||
// Setup: Create the actual target directory with content
|
||||
const actualTargetDir = path.join(testDir, 'actual-target');
|
||||
await fs.mkdir(actualTargetDir, { recursive: true });
|
||||
const targetFile = path.join(actualTargetDir, 'file.txt');
|
||||
await fs.writeFile(targetFile, 'FILE_CONTENT');
|
||||
|
||||
// Setup: Create symlink directory that points to target
|
||||
const symlinkDir = path.join(testDir, 'symlink-dir');
|
||||
await fs.symlink(actualTargetDir, symlinkDir);
|
||||
|
||||
// Simulate resolved allowed directory (what the server startup should do)
|
||||
const resolvedAllowedDir = await fs.realpath(symlinkDir);
|
||||
const resolvedTargetDir = await fs.realpath(actualTargetDir);
|
||||
expect(resolvedAllowedDir).toBe(resolvedTargetDir);
|
||||
|
||||
// Test 1: File access through original symlink path should pass validation with resolved allowed dir
|
||||
const fileViaSymlink = path.join(symlinkDir, 'file.txt');
|
||||
const resolvedFile = await fs.realpath(fileViaSymlink);
|
||||
expect(isPathWithinAllowedDirectories(resolvedFile, [resolvedAllowedDir])).toBe(true);
|
||||
|
||||
// Test 2: File access through resolved path should also pass validation
|
||||
const fileViaResolved = path.join(resolvedTargetDir, 'file.txt');
|
||||
expect(isPathWithinAllowedDirectories(fileViaResolved, [resolvedAllowedDir])).toBe(true);
|
||||
|
||||
// Test 3: Demonstrate inconsistent behavior with unresolved allowed directories
|
||||
// If allowed dirs were not resolved (storing symlink paths instead):
|
||||
const unresolvedAllowedDirs = [symlinkDir];
|
||||
// This validation would incorrectly fail for the same content:
|
||||
expect(isPathWithinAllowedDirectories(resolvedFile, unresolvedAllowedDirs)).toBe(false);
|
||||
|
||||
} catch (error) {
|
||||
// Skip if no symlink permissions on the system
|
||||
if ((error as NodeJS.ErrnoException).code !== 'EPERM') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('resolves nested symlink chains completely', async () => {
|
||||
try {
|
||||
// Setup: Create target file in forbidden area
|
||||
const actualTarget = path.join(forbiddenDir, 'target-file.txt');
|
||||
await fs.writeFile(actualTarget, 'FINAL_CONTENT');
|
||||
|
||||
// Create chain of symlinks: allowedFile -> link2 -> link1 -> actualTarget
|
||||
const link1 = path.join(testDir, 'intermediate-link1');
|
||||
const link2 = path.join(testDir, 'intermediate-link2');
|
||||
const allowedFile = path.join(allowedDir, 'seemingly-safe-file');
|
||||
|
||||
await fs.symlink(actualTarget, link1);
|
||||
await fs.symlink(link1, link2);
|
||||
await fs.symlink(link2, allowedFile);
|
||||
|
||||
// The allowed file path passes basic validation
|
||||
expect(isPathWithinAllowedDirectories(allowedFile, [allowedDir])).toBe(true);
|
||||
|
||||
// But complete resolution reveals the forbidden target
|
||||
const fullyResolvedPath = await fs.realpath(allowedFile);
|
||||
expect(isPathWithinAllowedDirectories(fullyResolvedPath, [allowedDir])).toBe(false);
|
||||
expect(await fs.realpath(fullyResolvedPath)).toBe(await fs.realpath(actualTarget));
|
||||
|
||||
} catch (error) {
|
||||
// Skip if no symlink permissions on the system
|
||||
if ((error as NodeJS.ErrnoException).code !== 'EPERM') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Path Validation Race Condition Tests', () => {
|
||||
let testDir: string;
|
||||
let allowedDir: string;
|
||||
let forbiddenDir: string;
|
||||
let targetFile: string;
|
||||
let testPath: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'race-test-'));
|
||||
allowedDir = path.join(testDir, 'allowed');
|
||||
forbiddenDir = path.join(testDir, 'outside');
|
||||
targetFile = path.join(forbiddenDir, 'target.txt');
|
||||
testPath = path.join(allowedDir, 'test.txt');
|
||||
|
||||
await fs.mkdir(allowedDir, { recursive: true });
|
||||
await fs.mkdir(forbiddenDir, { recursive: true });
|
||||
await fs.writeFile(targetFile, 'ORIGINAL CONTENT', 'utf-8');
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('validates non-existent file paths based on parent directory', async () => {
|
||||
const allowed = [allowedDir];
|
||||
|
||||
expect(isPathWithinAllowedDirectories(testPath, allowed)).toBe(true);
|
||||
await expect(fs.access(testPath)).rejects.toThrow();
|
||||
|
||||
const parentDir = path.dirname(testPath);
|
||||
expect(isPathWithinAllowedDirectories(parentDir, allowed)).toBe(true);
|
||||
});
|
||||
|
||||
it('demonstrates symlink race condition allows writing outside allowed directories', async () => {
|
||||
const allowed = [allowedDir];
|
||||
|
||||
await expect(fs.access(testPath)).rejects.toThrow();
|
||||
expect(isPathWithinAllowedDirectories(testPath, allowed)).toBe(true);
|
||||
|
||||
await fs.symlink(targetFile, testPath);
|
||||
await fs.writeFile(testPath, 'MODIFIED CONTENT', 'utf-8');
|
||||
|
||||
const targetContent = await fs.readFile(targetFile, 'utf-8');
|
||||
expect(targetContent).toBe('MODIFIED CONTENT');
|
||||
|
||||
const resolvedPath = await fs.realpath(testPath);
|
||||
expect(isPathWithinAllowedDirectories(resolvedPath, allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('shows timing differences between validation approaches', async () => {
|
||||
const allowed = [allowedDir];
|
||||
|
||||
const validation1 = isPathWithinAllowedDirectories(testPath, allowed);
|
||||
expect(validation1).toBe(true);
|
||||
|
||||
await fs.symlink(targetFile, testPath);
|
||||
|
||||
const resolvedPath = await fs.realpath(testPath);
|
||||
const validation2 = isPathWithinAllowedDirectories(resolvedPath, allowed);
|
||||
expect(validation2).toBe(false);
|
||||
|
||||
expect(validation1).not.toBe(validation2);
|
||||
});
|
||||
|
||||
it('validates directory creation timing', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const testDir = path.join(allowedDir, 'newdir');
|
||||
|
||||
expect(isPathWithinAllowedDirectories(testDir, allowed)).toBe(true);
|
||||
|
||||
await fs.symlink(forbiddenDir, testDir);
|
||||
|
||||
expect(isPathWithinAllowedDirectories(testDir, allowed)).toBe(true);
|
||||
|
||||
const resolved = await fs.realpath(testDir);
|
||||
expect(isPathWithinAllowedDirectories(resolved, allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('demonstrates exclusive file creation behavior', async () => {
|
||||
const allowed = [allowedDir];
|
||||
|
||||
await fs.symlink(targetFile, testPath);
|
||||
|
||||
await expect(fs.open(testPath, 'wx')).rejects.toThrow(/EEXIST/);
|
||||
|
||||
await fs.writeFile(testPath, 'NEW CONTENT', 'utf-8');
|
||||
const targetContent = await fs.readFile(targetFile, 'utf-8');
|
||||
expect(targetContent).toBe('NEW CONTENT');
|
||||
});
|
||||
|
||||
it('should use resolved parent paths for non-existent files', async () => {
|
||||
const allowed = [allowedDir];
|
||||
|
||||
const symlinkDir = path.join(allowedDir, 'link');
|
||||
await fs.symlink(forbiddenDir, symlinkDir);
|
||||
|
||||
const fileThroughSymlink = path.join(symlinkDir, 'newfile.txt');
|
||||
|
||||
expect(fileThroughSymlink.startsWith(allowedDir)).toBe(true);
|
||||
|
||||
const parentDir = path.dirname(fileThroughSymlink);
|
||||
const resolvedParent = await fs.realpath(parentDir);
|
||||
expect(isPathWithinAllowedDirectories(resolvedParent, allowed)).toBe(false);
|
||||
|
||||
const expectedSafePath = path.join(resolvedParent, path.basename(fileThroughSymlink));
|
||||
expect(isPathWithinAllowedDirectories(expectedSafePath, allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('demonstrates parent directory symlink traversal', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const deepPath = path.join(allowedDir, 'sub1', 'sub2', 'file.txt');
|
||||
|
||||
expect(isPathWithinAllowedDirectories(deepPath, allowed)).toBe(true);
|
||||
|
||||
const sub1Path = path.join(allowedDir, 'sub1');
|
||||
await fs.symlink(forbiddenDir, sub1Path);
|
||||
|
||||
await fs.mkdir(path.join(sub1Path, 'sub2'), { recursive: true });
|
||||
await fs.writeFile(deepPath, 'CONTENT', 'utf-8');
|
||||
|
||||
const realPath = await fs.realpath(deepPath);
|
||||
const realAllowedDir = await fs.realpath(allowedDir);
|
||||
const realForbiddenDir = await fs.realpath(forbiddenDir);
|
||||
|
||||
expect(realPath.startsWith(realAllowedDir)).toBe(false);
|
||||
expect(realPath.startsWith(realForbiddenDir)).toBe(true);
|
||||
});
|
||||
|
||||
it('should prevent race condition between validatePath and file operation', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const racePath = path.join(allowedDir, 'race-file.txt');
|
||||
const targetFile = path.join(forbiddenDir, 'target.txt');
|
||||
|
||||
await fs.writeFile(targetFile, 'ORIGINAL CONTENT', 'utf-8');
|
||||
|
||||
// Path validation would pass (file doesn't exist, parent is in allowed dir)
|
||||
expect(await fs.access(racePath).then(() => false).catch(() => true)).toBe(true);
|
||||
expect(isPathWithinAllowedDirectories(racePath, allowed)).toBe(true);
|
||||
|
||||
// Race condition: symlink created after validation but before write
|
||||
await fs.symlink(targetFile, racePath);
|
||||
|
||||
// With exclusive write flag, write should fail on symlink
|
||||
await expect(
|
||||
fs.writeFile(racePath, 'NEW CONTENT', { encoding: 'utf-8', flag: 'wx' })
|
||||
).rejects.toThrow(/EEXIST/);
|
||||
|
||||
// Verify content unchanged
|
||||
const targetContent = await fs.readFile(targetFile, 'utf-8');
|
||||
expect(targetContent).toBe('ORIGINAL CONTENT');
|
||||
|
||||
// The symlink exists but write was blocked
|
||||
const actualWritePath = await fs.realpath(racePath);
|
||||
expect(actualWritePath).toBe(await fs.realpath(targetFile));
|
||||
expect(isPathWithinAllowedDirectories(actualWritePath, allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow overwrites to legitimate files within allowed directories', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const legitFile = path.join(allowedDir, 'legit-file.txt');
|
||||
|
||||
// Create a legitimate file
|
||||
await fs.writeFile(legitFile, 'ORIGINAL', 'utf-8');
|
||||
|
||||
// Opening with w should work for legitimate files
|
||||
const fd = await fs.open(legitFile, 'w');
|
||||
try {
|
||||
await fd.write('UPDATED', 0, 'utf-8');
|
||||
} finally {
|
||||
await fd.close();
|
||||
}
|
||||
|
||||
const content = await fs.readFile(legitFile, 'utf-8');
|
||||
expect(content).toBe('UPDATED');
|
||||
});
|
||||
|
||||
it('should handle symlinks that point within allowed directories', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const targetFile = path.join(allowedDir, 'target.txt');
|
||||
const symlinkPath = path.join(allowedDir, 'symlink.txt');
|
||||
|
||||
// Create target file within allowed directory
|
||||
await fs.writeFile(targetFile, 'TARGET CONTENT', 'utf-8');
|
||||
|
||||
// Create symlink pointing to allowed file
|
||||
await fs.symlink(targetFile, symlinkPath);
|
||||
|
||||
// Opening symlink with w follows it to the target
|
||||
const fd = await fs.open(symlinkPath, 'w');
|
||||
try {
|
||||
await fd.write('UPDATED VIA SYMLINK', 0, 'utf-8');
|
||||
} finally {
|
||||
await fd.close();
|
||||
}
|
||||
|
||||
// Both symlink and target should show updated content
|
||||
const symlinkContent = await fs.readFile(symlinkPath, 'utf-8');
|
||||
const targetContent = await fs.readFile(targetFile, 'utf-8');
|
||||
expect(symlinkContent).toBe('UPDATED VIA SYMLINK');
|
||||
expect(targetContent).toBe('UPDATED VIA SYMLINK');
|
||||
});
|
||||
|
||||
it('should prevent overwriting files through symlinks pointing outside allowed directories', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const legitFile = path.join(allowedDir, 'existing.txt');
|
||||
const targetFile = path.join(forbiddenDir, 'target.txt');
|
||||
|
||||
// Create a legitimate file first
|
||||
await fs.writeFile(legitFile, 'LEGIT CONTENT', 'utf-8');
|
||||
|
||||
// Create target file in forbidden directory
|
||||
await fs.writeFile(targetFile, 'FORBIDDEN CONTENT', 'utf-8');
|
||||
|
||||
// Now replace the legitimate file with a symlink to forbidden location
|
||||
await fs.unlink(legitFile);
|
||||
await fs.symlink(targetFile, legitFile);
|
||||
|
||||
// Simulate the server's validation logic
|
||||
const stats = await fs.lstat(legitFile);
|
||||
expect(stats.isSymbolicLink()).toBe(true);
|
||||
|
||||
const realPath = await fs.realpath(legitFile);
|
||||
expect(isPathWithinAllowedDirectories(realPath, allowed)).toBe(false);
|
||||
|
||||
// With atomic rename, symlinks are replaced not followed
|
||||
// So this test now demonstrates the protection
|
||||
|
||||
// Verify content remains unchanged
|
||||
const targetContent = await fs.readFile(targetFile, 'utf-8');
|
||||
expect(targetContent).toBe('FORBIDDEN CONTENT');
|
||||
});
|
||||
|
||||
it('demonstrates race condition in read operations', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const legitFile = path.join(allowedDir, 'readable.txt');
|
||||
const secretFile = path.join(forbiddenDir, 'secret.txt');
|
||||
|
||||
// Create legitimate file
|
||||
await fs.writeFile(legitFile, 'PUBLIC CONTENT', 'utf-8');
|
||||
|
||||
// Create secret file in forbidden directory
|
||||
await fs.writeFile(secretFile, 'SECRET CONTENT', 'utf-8');
|
||||
|
||||
// Step 1: validatePath would pass for legitimate file
|
||||
expect(isPathWithinAllowedDirectories(legitFile, allowed)).toBe(true);
|
||||
|
||||
// Step 2: Race condition - replace file with symlink after validation
|
||||
await fs.unlink(legitFile);
|
||||
await fs.symlink(secretFile, legitFile);
|
||||
|
||||
// Step 3: Read operation follows symlink to forbidden location
|
||||
const content = await fs.readFile(legitFile, 'utf-8');
|
||||
|
||||
// This shows the vulnerability - we read forbidden content
|
||||
expect(content).toBe('SECRET CONTENT');
|
||||
expect(isPathWithinAllowedDirectories(await fs.realpath(legitFile), allowed)).toBe(false);
|
||||
});
|
||||
|
||||
it('verifies rename does not follow symlinks', async () => {
|
||||
const allowed = [allowedDir];
|
||||
const tempFile = path.join(allowedDir, 'temp.txt');
|
||||
const targetSymlink = path.join(allowedDir, 'target-symlink.txt');
|
||||
const forbiddenTarget = path.join(forbiddenDir, 'forbidden-target.txt');
|
||||
|
||||
// Create forbidden target
|
||||
await fs.writeFile(forbiddenTarget, 'ORIGINAL CONTENT', 'utf-8');
|
||||
|
||||
// Create symlink pointing to forbidden location
|
||||
await fs.symlink(forbiddenTarget, targetSymlink);
|
||||
|
||||
// Write temp file
|
||||
await fs.writeFile(tempFile, 'NEW CONTENT', 'utf-8');
|
||||
|
||||
// Rename temp file to symlink path
|
||||
await fs.rename(tempFile, targetSymlink);
|
||||
|
||||
// Check what happened
|
||||
const symlinkExists = await fs.lstat(targetSymlink).then(() => true).catch(() => false);
|
||||
const isSymlink = symlinkExists && (await fs.lstat(targetSymlink)).isSymbolicLink();
|
||||
const targetContent = await fs.readFile(targetSymlink, 'utf-8');
|
||||
const forbiddenContent = await fs.readFile(forbiddenTarget, 'utf-8');
|
||||
|
||||
// Rename should replace the symlink with a regular file
|
||||
expect(isSymlink).toBe(false);
|
||||
expect(targetContent).toBe('NEW CONTENT');
|
||||
expect(forbiddenContent).toBe('ORIGINAL CONTENT'); // Unchanged
|
||||
});
|
||||
});
|
||||
});
|
||||
84
src/filesystem/__tests__/roots-utils.test.ts
Normal file
84
src/filesystem/__tests__/roots-utils.test.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
|
||||
import { getValidRootDirectories } from '../roots-utils.js';
|
||||
import { mkdtempSync, rmSync, mkdirSync, writeFileSync, realpathSync } from 'fs';
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
import type { Root } from '@modelcontextprotocol/sdk/types.js';
|
||||
|
||||
describe('getValidRootDirectories', () => {
|
||||
let testDir1: string;
|
||||
let testDir2: string;
|
||||
let testDir3: string;
|
||||
let testFile: string;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create test directories
|
||||
testDir1 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test1-')));
|
||||
testDir2 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test2-')));
|
||||
testDir3 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test3-')));
|
||||
|
||||
// Create a test file (not a directory)
|
||||
testFile = join(testDir1, 'test-file.txt');
|
||||
writeFileSync(testFile, 'test content');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Cleanup
|
||||
rmSync(testDir1, { recursive: true, force: true });
|
||||
rmSync(testDir2, { recursive: true, force: true });
|
||||
rmSync(testDir3, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('valid directory processing', () => {
|
||||
it('should process all URI formats and edge cases', async () => {
|
||||
const roots = [
|
||||
{ uri: `file://${testDir1}`, name: 'File URI' },
|
||||
{ uri: testDir2, name: 'Plain path' },
|
||||
{ uri: testDir3 } // Plain path without name property
|
||||
];
|
||||
|
||||
const result = await getValidRootDirectories(roots);
|
||||
|
||||
expect(result).toContain(testDir1);
|
||||
expect(result).toContain(testDir2);
|
||||
expect(result).toContain(testDir3);
|
||||
expect(result).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should normalize complex paths', async () => {
|
||||
const subDir = join(testDir1, 'subdir');
|
||||
mkdirSync(subDir);
|
||||
|
||||
const roots = [
|
||||
{ uri: `file://${testDir1}/./subdir/../subdir`, name: 'Complex Path' }
|
||||
];
|
||||
|
||||
const result = await getValidRootDirectories(roots);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0]).toBe(subDir);
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
|
||||
it('should handle various error types', async () => {
|
||||
const nonExistentDir = join(tmpdir(), 'non-existent-directory-12345');
|
||||
const invalidPath = '\0invalid\0path'; // Null bytes cause different error types
|
||||
const roots = [
|
||||
{ uri: `file://${testDir1}`, name: 'Valid Dir' },
|
||||
{ uri: `file://${nonExistentDir}`, name: 'Non-existent Dir' },
|
||||
{ uri: `file://${testFile}`, name: 'File Not Dir' },
|
||||
{ uri: `file://${invalidPath}`, name: 'Invalid Path' }
|
||||
];
|
||||
|
||||
const result = await getValidRootDirectories(roots);
|
||||
|
||||
expect(result).toContain(testDir1);
|
||||
expect(result).not.toContain(nonExistentDir);
|
||||
expect(result).not.toContain(testFile);
|
||||
expect(result).not.toContain(invalidPath);
|
||||
expect(result).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -6,20 +6,28 @@ import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
ToolSchema,
|
||||
RootsListChangedNotificationSchema,
|
||||
type Root,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from 'os';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { z } from "zod";
|
||||
import { zodToJsonSchema } from "zod-to-json-schema";
|
||||
import { diffLines, createTwoFilesPatch } from 'diff';
|
||||
import { minimatch } from 'minimatch';
|
||||
import { isPathWithinAllowedDirectories } from './path-validation.js';
|
||||
import { getValidRootDirectories } from './roots-utils.js';
|
||||
|
||||
// Command line argument parsing
|
||||
const args = process.argv.slice(2);
|
||||
if (args.length === 0) {
|
||||
console.error("Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]");
|
||||
process.exit(1);
|
||||
console.error("Usage: mcp-server-filesystem [allowed-directory] [additional-directories...]");
|
||||
console.error("Note: Allowed directories can be provided via:");
|
||||
console.error(" 1. Command-line arguments (shown above)");
|
||||
console.error(" 2. MCP roots protocol (if client supports it)");
|
||||
console.error("At least one directory must be provided by EITHER method for the server to operate.");
|
||||
}
|
||||
|
||||
// Normalize all paths consistently
|
||||
@@ -34,9 +42,21 @@ function expandHome(filepath: string): string {
|
||||
return filepath;
|
||||
}
|
||||
|
||||
// Store allowed directories in normalized form
|
||||
const allowedDirectories = args.map(dir =>
|
||||
normalizePath(path.resolve(expandHome(dir)))
|
||||
// Store allowed directories in normalized and resolved form
|
||||
let allowedDirectories = await Promise.all(
|
||||
args.map(async (dir) => {
|
||||
const expanded = expandHome(dir);
|
||||
const absolute = path.resolve(expanded);
|
||||
try {
|
||||
// Resolve symlinks in allowed directories during startup
|
||||
const resolved = await fs.realpath(absolute);
|
||||
return normalizePath(resolved);
|
||||
} catch (error) {
|
||||
// If we can't resolve (doesn't exist), use the normalized absolute path
|
||||
// This allows configuring allowed dirs that will be created later
|
||||
return normalizePath(absolute);
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
// Validate that all directories exist and are accessible
|
||||
@@ -63,7 +83,7 @@ async function validatePath(requestedPath: string): Promise<string> {
|
||||
const normalizedRequested = normalizePath(absolute);
|
||||
|
||||
// Check if path is within allowed directories
|
||||
const isAllowed = allowedDirectories.some(dir => normalizedRequested.startsWith(dir));
|
||||
const isAllowed = isPathWithinAllowedDirectories(normalizedRequested, allowedDirectories);
|
||||
if (!isAllowed) {
|
||||
throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
@@ -72,31 +92,34 @@ async function validatePath(requestedPath: string): Promise<string> {
|
||||
try {
|
||||
const realPath = await fs.realpath(absolute);
|
||||
const normalizedReal = normalizePath(realPath);
|
||||
const isRealPathAllowed = allowedDirectories.some(dir => normalizedReal.startsWith(dir));
|
||||
if (!isRealPathAllowed) {
|
||||
throw new Error("Access denied - symlink target outside allowed directories");
|
||||
if (!isPathWithinAllowedDirectories(normalizedReal, allowedDirectories)) {
|
||||
throw new Error(`Access denied - symlink target outside allowed directories: ${realPath} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
return realPath;
|
||||
} catch (error) {
|
||||
// For new files that don't exist yet, verify parent directory
|
||||
const parentDir = path.dirname(absolute);
|
||||
try {
|
||||
const realParentPath = await fs.realpath(parentDir);
|
||||
const normalizedParent = normalizePath(realParentPath);
|
||||
const isParentAllowed = allowedDirectories.some(dir => normalizedParent.startsWith(dir));
|
||||
if (!isParentAllowed) {
|
||||
throw new Error("Access denied - parent directory outside allowed directories");
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
const parentDir = path.dirname(absolute);
|
||||
try {
|
||||
const realParentPath = await fs.realpath(parentDir);
|
||||
const normalizedParent = normalizePath(realParentPath);
|
||||
if (!isPathWithinAllowedDirectories(normalizedParent, allowedDirectories)) {
|
||||
throw new Error(`Access denied - parent directory outside allowed directories: ${realParentPath} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
return absolute;
|
||||
} catch {
|
||||
throw new Error(`Parent directory does not exist: ${parentDir}`);
|
||||
}
|
||||
return absolute;
|
||||
} catch {
|
||||
throw new Error(`Parent directory does not exist: ${parentDir}`);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Schema definitions
|
||||
const ReadFileArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
tail: z.number().optional().describe('If provided, returns only the last N lines of the file'),
|
||||
head: z.number().optional().describe('If provided, returns only the first N lines of the file')
|
||||
});
|
||||
|
||||
const ReadMultipleFilesArgsSchema = z.object({
|
||||
@@ -127,6 +150,11 @@ const ListDirectoryArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
});
|
||||
|
||||
const ListDirectoryWithSizesArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
sortBy: z.enum(['name', 'size']).optional().default('name').describe('Sort entries by name or size'),
|
||||
});
|
||||
|
||||
const DirectoryTreeArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
});
|
||||
@@ -324,12 +352,125 @@ async function applyFileEdits(
|
||||
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;
|
||||
|
||||
if (!dryRun) {
|
||||
await fs.writeFile(filePath, modifiedContent, 'utf-8');
|
||||
// Security: Use atomic rename to prevent race conditions where symlinks
|
||||
// could be created between validation and write. Rename operations
|
||||
// replace the target file atomically and don't follow symlinks.
|
||||
const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
|
||||
try {
|
||||
await fs.writeFile(tempPath, modifiedContent, 'utf-8');
|
||||
await fs.rename(tempPath, filePath);
|
||||
} catch (error) {
|
||||
try {
|
||||
await fs.unlink(tempPath);
|
||||
} catch {}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return formattedDiff;
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
function formatSize(bytes: number): string {
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
if (bytes === 0) return '0 B';
|
||||
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
if (i === 0) return `${bytes} ${units[i]}`;
|
||||
|
||||
return `${(bytes / Math.pow(1024, i)).toFixed(2)} ${units[i]}`;
|
||||
}
|
||||
|
||||
// Memory-efficient implementation to get the last N lines of a file
|
||||
async function tailFile(filePath: string, numLines: number): Promise<string> {
|
||||
const CHUNK_SIZE = 1024; // Read 1KB at a time
|
||||
const stats = await fs.stat(filePath);
|
||||
const fileSize = stats.size;
|
||||
|
||||
if (fileSize === 0) return '';
|
||||
|
||||
// Open file for reading
|
||||
const fileHandle = await fs.open(filePath, 'r');
|
||||
try {
|
||||
const lines: string[] = [];
|
||||
let position = fileSize;
|
||||
let chunk = Buffer.alloc(CHUNK_SIZE);
|
||||
let linesFound = 0;
|
||||
let remainingText = '';
|
||||
|
||||
// Read chunks from the end of the file until we have enough lines
|
||||
while (position > 0 && linesFound < numLines) {
|
||||
const size = Math.min(CHUNK_SIZE, position);
|
||||
position -= size;
|
||||
|
||||
const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
|
||||
if (!bytesRead) break;
|
||||
|
||||
// Get the chunk as a string and prepend any remaining text from previous iteration
|
||||
const readData = chunk.slice(0, bytesRead).toString('utf-8');
|
||||
const chunkText = readData + remainingText;
|
||||
|
||||
// Split by newlines and count
|
||||
const chunkLines = normalizeLineEndings(chunkText).split('\n');
|
||||
|
||||
// If this isn't the end of the file, the first line is likely incomplete
|
||||
// Save it to prepend to the next chunk
|
||||
if (position > 0) {
|
||||
remainingText = chunkLines[0];
|
||||
chunkLines.shift(); // Remove the first (incomplete) line
|
||||
}
|
||||
|
||||
// Add lines to our result (up to the number we need)
|
||||
for (let i = chunkLines.length - 1; i >= 0 && linesFound < numLines; i--) {
|
||||
lines.unshift(chunkLines[i]);
|
||||
linesFound++;
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
} finally {
|
||||
await fileHandle.close();
|
||||
}
|
||||
}
|
||||
|
||||
// New function to get the first N lines of a file
|
||||
async function headFile(filePath: string, numLines: number): Promise<string> {
|
||||
const fileHandle = await fs.open(filePath, 'r');
|
||||
try {
|
||||
const lines: string[] = [];
|
||||
let buffer = '';
|
||||
let bytesRead = 0;
|
||||
const chunk = Buffer.alloc(1024); // 1KB buffer
|
||||
|
||||
// Read chunks and count lines until we have enough or reach EOF
|
||||
while (lines.length < numLines) {
|
||||
const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead);
|
||||
if (result.bytesRead === 0) break; // End of file
|
||||
bytesRead += result.bytesRead;
|
||||
buffer += chunk.slice(0, result.bytesRead).toString('utf-8');
|
||||
|
||||
const newLineIndex = buffer.lastIndexOf('\n');
|
||||
if (newLineIndex !== -1) {
|
||||
const completeLines = buffer.slice(0, newLineIndex).split('\n');
|
||||
buffer = buffer.slice(newLineIndex + 1);
|
||||
for (const line of completeLines) {
|
||||
lines.push(line);
|
||||
if (lines.length >= numLines) break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If there is leftover content and we still need lines, add it
|
||||
if (buffer.length > 0 && lines.length < numLines) {
|
||||
lines.push(buffer);
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
} finally {
|
||||
await fileHandle.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Tool handlers
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
@@ -340,7 +481,9 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
"Read the complete contents of a file from the file system. " +
|
||||
"Handles various text encodings and provides detailed error messages " +
|
||||
"if the file cannot be read. Use this tool when you need to examine " +
|
||||
"the contents of a single file. Only works within allowed directories.",
|
||||
"the contents of a single file. Use the 'head' parameter to read only " +
|
||||
"the first N lines of a file, or the 'tail' parameter to read only " +
|
||||
"the last N lines of a file. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ReadFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
@@ -387,6 +530,15 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
"finding specific files within a directory. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ListDirectoryArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "list_directory_with_sizes",
|
||||
description:
|
||||
"Get a detailed listing of all files and directories in a specified path, including sizes. " +
|
||||
"Results clearly distinguish between files and directories with [FILE] and [DIR] " +
|
||||
"prefixes. This tool is useful for understanding directory structure and " +
|
||||
"finding specific files within a directory. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ListDirectoryWithSizesArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "directory_tree",
|
||||
description:
|
||||
@@ -427,8 +579,8 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
{
|
||||
name: "list_allowed_directories",
|
||||
description:
|
||||
"Returns the list of directories that this server is allowed to access. " +
|
||||
"Use this to understand which directories are available before trying to access files.",
|
||||
"Returns the list of root directories that this server is allowed to access. " +
|
||||
"Use this to understand which directories are available before trying to access files. ",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
@@ -451,6 +603,27 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
throw new Error(`Invalid arguments for read_file: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await validatePath(parsed.data.path);
|
||||
|
||||
if (parsed.data.head && parsed.data.tail) {
|
||||
throw new Error("Cannot specify both head and tail parameters simultaneously");
|
||||
}
|
||||
|
||||
if (parsed.data.tail) {
|
||||
// Use memory-efficient tail implementation for large files
|
||||
const tailContent = await tailFile(validPath, parsed.data.tail);
|
||||
return {
|
||||
content: [{ type: "text", text: tailContent }],
|
||||
};
|
||||
}
|
||||
|
||||
if (parsed.data.head) {
|
||||
// Use memory-efficient head implementation for large files
|
||||
const headContent = await headFile(validPath, parsed.data.head);
|
||||
return {
|
||||
content: [{ type: "text", text: headContent }],
|
||||
};
|
||||
}
|
||||
|
||||
const content = await fs.readFile(validPath, "utf-8");
|
||||
return {
|
||||
content: [{ type: "text", text: content }],
|
||||
@@ -485,7 +658,31 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
throw new Error(`Invalid arguments for write_file: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await validatePath(parsed.data.path);
|
||||
await fs.writeFile(validPath, parsed.data.content, "utf-8");
|
||||
|
||||
try {
|
||||
// Security: 'wx' flag ensures exclusive creation - fails if file/symlink exists,
|
||||
// preventing writes through pre-existing symlinks
|
||||
await fs.writeFile(validPath, parsed.data.content, { encoding: "utf-8", flag: 'wx' });
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'EEXIST') {
|
||||
// Security: Use atomic rename to prevent race conditions where symlinks
|
||||
// could be created between validation and write. Rename operations
|
||||
// replace the target file atomically and don't follow symlinks.
|
||||
const tempPath = `${validPath}.${randomBytes(16).toString('hex')}.tmp`;
|
||||
try {
|
||||
await fs.writeFile(tempPath, parsed.data.content, 'utf-8');
|
||||
await fs.rename(tempPath, validPath);
|
||||
} catch (renameError) {
|
||||
try {
|
||||
await fs.unlink(tempPath);
|
||||
} catch {}
|
||||
throw renameError;
|
||||
}
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{ type: "text", text: `Successfully wrote to ${parsed.data.path}` }],
|
||||
};
|
||||
@@ -530,11 +727,77 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
};
|
||||
}
|
||||
|
||||
case "directory_tree": {
|
||||
const parsed = DirectoryTreeArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`);
|
||||
case "list_directory_with_sizes": {
|
||||
const parsed = ListDirectoryWithSizesArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for list_directory_with_sizes: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await validatePath(parsed.data.path);
|
||||
const entries = await fs.readdir(validPath, { withFileTypes: true });
|
||||
|
||||
// Get detailed information for each entry
|
||||
const detailedEntries = await Promise.all(
|
||||
entries.map(async (entry) => {
|
||||
const entryPath = path.join(validPath, entry.name);
|
||||
try {
|
||||
const stats = await fs.stat(entryPath);
|
||||
return {
|
||||
name: entry.name,
|
||||
isDirectory: entry.isDirectory(),
|
||||
size: stats.size,
|
||||
mtime: stats.mtime
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
name: entry.name,
|
||||
isDirectory: entry.isDirectory(),
|
||||
size: 0,
|
||||
mtime: new Date(0)
|
||||
};
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
// Sort entries based on sortBy parameter
|
||||
const sortedEntries = [...detailedEntries].sort((a, b) => {
|
||||
if (parsed.data.sortBy === 'size') {
|
||||
return b.size - a.size; // Descending by size
|
||||
}
|
||||
// Default sort by name
|
||||
return a.name.localeCompare(b.name);
|
||||
});
|
||||
|
||||
// Format the output
|
||||
const formattedEntries = sortedEntries.map(entry =>
|
||||
`${entry.isDirectory ? "[DIR]" : "[FILE]"} ${entry.name.padEnd(30)} ${
|
||||
entry.isDirectory ? "" : formatSize(entry.size).padStart(10)
|
||||
}`
|
||||
);
|
||||
|
||||
// Add summary
|
||||
const totalFiles = detailedEntries.filter(e => !e.isDirectory).length;
|
||||
const totalDirs = detailedEntries.filter(e => e.isDirectory).length;
|
||||
const totalSize = detailedEntries.reduce((sum, entry) => sum + (entry.isDirectory ? 0 : entry.size), 0);
|
||||
|
||||
const summary = [
|
||||
"",
|
||||
`Total: ${totalFiles} files, ${totalDirs} directories`,
|
||||
`Combined size: ${formatSize(totalSize)}`
|
||||
];
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: [...formattedEntries, ...summary].join("\n")
|
||||
}],
|
||||
};
|
||||
}
|
||||
|
||||
case "directory_tree": {
|
||||
const parsed = DirectoryTreeArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`);
|
||||
}
|
||||
|
||||
interface TreeEntry {
|
||||
name: string;
|
||||
@@ -633,12 +896,62 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Updates allowed directories based on MCP client roots
|
||||
async function updateAllowedDirectoriesFromRoots(requestedRoots: Root[]) {
|
||||
const validatedRootDirs = await getValidRootDirectories(requestedRoots);
|
||||
if (validatedRootDirs.length > 0) {
|
||||
allowedDirectories = [...validatedRootDirs];
|
||||
console.error(`Updated allowed directories from MCP roots: ${validatedRootDirs.length} valid directories`);
|
||||
} else {
|
||||
console.error("No valid root directories provided by client");
|
||||
}
|
||||
}
|
||||
|
||||
// Handles dynamic roots updates during runtime, when client sends "roots/list_changed" notification, server fetches the updated roots and replaces all allowed directories with the new roots.
|
||||
server.setNotificationHandler(RootsListChangedNotificationSchema, async () => {
|
||||
try {
|
||||
// Request the updated roots list from the client
|
||||
const response = await server.listRoots();
|
||||
if (response && 'roots' in response) {
|
||||
await updateAllowedDirectoriesFromRoots(response.roots);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to request roots from client:", error instanceof Error ? error.message : String(error));
|
||||
}
|
||||
});
|
||||
|
||||
// Handles post-initialization setup, specifically checking for and fetching MCP roots.
|
||||
server.oninitialized = async () => {
|
||||
const clientCapabilities = server.getClientCapabilities();
|
||||
|
||||
if (clientCapabilities?.roots) {
|
||||
try {
|
||||
const response = await server.listRoots();
|
||||
if (response && 'roots' in response) {
|
||||
await updateAllowedDirectoriesFromRoots(response.roots);
|
||||
} else {
|
||||
console.error("Client returned no roots set, keeping current settings");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to request initial roots from client:", error instanceof Error ? error.message : String(error));
|
||||
}
|
||||
} else {
|
||||
if (allowedDirectories.length > 0) {
|
||||
console.error("Client does not support MCP Roots, using allowed directories set from server args:", allowedDirectories);
|
||||
}else{
|
||||
throw new Error(`Server cannot operate: No allowed directories available. Server was started without command-line directories and client either does not support MCP roots protocol or provided empty roots. Please either: 1) Start server with directory arguments, or 2) Use a client that supports MCP roots protocol and provides valid root directories.`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Start server
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("Secure MCP Filesystem Server running on stdio");
|
||||
console.error("Allowed directories:", allowedDirectories);
|
||||
if (allowedDirectories.length === 0) {
|
||||
console.error("Started without allowed directories - waiting for client to provide roots via MCP protocol");
|
||||
}
|
||||
}
|
||||
|
||||
runServer().catch((error) => {
|
||||
|
||||
23
src/filesystem/jest.config.cjs
Normal file
23
src/filesystem/jest.config.cjs
Normal file
@@ -0,0 +1,23 @@
|
||||
/** @type {import('ts-jest').JestConfigWithTsJest} */
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
extensionsToTreatAsEsm: ['.ts'],
|
||||
moduleNameMapper: {
|
||||
'^(\\.{1,2}/.*)\\.js$': '$1',
|
||||
},
|
||||
transform: {
|
||||
'^.+\\.tsx?$': [
|
||||
'ts-jest',
|
||||
{
|
||||
useESM: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
testMatch: ['**/__tests__/**/*.test.ts'],
|
||||
collectCoverageFrom: [
|
||||
'**/*.ts',
|
||||
'!**/__tests__/**',
|
||||
'!**/dist/**',
|
||||
],
|
||||
}
|
||||
@@ -16,20 +16,26 @@
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
"watch": "tsc --watch",
|
||||
"test": "jest --config=jest.config.cjs --coverage"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "0.5.0",
|
||||
"@modelcontextprotocol/sdk": "^1.12.3",
|
||||
"diff": "^5.1.0",
|
||||
"glob": "^10.3.10",
|
||||
"minimatch": "^10.0.1",
|
||||
"zod-to-json-schema": "^3.23.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@jest/globals": "^29.7.0",
|
||||
"@types/diff": "^5.0.9",
|
||||
"@types/jest": "^29.5.14",
|
||||
"@types/minimatch": "^5.1.2",
|
||||
"@types/node": "^22",
|
||||
"jest": "^29.7.0",
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.3.3"
|
||||
"ts-jest": "^29.1.1",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.8.2"
|
||||
}
|
||||
}
|
||||
104
src/filesystem/path-utils.ts
Normal file
104
src/filesystem/path-utils.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import path from "path";
|
||||
import os from 'os';
|
||||
|
||||
/**
|
||||
* Converts WSL or Unix-style Windows paths to Windows format
|
||||
* @param p The path to convert
|
||||
* @returns Converted Windows path
|
||||
*/
|
||||
export function convertToWindowsPath(p: string): string {
|
||||
// Handle WSL paths (/mnt/c/...)
|
||||
if (p.startsWith('/mnt/')) {
|
||||
const driveLetter = p.charAt(5).toUpperCase();
|
||||
const pathPart = p.slice(6).replace(/\//g, '\\');
|
||||
return `${driveLetter}:${pathPart}`;
|
||||
}
|
||||
|
||||
// Handle Unix-style Windows paths (/c/...)
|
||||
if (p.match(/^\/[a-zA-Z]\//)) {
|
||||
const driveLetter = p.charAt(1).toUpperCase();
|
||||
const pathPart = p.slice(2).replace(/\//g, '\\');
|
||||
return `${driveLetter}:${pathPart}`;
|
||||
}
|
||||
|
||||
// Handle standard Windows paths, ensuring backslashes
|
||||
if (p.match(/^[a-zA-Z]:/)) {
|
||||
return p.replace(/\//g, '\\');
|
||||
}
|
||||
|
||||
// Leave non-Windows paths unchanged
|
||||
return p;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes path by standardizing format while preserving OS-specific behavior
|
||||
* @param p The path to normalize
|
||||
* @returns Normalized path
|
||||
*/
|
||||
export function normalizePath(p: string): string {
|
||||
// Remove any surrounding quotes and whitespace
|
||||
p = p.trim().replace(/^["']|["']$/g, '');
|
||||
|
||||
// Check if this is a Unix path (starts with / but not a Windows or WSL path)
|
||||
const isUnixPath = p.startsWith('/') &&
|
||||
!p.match(/^\/mnt\/[a-z]\//i) &&
|
||||
!p.match(/^\/[a-zA-Z]\//);
|
||||
|
||||
if (isUnixPath) {
|
||||
// For Unix paths, just normalize without converting to Windows format
|
||||
// Replace double slashes with single slashes and remove trailing slashes
|
||||
return p.replace(/\/+/g, '/').replace(/\/+$/, '');
|
||||
}
|
||||
|
||||
// Convert WSL or Unix-style Windows paths to Windows format
|
||||
p = convertToWindowsPath(p);
|
||||
|
||||
// Handle double backslashes, preserving leading UNC \\
|
||||
if (p.startsWith('\\\\')) {
|
||||
// For UNC paths, first normalize any excessive leading backslashes to exactly \\
|
||||
// Then normalize double backslashes in the rest of the path
|
||||
let uncPath = p;
|
||||
// Replace multiple leading backslashes with exactly two
|
||||
uncPath = uncPath.replace(/^\\{2,}/, '\\\\');
|
||||
// Now normalize any remaining double backslashes in the rest of the path
|
||||
const restOfPath = uncPath.substring(2).replace(/\\\\/g, '\\');
|
||||
p = '\\\\' + restOfPath;
|
||||
} else {
|
||||
// For non-UNC paths, normalize all double backslashes
|
||||
p = p.replace(/\\\\/g, '\\');
|
||||
}
|
||||
|
||||
// Use Node's path normalization, which handles . and .. segments
|
||||
let normalized = path.normalize(p);
|
||||
|
||||
// Fix UNC paths after normalization (path.normalize can remove a leading backslash)
|
||||
if (p.startsWith('\\\\') && !normalized.startsWith('\\\\')) {
|
||||
normalized = '\\' + normalized;
|
||||
}
|
||||
|
||||
// Handle Windows paths: convert slashes and ensure drive letter is capitalized
|
||||
if (normalized.match(/^[a-zA-Z]:/)) {
|
||||
let result = normalized.replace(/\//g, '\\');
|
||||
// Capitalize drive letter if present
|
||||
if (/^[a-z]:/.test(result)) {
|
||||
result = result.charAt(0).toUpperCase() + result.slice(1);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// For all other paths (including relative paths), convert forward slashes to backslashes
|
||||
// This ensures relative paths like "some/relative/path" become "some\\relative\\path"
|
||||
return normalized.replace(/\//g, '\\');
|
||||
}
|
||||
|
||||
/**
|
||||
* Expands home directory tildes in paths
|
||||
* @param filepath The path to expand
|
||||
* @returns Expanded path
|
||||
*/
|
||||
export function expandHome(filepath: string): string {
|
||||
if (filepath.startsWith('~/') || filepath === '~') {
|
||||
return path.join(os.homedir(), filepath.slice(1));
|
||||
}
|
||||
return filepath;
|
||||
}
|
||||
77
src/filesystem/path-validation.ts
Normal file
77
src/filesystem/path-validation.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import path from 'path';
|
||||
|
||||
/**
|
||||
* Checks if an absolute path is within any of the allowed directories.
|
||||
*
|
||||
* @param absolutePath - The absolute path to check (will be normalized)
|
||||
* @param allowedDirectories - Array of absolute allowed directory paths (will be normalized)
|
||||
* @returns true if the path is within an allowed directory, false otherwise
|
||||
* @throws Error if given relative paths after normalization
|
||||
*/
|
||||
export function isPathWithinAllowedDirectories(absolutePath: string, allowedDirectories: string[]): boolean {
|
||||
// Type validation
|
||||
if (typeof absolutePath !== 'string' || !Array.isArray(allowedDirectories)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Reject empty inputs
|
||||
if (!absolutePath || allowedDirectories.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Reject null bytes (forbidden in paths)
|
||||
if (absolutePath.includes('\x00')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Normalize the input path
|
||||
let normalizedPath: string;
|
||||
try {
|
||||
normalizedPath = path.resolve(path.normalize(absolutePath));
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Verify it's absolute after normalization
|
||||
if (!path.isAbsolute(normalizedPath)) {
|
||||
throw new Error('Path must be absolute after normalization');
|
||||
}
|
||||
|
||||
// Check against each allowed directory
|
||||
return allowedDirectories.some(dir => {
|
||||
if (typeof dir !== 'string' || !dir) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Reject null bytes in allowed dirs
|
||||
if (dir.includes('\x00')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Normalize the allowed directory
|
||||
let normalizedDir: string;
|
||||
try {
|
||||
normalizedDir = path.resolve(path.normalize(dir));
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Verify allowed directory is absolute after normalization
|
||||
if (!path.isAbsolute(normalizedDir)) {
|
||||
throw new Error('Allowed directories must be absolute paths after normalization');
|
||||
}
|
||||
|
||||
// Check if normalizedPath is within normalizedDir
|
||||
// Path is inside if it's the same or a subdirectory
|
||||
if (normalizedPath === normalizedDir) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Special case for root directory to avoid double slash
|
||||
if (normalizedDir === path.sep) {
|
||||
return normalizedPath.startsWith(path.sep);
|
||||
}
|
||||
|
||||
return normalizedPath.startsWith(normalizedDir + path.sep);
|
||||
});
|
||||
}
|
||||
76
src/filesystem/roots-utils.ts
Normal file
76
src/filesystem/roots-utils.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { promises as fs, type Stats } from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { normalizePath } from './path-utils.js';
|
||||
import type { Root } from '@modelcontextprotocol/sdk/types.js';
|
||||
|
||||
/**
|
||||
* Converts a root URI to a normalized directory path with basic security validation.
|
||||
* @param rootUri - File URI (file://...) or plain directory path
|
||||
* @returns Promise resolving to validated path or null if invalid
|
||||
*/
|
||||
async function parseRootUri(rootUri: string): Promise<string | null> {
|
||||
try {
|
||||
const rawPath = rootUri.startsWith('file://') ? rootUri.slice(7) : rootUri;
|
||||
const expandedPath = rawPath.startsWith('~/') || rawPath === '~'
|
||||
? path.join(os.homedir(), rawPath.slice(1))
|
||||
: rawPath;
|
||||
const absolutePath = path.resolve(expandedPath);
|
||||
const resolvedPath = await fs.realpath(absolutePath);
|
||||
return normalizePath(resolvedPath);
|
||||
} catch {
|
||||
return null; // Path doesn't exist or other error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats error message for directory validation failures.
|
||||
* @param dir - Directory path that failed validation
|
||||
* @param error - Error that occurred during validation
|
||||
* @param reason - Specific reason for failure
|
||||
* @returns Formatted error message
|
||||
*/
|
||||
function formatDirectoryError(dir: string, error?: unknown, reason?: string): string {
|
||||
if (reason) {
|
||||
return `Skipping ${reason}: ${dir}`;
|
||||
}
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
return `Skipping invalid directory: ${dir} due to error: ${message}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves requested root directories from MCP root specifications.
|
||||
*
|
||||
* Converts root URI specifications (file:// URIs or plain paths) into normalized
|
||||
* directory paths, validating that each path exists and is a directory.
|
||||
* Includes symlink resolution for security.
|
||||
*
|
||||
* @param requestedRoots - Array of root specifications with URI and optional name
|
||||
* @returns Promise resolving to array of validated directory paths
|
||||
*/
|
||||
export async function getValidRootDirectories(
|
||||
requestedRoots: readonly Root[]
|
||||
): Promise<string[]> {
|
||||
const validatedDirectories: string[] = [];
|
||||
|
||||
for (const requestedRoot of requestedRoots) {
|
||||
const resolvedPath = await parseRootUri(requestedRoot.uri);
|
||||
if (!resolvedPath) {
|
||||
console.error(formatDirectoryError(requestedRoot.uri, undefined, 'invalid path or inaccessible'));
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const stats: Stats = await fs.stat(resolvedPath);
|
||||
if (stats.isDirectory()) {
|
||||
validatedDirectories.push(resolvedPath);
|
||||
} else {
|
||||
console.error(formatDirectoryError(resolvedPath, undefined, 'non-directory root'));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(formatDirectoryError(resolvedPath, error));
|
||||
}
|
||||
}
|
||||
|
||||
return validatedDirectories;
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder
|
||||
|
||||
COPY src/gdrive /app
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev
|
||||
|
||||
FROM node:22-alpine AS release
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/dist /app/dist
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
COPY src/gdrive/replace_open.sh /replace_open.sh
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
RUN sh /replace_open.sh
|
||||
|
||||
RUN rm /replace_open.sh
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,95 +0,0 @@
|
||||
# Google Drive server
|
||||
|
||||
This MCP server integrates with Google Drive to allow listing, reading, and searching over files.
|
||||
|
||||
## Components
|
||||
|
||||
### Tools
|
||||
|
||||
- **search**
|
||||
- Search for files in Google Drive
|
||||
- Input: `query` (string): Search query
|
||||
- Returns file names and MIME types of matching files
|
||||
|
||||
### Resources
|
||||
|
||||
The server provides access to Google Drive files:
|
||||
|
||||
- **Files** (`gdrive:///<file_id>`)
|
||||
- Supports all file types
|
||||
- Google Workspace files are automatically exported:
|
||||
- Docs → Markdown
|
||||
- Sheets → CSV
|
||||
- Presentations → Plain text
|
||||
- Drawings → PNG
|
||||
- Other files are provided in their native format
|
||||
|
||||
## Getting started
|
||||
|
||||
1. [Create a new Google Cloud project](https://console.cloud.google.com/projectcreate)
|
||||
2. [Enable the Google Drive API](https://console.cloud.google.com/workspace-api/products)
|
||||
3. [Configure an OAuth consent screen](https://console.cloud.google.com/apis/credentials/consent) ("internal" is fine for testing)
|
||||
4. Add OAuth scope `https://www.googleapis.com/auth/drive.readonly`
|
||||
5. [Create an OAuth Client ID](https://console.cloud.google.com/apis/credentials/oauthclient) for application type "Desktop App"
|
||||
6. Download the JSON file of your client's OAuth keys
|
||||
7. Rename the key file to `gcp-oauth.keys.json` and place into the root of this repo (i.e. `servers/gcp-oauth.keys.json`)
|
||||
|
||||
Make sure to build the server with either `npm run build` or `npm run watch`.
|
||||
|
||||
### Authentication
|
||||
|
||||
To authenticate and save credentials:
|
||||
|
||||
1. Run the server with the `auth` argument: `node ./dist auth`
|
||||
2. This will open an authentication flow in your system browser
|
||||
3. Complete the authentication process
|
||||
4. Credentials will be saved in the root of this repo (i.e. `servers/.gdrive-server-credentials.json`)
|
||||
|
||||
### Usage with Desktop App
|
||||
|
||||
To integrate this server with the desktop app, add the following to your app's server configuration:
|
||||
|
||||
#### Docker
|
||||
|
||||
Authentication:
|
||||
|
||||
Assuming you have completed setting up the OAuth application on Google Cloud, you can now auth the server with the following command, replacing `/path/to/gcp-oauth.keys.json` with the path to your OAuth keys file:
|
||||
|
||||
```bash
|
||||
docker run -i --rm --mount type=bind,source=/path/to/gcp-oauth.keys.json,target=/gcp-oauth.keys.json -v mcp-gdrive:/gdrive-server -e GDRIVE_OAUTH_PATH=/gcp-oauth.keys.json -e "GDRIVE_CREDENTIALS_PATH=/gdrive-server/credentials.json" -p 3000:3000 mcp/gdrive auth
|
||||
```
|
||||
|
||||
The command will print the URL to open in your browser. Open this URL in your browser and complete the authentication process. The credentials will be saved in the `mcp-gdrive` volume.
|
||||
|
||||
Once authenticated, you can use the server in your app's server configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"gdrive": {
|
||||
"command": "docker",
|
||||
"args": ["run", "-i", "--rm", "-v", "mcp-gdrive:/gdrive-server", "-e", "GDRIVE_CREDENTIALS_PATH=/gdrive-server/credentials.json", "mcp/gdrive"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"gdrive": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-gdrive"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,219 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { authenticate } from "@google-cloud/local-auth";
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListResourcesRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
ReadResourceRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import fs from "fs";
|
||||
import { google } from "googleapis";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const drive = google.drive("v3");
|
||||
|
||||
const server = new Server(
|
||||
{
|
||||
name: "example-servers/gdrive",
|
||||
version: "0.1.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
resources: {},
|
||||
tools: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
server.setRequestHandler(ListResourcesRequestSchema, async (request) => {
|
||||
const pageSize = 10;
|
||||
const params: any = {
|
||||
pageSize,
|
||||
fields: "nextPageToken, files(id, name, mimeType)",
|
||||
};
|
||||
|
||||
if (request.params?.cursor) {
|
||||
params.pageToken = request.params.cursor;
|
||||
}
|
||||
|
||||
const res = await drive.files.list(params);
|
||||
const files = res.data.files!;
|
||||
|
||||
return {
|
||||
resources: files.map((file) => ({
|
||||
uri: `gdrive:///${file.id}`,
|
||||
mimeType: file.mimeType,
|
||||
name: file.name,
|
||||
})),
|
||||
nextCursor: res.data.nextPageToken,
|
||||
};
|
||||
});
|
||||
|
||||
server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
|
||||
const fileId = request.params.uri.replace("gdrive:///", "");
|
||||
|
||||
// First get file metadata to check mime type
|
||||
const file = await drive.files.get({
|
||||
fileId,
|
||||
fields: "mimeType",
|
||||
});
|
||||
|
||||
// For Google Docs/Sheets/etc we need to export
|
||||
if (file.data.mimeType?.startsWith("application/vnd.google-apps")) {
|
||||
let exportMimeType: string;
|
||||
switch (file.data.mimeType) {
|
||||
case "application/vnd.google-apps.document":
|
||||
exportMimeType = "text/markdown";
|
||||
break;
|
||||
case "application/vnd.google-apps.spreadsheet":
|
||||
exportMimeType = "text/csv";
|
||||
break;
|
||||
case "application/vnd.google-apps.presentation":
|
||||
exportMimeType = "text/plain";
|
||||
break;
|
||||
case "application/vnd.google-apps.drawing":
|
||||
exportMimeType = "image/png";
|
||||
break;
|
||||
default:
|
||||
exportMimeType = "text/plain";
|
||||
}
|
||||
|
||||
const res = await drive.files.export(
|
||||
{ fileId, mimeType: exportMimeType },
|
||||
{ responseType: "text" },
|
||||
);
|
||||
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri: request.params.uri,
|
||||
mimeType: exportMimeType,
|
||||
text: res.data,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
// For regular files download content
|
||||
const res = await drive.files.get(
|
||||
{ fileId, alt: "media" },
|
||||
{ responseType: "arraybuffer" },
|
||||
);
|
||||
const mimeType = file.data.mimeType || "application/octet-stream";
|
||||
if (mimeType.startsWith("text/") || mimeType === "application/json") {
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri: request.params.uri,
|
||||
mimeType: mimeType,
|
||||
text: Buffer.from(res.data as ArrayBuffer).toString("utf-8"),
|
||||
},
|
||||
],
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri: request.params.uri,
|
||||
mimeType: mimeType,
|
||||
blob: Buffer.from(res.data as ArrayBuffer).toString("base64"),
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: "search",
|
||||
description: "Search for files in Google Drive",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: {
|
||||
type: "string",
|
||||
description: "Search query",
|
||||
},
|
||||
},
|
||||
required: ["query"],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === "search") {
|
||||
const userQuery = request.params.arguments?.query as string;
|
||||
const escapedQuery = userQuery.replace(/\\/g, "\\\\").replace(/'/g, "\\'");
|
||||
const formattedQuery = `fullText contains '${escapedQuery}'`;
|
||||
|
||||
const res = await drive.files.list({
|
||||
q: formattedQuery,
|
||||
pageSize: 10,
|
||||
fields: "files(id, name, mimeType, modifiedTime, size)",
|
||||
});
|
||||
|
||||
const fileList = res.data.files
|
||||
?.map((file: any) => `${file.name} (${file.mimeType})`)
|
||||
.join("\n");
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Found ${res.data.files?.length ?? 0} files:\n${fileList}`,
|
||||
},
|
||||
],
|
||||
isError: false,
|
||||
};
|
||||
}
|
||||
throw new Error("Tool not found");
|
||||
});
|
||||
|
||||
const credentialsPath = process.env.GDRIVE_CREDENTIALS_PATH || path.join(
|
||||
path.dirname(fileURLToPath(import.meta.url)),
|
||||
"../../../.gdrive-server-credentials.json",
|
||||
);
|
||||
|
||||
async function authenticateAndSaveCredentials() {
|
||||
console.log("Launching auth flow…");
|
||||
const auth = await authenticate({
|
||||
keyfilePath: process.env.GDRIVE_OAUTH_PATH || path.join(
|
||||
path.dirname(fileURLToPath(import.meta.url)),
|
||||
"../../../gcp-oauth.keys.json",
|
||||
),
|
||||
scopes: ["https://www.googleapis.com/auth/drive.readonly"],
|
||||
});
|
||||
fs.writeFileSync(credentialsPath, JSON.stringify(auth.credentials));
|
||||
console.log("Credentials saved. You can now run the server.");
|
||||
}
|
||||
|
||||
async function loadCredentialsAndRunServer() {
|
||||
if (!fs.existsSync(credentialsPath)) {
|
||||
console.error(
|
||||
"Credentials not found. Please run with 'auth' argument first.",
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const credentials = JSON.parse(fs.readFileSync(credentialsPath, "utf-8"));
|
||||
const auth = new google.auth.OAuth2();
|
||||
auth.setCredentials(credentials);
|
||||
google.options({ auth });
|
||||
|
||||
console.error("Credentials loaded. Starting server.");
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
}
|
||||
|
||||
if (process.argv[2] === "auth") {
|
||||
authenticateAndSaveCredentials().catch(console.error);
|
||||
} else {
|
||||
loadCredentialsAndRunServer().catch(console.error);
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-gdrive",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for interacting with Google Drive",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-gdrive": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@google-cloud/local-auth": "^3.0.1",
|
||||
"@modelcontextprotocol/sdk": "1.0.1",
|
||||
"googleapis": "^144.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22",
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
#! /bin/bash
|
||||
|
||||
# Basic script to replace opn(authorizeUrl, { wait: false }).then(cp => cp.unref()); with process.stdout.write(`Open this URL in your browser: ${authorizeUrl}`);
|
||||
|
||||
sed -i 's/opn(authorizeUrl, { wait: false }).then(cp => cp.unref());/process.stderr.write(`Open this URL in your browser: ${authorizeUrl}\n`);/' node_modules/@google-cloud/local-auth/build/src/index.js
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
@@ -16,14 +16,16 @@ Please note that mcp-server-git is currently in early development. The functiona
|
||||
|
||||
2. `git_diff_unstaged`
|
||||
- Shows changes in working directory not yet staged
|
||||
- Input:
|
||||
- Inputs:
|
||||
- `repo_path` (string): Path to Git repository
|
||||
- `context_lines` (number, optional): Number of context lines to show (default: 3)
|
||||
- Returns: Diff output of unstaged changes
|
||||
|
||||
3. `git_diff_staged`
|
||||
- Shows changes that are staged for commit
|
||||
- Input:
|
||||
- Inputs:
|
||||
- `repo_path` (string): Path to Git repository
|
||||
- `context_lines` (number, optional): Number of context lines to show (default: 3)
|
||||
- Returns: Diff output of staged changes
|
||||
|
||||
4. `git_diff`
|
||||
@@ -31,6 +33,7 @@ Please note that mcp-server-git is currently in early development. The functiona
|
||||
- Inputs:
|
||||
- `repo_path` (string): Path to Git repository
|
||||
- `target` (string): Target branch or commit to compare with
|
||||
- `context_lines` (number, optional): Number of context lines to show (default: 3)
|
||||
- Returns: Diff output comparing current state with target
|
||||
|
||||
5. `git_commit`
|
||||
@@ -85,6 +88,15 @@ Please note that mcp-server-git is currently in early development. The functiona
|
||||
- `repo_path` (string): Path to directory to initialize git repo
|
||||
- Returns: Confirmation of repository initialization
|
||||
|
||||
13. `git_branch`
|
||||
- List Git branches
|
||||
- Inputs:
|
||||
- `repo_path` (string): Path to the Git repository.
|
||||
- `branch_type` (string): Whether to list local branches ('local'), remote branches ('remote') or all branches('all').
|
||||
- `contains` (string, optional): The commit sha that branch should contain. Do not pass anything to this param if no commit sha is specified
|
||||
- `not_contains` (string, optional): The commit sha that branch should NOT contain. Do not pass anything to this param if no commit sha is specified
|
||||
- Returns: List of branches
|
||||
|
||||
## Installation
|
||||
|
||||
### Using uv (recommended)
|
||||
@@ -153,6 +165,54 @@ Add this to your `claude_desktop_config.json`:
|
||||
```
|
||||
</details>
|
||||
|
||||
### Usage with VS Code
|
||||
|
||||
For quick installation, use one of the one-click install buttons below...
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D&quality=insiders)
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D&quality=insiders)
|
||||
|
||||
For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`.
|
||||
|
||||
Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
|
||||
|
||||
> Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"git": {
|
||||
"command": "uvx",
|
||||
"args": ["mcp-server-git"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For Docker installation:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"git": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"--rm",
|
||||
"-i",
|
||||
"--mount", "type=bind,src=${workspaceFolder},dst=/workspace",
|
||||
"mcp/git"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Usage with [Zed](https://github.com/zed-industries/zed)
|
||||
|
||||
Add to your Zed settings.json:
|
||||
@@ -187,6 +247,24 @@ Add to your Zed settings.json:
|
||||
```
|
||||
</details>
|
||||
|
||||
### Usage with [Zencoder](https://zencoder.ai)
|
||||
|
||||
1. Go to the Zencoder menu (...)
|
||||
2. From the dropdown menu, select `Agent Tools`
|
||||
3. Click on the `Add Custom MCP`
|
||||
4. Add the name (i.e. git) and server configuration from below, and make sure to hit the `Install` button
|
||||
|
||||
<details>
|
||||
<summary>Using uvx</summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"command": "uvx",
|
||||
"args": ["mcp-server-git", "--repository", "path/to/git/repo"]
|
||||
}
|
||||
```
|
||||
</details>
|
||||
|
||||
## Debugging
|
||||
|
||||
You can use the MCP inspector to debug the server. For uvx installations:
|
||||
@@ -240,12 +318,13 @@ If you are doing local development, there are two ways to test your changes:
|
||||
"mcpServers": {
|
||||
"git": {
|
||||
"command": "uv",
|
||||
"args": [
|
||||
"args": [
|
||||
"--directory",
|
||||
"/<path to mcp-servers>/mcp-servers/src/git",
|
||||
"run",
|
||||
"mcp-server-git"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -36,4 +36,4 @@ dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3", "pytest>=8.0.0"]
|
||||
testpaths = ["tests"]
|
||||
python_files = "test_*.py"
|
||||
python_classes = "Test*"
|
||||
python_functions = "test_*"
|
||||
python_functions = "test_*"
|
||||
|
||||
0
src/git/src/mcp_server_git/py.typed
Normal file
0
src/git/src/mcp_server_git/py.typed
Normal file
@@ -1,6 +1,6 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Sequence
|
||||
from typing import Sequence, Optional
|
||||
from mcp.server import Server
|
||||
from mcp.server.session import ServerSession
|
||||
from mcp.server.stdio import stdio_server
|
||||
@@ -13,20 +13,26 @@ from mcp.types import (
|
||||
)
|
||||
from enum import Enum
|
||||
import git
|
||||
from pydantic import BaseModel
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
# Default number of context lines to show in diff output
|
||||
DEFAULT_CONTEXT_LINES = 3
|
||||
|
||||
class GitStatus(BaseModel):
|
||||
repo_path: str
|
||||
|
||||
class GitDiffUnstaged(BaseModel):
|
||||
repo_path: str
|
||||
context_lines: int = DEFAULT_CONTEXT_LINES
|
||||
|
||||
class GitDiffStaged(BaseModel):
|
||||
repo_path: str
|
||||
context_lines: int = DEFAULT_CONTEXT_LINES
|
||||
|
||||
class GitDiff(BaseModel):
|
||||
repo_path: str
|
||||
target: str
|
||||
context_lines: int = DEFAULT_CONTEXT_LINES
|
||||
|
||||
class GitCommit(BaseModel):
|
||||
repo_path: str
|
||||
@@ -59,6 +65,24 @@ class GitShow(BaseModel):
|
||||
class GitInit(BaseModel):
|
||||
repo_path: str
|
||||
|
||||
class GitBranch(BaseModel):
|
||||
repo_path: str = Field(
|
||||
...,
|
||||
description="The path to the Git repository.",
|
||||
)
|
||||
branch_type: str = Field(
|
||||
...,
|
||||
description="Whether to list local branches ('local'), remote branches ('remote') or all branches('all').",
|
||||
)
|
||||
contains: Optional[str] = Field(
|
||||
None,
|
||||
description="The commit sha that branch should contain. Do not pass anything to this param if no commit sha is specified",
|
||||
)
|
||||
not_contains: Optional[str] = Field(
|
||||
None,
|
||||
description="The commit sha that branch should NOT contain. Do not pass anything to this param if no commit sha is specified",
|
||||
)
|
||||
|
||||
class GitTools(str, Enum):
|
||||
STATUS = "git_status"
|
||||
DIFF_UNSTAGED = "git_diff_unstaged"
|
||||
@@ -72,18 +96,19 @@ class GitTools(str, Enum):
|
||||
CHECKOUT = "git_checkout"
|
||||
SHOW = "git_show"
|
||||
INIT = "git_init"
|
||||
BRANCH = "git_branch"
|
||||
|
||||
def git_status(repo: git.Repo) -> str:
|
||||
return repo.git.status()
|
||||
|
||||
def git_diff_unstaged(repo: git.Repo) -> str:
|
||||
return repo.git.diff()
|
||||
def git_diff_unstaged(repo: git.Repo, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
|
||||
return repo.git.diff(f"--unified={context_lines}")
|
||||
|
||||
def git_diff_staged(repo: git.Repo) -> str:
|
||||
return repo.git.diff("--cached")
|
||||
def git_diff_staged(repo: git.Repo, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
|
||||
return repo.git.diff(f"--unified={context_lines}", "--cached")
|
||||
|
||||
def git_diff(repo: git.Repo, target: str) -> str:
|
||||
return repo.git.diff(target)
|
||||
def git_diff(repo: git.Repo, target: str, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
|
||||
return repo.git.diff(f"--unified={context_lines}", target)
|
||||
|
||||
def git_commit(repo: git.Repo, message: str) -> str:
|
||||
commit = repo.index.commit(message)
|
||||
@@ -102,16 +127,16 @@ def git_log(repo: git.Repo, max_count: int = 10) -> list[str]:
|
||||
log = []
|
||||
for commit in commits:
|
||||
log.append(
|
||||
f"Commit: {commit.hexsha}\n"
|
||||
f"Author: {commit.author}\n"
|
||||
f"Commit: {commit.hexsha!r}\n"
|
||||
f"Author: {commit.author!r}\n"
|
||||
f"Date: {commit.authored_datetime}\n"
|
||||
f"Message: {commit.message}\n"
|
||||
f"Message: {commit.message!r}\n"
|
||||
)
|
||||
return log
|
||||
|
||||
def git_create_branch(repo: git.Repo, branch_name: str, base_branch: str | None = None) -> str:
|
||||
if base_branch:
|
||||
base = repo.refs[base_branch]
|
||||
base = repo.references[base_branch]
|
||||
else:
|
||||
base = repo.active_branch
|
||||
|
||||
@@ -132,10 +157,10 @@ def git_init(repo_path: str) -> str:
|
||||
def git_show(repo: git.Repo, revision: str) -> str:
|
||||
commit = repo.commit(revision)
|
||||
output = [
|
||||
f"Commit: {commit.hexsha}\n"
|
||||
f"Author: {commit.author}\n"
|
||||
f"Date: {commit.authored_datetime}\n"
|
||||
f"Message: {commit.message}\n"
|
||||
f"Commit: {commit.hexsha!r}\n"
|
||||
f"Author: {commit.author!r}\n"
|
||||
f"Date: {commit.authored_datetime!r}\n"
|
||||
f"Message: {commit.message!r}\n"
|
||||
]
|
||||
if commit.parents:
|
||||
parent = commit.parents[0]
|
||||
@@ -147,6 +172,34 @@ def git_show(repo: git.Repo, revision: str) -> str:
|
||||
output.append(d.diff.decode('utf-8'))
|
||||
return "".join(output)
|
||||
|
||||
def git_branch(repo: git.Repo, branch_type: str, contains: str | None = None, not_contains: str | None = None) -> str:
|
||||
match contains:
|
||||
case None:
|
||||
contains_sha = (None,)
|
||||
case _:
|
||||
contains_sha = ("--contains", contains)
|
||||
|
||||
match not_contains:
|
||||
case None:
|
||||
not_contains_sha = (None,)
|
||||
case _:
|
||||
not_contains_sha = ("--no-contains", not_contains)
|
||||
|
||||
match branch_type:
|
||||
case 'local':
|
||||
b_type = None
|
||||
case 'remote':
|
||||
b_type = "-r"
|
||||
case 'all':
|
||||
b_type = "-a"
|
||||
case _:
|
||||
return f"Invalid branch type: {branch_type}"
|
||||
|
||||
# None value will be auto deleted by GitPython
|
||||
branch_info = repo.git.branch(b_type, *contains_sha, *not_contains_sha)
|
||||
|
||||
return branch_info
|
||||
|
||||
async def serve(repository: Path | None) -> None:
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -166,62 +219,67 @@ async def serve(repository: Path | None) -> None:
|
||||
Tool(
|
||||
name=GitTools.STATUS,
|
||||
description="Shows the working tree status",
|
||||
inputSchema=GitStatus.schema(),
|
||||
inputSchema=GitStatus.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.DIFF_UNSTAGED,
|
||||
description="Shows changes in the working directory that are not yet staged",
|
||||
inputSchema=GitDiffUnstaged.schema(),
|
||||
inputSchema=GitDiffUnstaged.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.DIFF_STAGED,
|
||||
description="Shows changes that are staged for commit",
|
||||
inputSchema=GitDiffStaged.schema(),
|
||||
inputSchema=GitDiffStaged.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.DIFF,
|
||||
description="Shows differences between branches or commits",
|
||||
inputSchema=GitDiff.schema(),
|
||||
inputSchema=GitDiff.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.COMMIT,
|
||||
description="Records changes to the repository",
|
||||
inputSchema=GitCommit.schema(),
|
||||
inputSchema=GitCommit.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.ADD,
|
||||
description="Adds file contents to the staging area",
|
||||
inputSchema=GitAdd.schema(),
|
||||
inputSchema=GitAdd.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.RESET,
|
||||
description="Unstages all staged changes",
|
||||
inputSchema=GitReset.schema(),
|
||||
inputSchema=GitReset.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.LOG,
|
||||
description="Shows the commit logs",
|
||||
inputSchema=GitLog.schema(),
|
||||
inputSchema=GitLog.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.CREATE_BRANCH,
|
||||
description="Creates a new branch from an optional base branch",
|
||||
inputSchema=GitCreateBranch.schema(),
|
||||
inputSchema=GitCreateBranch.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.CHECKOUT,
|
||||
description="Switches branches",
|
||||
inputSchema=GitCheckout.schema(),
|
||||
inputSchema=GitCheckout.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.SHOW,
|
||||
description="Shows the contents of a commit",
|
||||
inputSchema=GitShow.schema(),
|
||||
inputSchema=GitShow.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.INIT,
|
||||
description="Initialize a new Git repository",
|
||||
inputSchema=GitInit.schema(),
|
||||
inputSchema=GitInit.model_json_schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.BRANCH,
|
||||
description="List Git branches",
|
||||
inputSchema=GitBranch.model_json_schema(),
|
||||
)
|
||||
]
|
||||
|
||||
@@ -278,21 +336,21 @@ async def serve(repository: Path | None) -> None:
|
||||
)]
|
||||
|
||||
case GitTools.DIFF_UNSTAGED:
|
||||
diff = git_diff_unstaged(repo)
|
||||
diff = git_diff_unstaged(repo, arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
|
||||
return [TextContent(
|
||||
type="text",
|
||||
text=f"Unstaged changes:\n{diff}"
|
||||
)]
|
||||
|
||||
case GitTools.DIFF_STAGED:
|
||||
diff = git_diff_staged(repo)
|
||||
diff = git_diff_staged(repo, arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
|
||||
return [TextContent(
|
||||
type="text",
|
||||
text=f"Staged changes:\n{diff}"
|
||||
)]
|
||||
|
||||
case GitTools.DIFF:
|
||||
diff = git_diff(repo, arguments["target"])
|
||||
diff = git_diff(repo, arguments["target"], arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
|
||||
return [TextContent(
|
||||
type="text",
|
||||
text=f"Diff with {arguments['target']}:\n{diff}"
|
||||
@@ -351,6 +409,18 @@ async def serve(repository: Path | None) -> None:
|
||||
text=result
|
||||
)]
|
||||
|
||||
case GitTools.BRANCH:
|
||||
result = git_branch(
|
||||
repo,
|
||||
arguments.get("branch_type", 'local'),
|
||||
arguments.get("contains", None),
|
||||
arguments.get("not_contains", None),
|
||||
)
|
||||
return [TextContent(
|
||||
type="text",
|
||||
text=result
|
||||
)]
|
||||
|
||||
case _:
|
||||
raise ValueError(f"Unknown tool: {name}")
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
import git
|
||||
from mcp_server_git.server import git_checkout
|
||||
from mcp_server_git.server import git_checkout, git_branch
|
||||
import shutil
|
||||
|
||||
@pytest.fixture
|
||||
@@ -27,4 +27,44 @@ def test_git_checkout_existing_branch(test_repository):
|
||||
def test_git_checkout_nonexistent_branch(test_repository):
|
||||
|
||||
with pytest.raises(git.GitCommandError):
|
||||
git_checkout(test_repository, "nonexistent-branch")
|
||||
git_checkout(test_repository, "nonexistent-branch")
|
||||
|
||||
def test_git_branch_local(test_repository):
|
||||
test_repository.git.branch("new-branch-local")
|
||||
result = git_branch(test_repository, "local")
|
||||
assert "new-branch-local" in result
|
||||
|
||||
def test_git_branch_remote(test_repository):
|
||||
# GitPython does not easily support creating remote branches without a remote.
|
||||
# This test will check the behavior when 'remote' is specified without actual remotes.
|
||||
result = git_branch(test_repository, "remote")
|
||||
assert "" == result.strip() # Should be empty if no remote branches
|
||||
|
||||
def test_git_branch_all(test_repository):
|
||||
test_repository.git.branch("new-branch-all")
|
||||
result = git_branch(test_repository, "all")
|
||||
assert "new-branch-all" in result
|
||||
|
||||
def test_git_branch_contains(test_repository):
|
||||
# Create a new branch and commit to it
|
||||
test_repository.git.checkout("-b", "feature-branch")
|
||||
Path(test_repository.working_dir / Path("feature.txt")).write_text("feature content")
|
||||
test_repository.index.add(["feature.txt"])
|
||||
commit = test_repository.index.commit("feature commit")
|
||||
test_repository.git.checkout("master")
|
||||
|
||||
result = git_branch(test_repository, "local", contains=commit.hexsha)
|
||||
assert "feature-branch" in result
|
||||
assert "master" not in result
|
||||
|
||||
def test_git_branch_not_contains(test_repository):
|
||||
# Create a new branch and commit to it
|
||||
test_repository.git.checkout("-b", "another-feature-branch")
|
||||
Path(test_repository.working_dir / Path("another_feature.txt")).write_text("another feature content")
|
||||
test_repository.index.add(["another_feature.txt"])
|
||||
commit = test_repository.index.commit("another feature commit")
|
||||
test_repository.git.checkout("master")
|
||||
|
||||
result = git_branch(test_repository, "local", not_contains=commit.hexsha)
|
||||
assert "another-feature-branch" not in result
|
||||
assert "master" in result
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder
|
||||
|
||||
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
||||
COPY src/github /app
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
FROM node:22.12-alpine AS release
|
||||
|
||||
COPY --from=builder /app/dist /app/dist
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,372 +0,0 @@
|
||||
# GitHub MCP Server
|
||||
|
||||
**Deprecation Notice:** Development for this project has been moved to GitHub in the http://github.com/github/github-mcp-server repo.
|
||||
|
||||
---
|
||||
|
||||
MCP Server for the GitHub API, enabling file operations, repository management, search functionality, and more.
|
||||
|
||||
### Features
|
||||
|
||||
- **Automatic Branch Creation**: When creating/updating files or pushing changes, branches are automatically created if they don't exist
|
||||
- **Comprehensive Error Handling**: Clear error messages for common issues
|
||||
- **Git History Preservation**: Operations maintain proper Git history without force pushing
|
||||
- **Batch Operations**: Support for both single-file and multi-file operations
|
||||
- **Advanced Search**: Support for searching code, issues/PRs, and users
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
1. `create_or_update_file`
|
||||
- Create or update a single file in a repository
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner (username or organization)
|
||||
- `repo` (string): Repository name
|
||||
- `path` (string): Path where to create/update the file
|
||||
- `content` (string): Content of the file
|
||||
- `message` (string): Commit message
|
||||
- `branch` (string): Branch to create/update the file in
|
||||
- `sha` (optional string): SHA of file being replaced (for updates)
|
||||
- Returns: File content and commit details
|
||||
|
||||
2. `push_files`
|
||||
- Push multiple files in a single commit
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `branch` (string): Branch to push to
|
||||
- `files` (array): Files to push, each with `path` and `content`
|
||||
- `message` (string): Commit message
|
||||
- Returns: Updated branch reference
|
||||
|
||||
3. `search_repositories`
|
||||
- Search for GitHub repositories
|
||||
- Inputs:
|
||||
- `query` (string): Search query
|
||||
- `page` (optional number): Page number for pagination
|
||||
- `perPage` (optional number): Results per page (max 100)
|
||||
- Returns: Repository search results
|
||||
|
||||
4. `create_repository`
|
||||
- Create a new GitHub repository
|
||||
- Inputs:
|
||||
- `name` (string): Repository name
|
||||
- `description` (optional string): Repository description
|
||||
- `private` (optional boolean): Whether repo should be private
|
||||
- `autoInit` (optional boolean): Initialize with README
|
||||
- Returns: Created repository details
|
||||
|
||||
5. `get_file_contents`
|
||||
- Get contents of a file or directory
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `path` (string): Path to file/directory
|
||||
- `branch` (optional string): Branch to get contents from
|
||||
- Returns: File/directory contents
|
||||
|
||||
6. `create_issue`
|
||||
- Create a new issue
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `title` (string): Issue title
|
||||
- `body` (optional string): Issue description
|
||||
- `assignees` (optional string[]): Usernames to assign
|
||||
- `labels` (optional string[]): Labels to add
|
||||
- `milestone` (optional number): Milestone number
|
||||
- Returns: Created issue details
|
||||
|
||||
7. `create_pull_request`
|
||||
- Create a new pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `title` (string): PR title
|
||||
- `body` (optional string): PR description
|
||||
- `head` (string): Branch containing changes
|
||||
- `base` (string): Branch to merge into
|
||||
- `draft` (optional boolean): Create as draft PR
|
||||
- `maintainer_can_modify` (optional boolean): Allow maintainer edits
|
||||
- Returns: Created pull request details
|
||||
|
||||
8. `fork_repository`
|
||||
- Fork a repository
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `organization` (optional string): Organization to fork to
|
||||
- Returns: Forked repository details
|
||||
|
||||
9. `create_branch`
|
||||
- Create a new branch
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `branch` (string): Name for new branch
|
||||
- `from_branch` (optional string): Source branch (defaults to repo default)
|
||||
- Returns: Created branch reference
|
||||
|
||||
10. `list_issues`
|
||||
- List and filter repository issues
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `state` (optional string): Filter by state ('open', 'closed', 'all')
|
||||
- `labels` (optional string[]): Filter by labels
|
||||
- `sort` (optional string): Sort by ('created', 'updated', 'comments')
|
||||
- `direction` (optional string): Sort direction ('asc', 'desc')
|
||||
- `since` (optional string): Filter by date (ISO 8601 timestamp)
|
||||
- `page` (optional number): Page number
|
||||
- `per_page` (optional number): Results per page
|
||||
- Returns: Array of issue details
|
||||
|
||||
11. `update_issue`
|
||||
- Update an existing issue
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `issue_number` (number): Issue number to update
|
||||
- `title` (optional string): New title
|
||||
- `body` (optional string): New description
|
||||
- `state` (optional string): New state ('open' or 'closed')
|
||||
- `labels` (optional string[]): New labels
|
||||
- `assignees` (optional string[]): New assignees
|
||||
- `milestone` (optional number): New milestone number
|
||||
- Returns: Updated issue details
|
||||
|
||||
12. `add_issue_comment`
|
||||
- Add a comment to an issue
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `issue_number` (number): Issue number to comment on
|
||||
- `body` (string): Comment text
|
||||
- Returns: Created comment details
|
||||
|
||||
13. `search_code`
|
||||
- Search for code across GitHub repositories
|
||||
- Inputs:
|
||||
- `q` (string): Search query using GitHub code search syntax
|
||||
- `sort` (optional string): Sort field ('indexed' only)
|
||||
- `order` (optional string): Sort order ('asc' or 'desc')
|
||||
- `per_page` (optional number): Results per page (max 100)
|
||||
- `page` (optional number): Page number
|
||||
- Returns: Code search results with repository context
|
||||
|
||||
14. `search_issues`
|
||||
- Search for issues and pull requests
|
||||
- Inputs:
|
||||
- `q` (string): Search query using GitHub issues search syntax
|
||||
- `sort` (optional string): Sort field (comments, reactions, created, etc.)
|
||||
- `order` (optional string): Sort order ('asc' or 'desc')
|
||||
- `per_page` (optional number): Results per page (max 100)
|
||||
- `page` (optional number): Page number
|
||||
- Returns: Issue and pull request search results
|
||||
|
||||
15. `search_users`
|
||||
- Search for GitHub users
|
||||
- Inputs:
|
||||
- `q` (string): Search query using GitHub users search syntax
|
||||
- `sort` (optional string): Sort field (followers, repositories, joined)
|
||||
- `order` (optional string): Sort order ('asc' or 'desc')
|
||||
- `per_page` (optional number): Results per page (max 100)
|
||||
- `page` (optional number): Page number
|
||||
- Returns: User search results
|
||||
|
||||
16. `list_commits`
|
||||
- Gets commits of a branch in a repository
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `page` (optional string): Page number
- `per_page` (optional string): Number of records per page
|
||||
- `sha` (optional string): branch name
|
||||
- Returns: List of commits
|
||||
|
||||
17. `get_issue`
|
||||
- Gets the contents of an issue within a repository
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `issue_number` (number): Issue number to retrieve
|
||||
- Returns: GitHub Issue object & details
|
||||
|
||||
18. `get_pull_request`
|
||||
- Get details of a specific pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- Returns: Pull request details including diff and review status
|
||||
|
||||
19. `list_pull_requests`
|
||||
- List and filter repository pull requests
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `state` (optional string): Filter by state ('open', 'closed', 'all')
|
||||
- `head` (optional string): Filter by head user/org and branch
|
||||
- `base` (optional string): Filter by base branch
|
||||
- `sort` (optional string): Sort by ('created', 'updated', 'popularity', 'long-running')
|
||||
- `direction` (optional string): Sort direction ('asc', 'desc')
|
||||
- `per_page` (optional number): Results per page (max 100)
|
||||
- `page` (optional number): Page number
|
||||
- Returns: Array of pull request details
|
||||
|
||||
20. `create_pull_request_review`
|
||||
- Create a review on a pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- `body` (string): Review comment text
|
||||
- `event` (string): Review action ('APPROVE', 'REQUEST_CHANGES', 'COMMENT')
|
||||
- `commit_id` (optional string): SHA of commit to review
|
||||
- `comments` (optional array): Line-specific comments, each with:
|
||||
- `path` (string): File path
|
||||
- `position` (number): Line position in diff
|
||||
- `body` (string): Comment text
|
||||
- Returns: Created review details
|
||||
|
||||
21. `merge_pull_request`
|
||||
- Merge a pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- `commit_title` (optional string): Title for merge commit
|
||||
- `commit_message` (optional string): Extra detail for merge commit
|
||||
- `merge_method` (optional string): Merge method ('merge', 'squash', 'rebase')
|
||||
- Returns: Merge result details
|
||||
|
||||
22. `get_pull_request_files`
|
||||
- Get the list of files changed in a pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- Returns: Array of changed files with patch and status details
|
||||
|
||||
23. `get_pull_request_status`
|
||||
- Get the combined status of all status checks for a pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- Returns: Combined status check results and individual check details
|
||||
|
||||
24. `update_pull_request_branch`
|
||||
- Update a pull request branch with the latest changes from the base branch (equivalent to GitHub's "Update branch" button)
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- `expected_head_sha` (optional string): The expected SHA of the pull request's HEAD ref
|
||||
- Returns: Success message when branch is updated
|
||||
|
||||
25. `get_pull_request_comments`
|
||||
- Get the review comments on a pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- Returns: Array of pull request review comments with details like the comment text, author, and location in the diff
|
||||
|
||||
26. `get_pull_request_reviews`
|
||||
- Get the reviews on a pull request
|
||||
- Inputs:
|
||||
- `owner` (string): Repository owner
|
||||
- `repo` (string): Repository name
|
||||
- `pull_number` (number): Pull request number
|
||||
- Returns: Array of pull request reviews with details like the review state (APPROVED, CHANGES_REQUESTED, etc.), reviewer, and review body
|
||||
|
||||
## Search Query Syntax
|
||||
|
||||
### Code Search
|
||||
- `language:javascript`: Search by programming language
|
||||
- `repo:owner/name`: Search in specific repository
|
||||
- `path:app/src`: Search in specific path
|
||||
- `extension:js`: Search by file extension
|
||||
- Example: `q: "import express" language:typescript path:src/`
|
||||
|
||||
### Issues Search
|
||||
- `is:issue` or `is:pr`: Filter by type
|
||||
- `is:open` or `is:closed`: Filter by state
|
||||
- `label:bug`: Search by label
|
||||
- `author:username`: Search by author
|
||||
- Example: `q: "memory leak" is:issue is:open label:bug`
|
||||
|
||||
### Users Search
|
||||
- `type:user` or `type:org`: Filter by account type
|
||||
- `followers:>1000`: Filter by followers
|
||||
- `location:London`: Search by location
|
||||
- Example: `q: "fullstack developer" location:London followers:>100`
|
||||
|
||||
For detailed search syntax, see [GitHub's searching documentation](https://docs.github.com/en/search-github/searching-on-github).
|
||||
|
||||
## Setup
|
||||
|
||||
### Personal Access Token
|
||||
[Create a GitHub Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) with appropriate permissions:
|
||||
- Go to [Personal access tokens](https://github.com/settings/tokens) (in GitHub Settings > Developer settings)
|
||||
- Select which repositories you'd like this token to have access to (Public, All, or Select)
|
||||
- Create a token with the `repo` scope ("Full control of private repositories")
|
||||
- Alternatively, if working only with public repositories, select only the `public_repo` scope
|
||||
- Copy the generated token
|
||||
|
||||
### Usage with Claude Desktop
|
||||
To use this with Claude Desktop, add the following to your `claude_desktop_config.json`:
|
||||
|
||||
#### Docker
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"github": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"-i",
|
||||
"--rm",
|
||||
"-e",
|
||||
"GITHUB_PERSONAL_ACCESS_TOKEN",
|
||||
"mcp/github"
|
||||
],
|
||||
"env": {
|
||||
"GITHUB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"github": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-github"
|
||||
],
|
||||
"env": {
|
||||
"GITHUB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Build
|
||||
|
||||
Docker build:
|
||||
|
||||
```bash
|
||||
docker build -t mcp/github -f src/github/Dockerfile .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,89 +0,0 @@
|
||||
/**
 * Base error for all GitHub API failures.
 *
 * Carries the HTTP status code and the raw parsed response body so
 * callers can inspect the failure programmatically.
 */
export class GitHubError extends Error {
  constructor(
    message: string,
    public readonly status: number,
    public readonly response: unknown
  ) {
    super(message);
    this.name = "GitHubError";
  }
}
/** Validation failure (mapped from HTTP 422 in createGitHubError). */
export class GitHubValidationError extends GitHubError {
  constructor(message: string, status: number, response: unknown) {
    super(message, status, response);
    this.name = "GitHubValidationError";
  }
}
/** HTTP 404: the requested resource does not exist. */
export class GitHubResourceNotFoundError extends GitHubError {
  constructor(resource: string) {
    super(`Resource not found: ${resource}`, 404, { message: `${resource} not found` });
    this.name = "GitHubResourceNotFoundError";
  }
}
/** HTTP 401: authentication failed. */
export class GitHubAuthenticationError extends GitHubError {
  constructor(message = "Authentication failed") {
    super(message, 401, { message });
    this.name = "GitHubAuthenticationError";
  }
}
/** HTTP 403: insufficient permissions for the requested operation. */
export class GitHubPermissionError extends GitHubError {
  constructor(message = "Insufficient permissions") {
    super(message, 403, { message });
    this.name = "GitHubPermissionError";
  }
}
/** HTTP 429: rate limit exceeded; `resetAt` is when the limit window resets. */
export class GitHubRateLimitError extends GitHubError {
  constructor(
    message = "Rate limit exceeded",
    public readonly resetAt: Date
  ) {
    super(message, 429, { message, reset_at: resetAt.toISOString() });
    this.name = "GitHubRateLimitError";
  }
}
/** HTTP 409 Conflict. */
export class GitHubConflictError extends GitHubError {
  constructor(message: string) {
    super(message, 409, { message });
    this.name = "GitHubConflictError";
  }
}
/** Type guard: narrows an arbitrary thrown value to GitHubError. */
export function isGitHubError(error: unknown): error is GitHubError {
  return error instanceof GitHubError;
}
export function createGitHubError(status: number, response: any): GitHubError {
|
||||
switch (status) {
|
||||
case 401:
|
||||
return new GitHubAuthenticationError(response?.message);
|
||||
case 403:
|
||||
return new GitHubPermissionError(response?.message);
|
||||
case 404:
|
||||
return new GitHubResourceNotFoundError(response?.message || "Resource");
|
||||
case 409:
|
||||
return new GitHubConflictError(response?.message || "Conflict occurred");
|
||||
case 422:
|
||||
return new GitHubValidationError(
|
||||
response?.message || "Validation failed",
|
||||
status,
|
||||
response
|
||||
);
|
||||
case 429:
|
||||
return new GitHubRateLimitError(
|
||||
response?.message,
|
||||
new Date(response?.reset_at || Date.now() + 60000)
|
||||
);
|
||||
default:
|
||||
return new GitHubError(
|
||||
response?.message || "GitHub API error",
|
||||
status,
|
||||
response
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,259 +0,0 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// Base schemas for common types

/** `author`/`committer` object on git commit payloads. */
export const GitHubAuthorSchema = z.object({
  name: z.string(),
  email: z.string(),
  date: z.string(),
});

/** Repository owner (user or organization) as returned by the REST API. */
export const GitHubOwnerSchema = z.object({
  login: z.string(),
  id: z.number(),
  node_id: z.string(),
  avatar_url: z.string(),
  url: z.string(),
  html_url: z.string(),
  type: z.string(),
});

/** Core repository object. */
export const GitHubRepositorySchema = z.object({
  id: z.number(),
  node_id: z.string(),
  name: z.string(),
  full_name: z.string(),
  private: z.boolean(),
  owner: GitHubOwnerSchema,
  html_url: z.string(),
  description: z.string().nullable(),
  fork: z.boolean(),
  url: z.string(),
  created_at: z.string(),
  updated_at: z.string(),
  pushed_at: z.string(),
  git_url: z.string(),
  ssh_url: z.string(),
  clone_url: z.string(),
  default_branch: z.string(),
});

/** `_links` object attached to file-content responses. */
export const GithubFileContentLinks = z.object({
  self: z.string(),
  git: z.string().nullable(),
  html: z.string().nullable()
});

/** A file from the contents API; `content`/`encoding` are optional. */
export const GitHubFileContentSchema = z.object({
  name: z.string(),
  path: z.string(),
  sha: z.string(),
  size: z.number(),
  url: z.string(),
  html_url: z.string(),
  git_url: z.string(),
  download_url: z.string(),
  type: z.string(),
  content: z.string().optional(),
  encoding: z.string().optional(),
  _links: GithubFileContentLinks
});

/** A directory entry from the contents API (no inline content). */
export const GitHubDirectoryContentSchema = z.object({
  type: z.string(),
  size: z.number(),
  name: z.string(),
  path: z.string(),
  sha: z.string(),
  url: z.string(),
  git_url: z.string(),
  html_url: z.string(),
  download_url: z.string().nullable(),
});

/** Contents API result: a single file, or an array of directory entries. */
export const GitHubContentSchema = z.union([
  GitHubFileContentSchema,
  z.array(GitHubDirectoryContentSchema),
]);

/** One entry in a git tree (blob, subtree, or submodule commit). */
export const GitHubTreeEntrySchema = z.object({
  path: z.string(),
  mode: z.enum(["100644", "100755", "040000", "160000", "120000"]),
  type: z.enum(["blob", "tree", "commit"]),
  size: z.number().optional(),
  sha: z.string(),
  url: z.string(),
});

/** A git tree object. */
export const GitHubTreeSchema = z.object({
  sha: z.string(),
  url: z.string(),
  tree: z.array(GitHubTreeEntrySchema),
  truncated: z.boolean(),
});

/** A git commit object (git data API shape). */
export const GitHubCommitSchema = z.object({
  sha: z.string(),
  node_id: z.string(),
  url: z.string(),
  author: GitHubAuthorSchema,
  committer: GitHubAuthorSchema,
  message: z.string(),
  tree: z.object({
    sha: z.string(),
    url: z.string(),
  }),
  parents: z.array(
    z.object({
      sha: z.string(),
      url: z.string(),
    })
  ),
});

/** Response shape for the "list commits" endpoint. */
export const GitHubListCommitsSchema = z.array(z.object({
  sha: z.string(),
  node_id: z.string(),
  commit: z.object({
    author: GitHubAuthorSchema,
    committer: GitHubAuthorSchema,
    message: z.string(),
    tree: z.object({
      sha: z.string(),
      url: z.string()
    }),
    url: z.string(),
    comment_count: z.number(),
  }),
  url: z.string(),
  html_url: z.string(),
  comments_url: z.string()
}));

/** A git reference (e.g. a branch) and the object it points to. */
export const GitHubReferenceSchema = z.object({
  ref: z.string(),
  node_id: z.string(),
  url: z.string(),
  object: z.object({
    sha: z.string(),
    type: z.string(),
    url: z.string(),
  }),
});
// User and assignee schemas

/** Minimal user object used for assignees, reviewers, and authors. */
export const GitHubIssueAssigneeSchema = z.object({
  login: z.string(),
  id: z.number(),
  avatar_url: z.string(),
  url: z.string(),
  html_url: z.string(),
});

// Issue-related schemas

/** An issue/PR label. */
export const GitHubLabelSchema = z.object({
  id: z.number(),
  node_id: z.string(),
  url: z.string(),
  name: z.string(),
  color: z.string(),
  default: z.boolean(),
  description: z.string().nullable().optional(),
});

/** A milestone attached to an issue or pull request. */
export const GitHubMilestoneSchema = z.object({
  url: z.string(),
  html_url: z.string(),
  labels_url: z.string(),
  id: z.number(),
  node_id: z.string(),
  number: z.number(),
  title: z.string(),
  description: z.string(),
  state: z.string(),
});

/** A GitHub issue; `closed_at` and `body` may be null. */
export const GitHubIssueSchema = z.object({
  url: z.string(),
  repository_url: z.string(),
  labels_url: z.string(),
  comments_url: z.string(),
  events_url: z.string(),
  html_url: z.string(),
  id: z.number(),
  node_id: z.string(),
  number: z.number(),
  title: z.string(),
  user: GitHubIssueAssigneeSchema,
  labels: z.array(GitHubLabelSchema),
  state: z.string(),
  locked: z.boolean(),
  assignee: GitHubIssueAssigneeSchema.nullable(),
  assignees: z.array(GitHubIssueAssigneeSchema),
  milestone: GitHubMilestoneSchema.nullable(),
  comments: z.number(),
  created_at: z.string(),
  updated_at: z.string(),
  closed_at: z.string().nullable(),
  body: z.string().nullable(),
});

// Search-related schemas

/** Repository search response envelope. */
export const GitHubSearchResponseSchema = z.object({
  total_count: z.number(),
  incomplete_results: z.boolean(),
  items: z.array(GitHubRepositorySchema),
});

// Pull request schemas

/** `head`/`base` ref of a pull request, including the backing repository. */
export const GitHubPullRequestRefSchema = z.object({
  label: z.string(),
  ref: z.string(),
  sha: z.string(),
  user: GitHubIssueAssigneeSchema,
  repo: GitHubRepositorySchema,
});

/** A pull request; `merged_at`/`merge_commit_sha` may be null. */
export const GitHubPullRequestSchema = z.object({
  url: z.string(),
  id: z.number(),
  node_id: z.string(),
  html_url: z.string(),
  diff_url: z.string(),
  patch_url: z.string(),
  issue_url: z.string(),
  number: z.number(),
  state: z.string(),
  locked: z.boolean(),
  title: z.string(),
  user: GitHubIssueAssigneeSchema,
  body: z.string().nullable(),
  created_at: z.string(),
  updated_at: z.string(),
  closed_at: z.string().nullable(),
  merged_at: z.string().nullable(),
  merge_commit_sha: z.string().nullable(),
  assignee: GitHubIssueAssigneeSchema.nullable(),
  assignees: z.array(GitHubIssueAssigneeSchema),
  requested_reviewers: z.array(GitHubIssueAssigneeSchema),
  labels: z.array(GitHubLabelSchema),
  head: GitHubPullRequestRefSchema,
  base: GitHubPullRequestRefSchema,
});
// Export types

// Inferred TypeScript types, one per schema above.
export type GitHubAuthor = z.infer<typeof GitHubAuthorSchema>;
export type GitHubRepository = z.infer<typeof GitHubRepositorySchema>;
export type GitHubFileContent = z.infer<typeof GitHubFileContentSchema>;
export type GitHubDirectoryContent = z.infer<typeof GitHubDirectoryContentSchema>;
export type GitHubContent = z.infer<typeof GitHubContentSchema>;
export type GitHubTree = z.infer<typeof GitHubTreeSchema>;
export type GitHubCommit = z.infer<typeof GitHubCommitSchema>;
export type GitHubListCommits = z.infer<typeof GitHubListCommitsSchema>;
export type GitHubReference = z.infer<typeof GitHubReferenceSchema>;
export type GitHubIssueAssignee = z.infer<typeof GitHubIssueAssigneeSchema>;
export type GitHubLabel = z.infer<typeof GitHubLabelSchema>;
export type GitHubMilestone = z.infer<typeof GitHubMilestoneSchema>;
export type GitHubIssue = z.infer<typeof GitHubIssueSchema>;
export type GitHubSearchResponse = z.infer<typeof GitHubSearchResponseSchema>;
export type GitHubPullRequest = z.infer<typeof GitHubPullRequestSchema>;
export type GitHubPullRequestRef = z.infer<typeof GitHubPullRequestRefSchema>;
@@ -1,138 +0,0 @@
|
||||
import { getUserAgent } from "universal-user-agent";
|
||||
import { createGitHubError } from "./errors.js";
|
||||
import { VERSION } from "./version.js";
|
||||
|
||||
/** Options accepted by `githubRequest`; every field is optional. */
type RequestOptions = {
  method?: string;  // HTTP method; defaults to "GET"
  body?: unknown;   // JSON-serialized when present
  headers?: Record<string, string>;  // merged over the default headers
}
async function parseResponseBody(response: Response): Promise<unknown> {
|
||||
const contentType = response.headers.get("content-type");
|
||||
if (contentType?.includes("application/json")) {
|
||||
return response.json();
|
||||
}
|
||||
return response.text();
|
||||
}
|
||||
|
||||
export function buildUrl(baseUrl: string, params: Record<string, string | number | undefined>): string {
|
||||
const url = new URL(baseUrl);
|
||||
Object.entries(params).forEach(([key, value]) => {
|
||||
if (value !== undefined) {
|
||||
url.searchParams.append(key, value.toString());
|
||||
}
|
||||
});
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
// User-Agent sent with every request; embeds the server version.
const USER_AGENT = `modelcontextprotocol/servers/github/v${VERSION} ${getUserAgent()}`;

/**
 * Perform a request against the GitHub REST API.
 *
 * Adds JSON Accept/Content-Type headers and, when the
 * GITHUB_PERSONAL_ACCESS_TOKEN environment variable is set, a Bearer
 * Authorization header. The body, if any, is JSON-serialized.
 *
 * @returns the parsed response body (JSON object or raw text).
 * @throws a GitHubError subclass (via createGitHubError) on non-2xx responses.
 */
export async function githubRequest(
  url: string,
  options: RequestOptions = {}
): Promise<unknown> {
  const headers: Record<string, string> = {
    "Accept": "application/vnd.github.v3+json",
    "Content-Type": "application/json",
    "User-Agent": USER_AGENT,
    // Caller-supplied headers win over the defaults.
    ...options.headers,
  };

  if (process.env.GITHUB_PERSONAL_ACCESS_TOKEN) {
    headers["Authorization"] = `Bearer ${process.env.GITHUB_PERSONAL_ACCESS_TOKEN}`;
  }

  const response = await fetch(url, {
    method: options.method || "GET",
    headers,
    body: options.body ? JSON.stringify(options.body) : undefined,
  });

  // Parse before the ok-check so error responses carry their body too.
  const responseBody = await parseResponseBody(response);

  if (!response.ok) {
    throw createGitHubError(response.status, responseBody);
  }

  return responseBody;
}
export function validateBranchName(branch: string): string {
|
||||
const sanitized = branch.trim();
|
||||
if (!sanitized) {
|
||||
throw new Error("Branch name cannot be empty");
|
||||
}
|
||||
if (sanitized.includes("..")) {
|
||||
throw new Error("Branch name cannot contain '..'");
|
||||
}
|
||||
if (/[\s~^:?*[\\\]]/.test(sanitized)) {
|
||||
throw new Error("Branch name contains invalid characters");
|
||||
}
|
||||
if (sanitized.startsWith("/") || sanitized.endsWith("/")) {
|
||||
throw new Error("Branch name cannot start or end with '/'");
|
||||
}
|
||||
if (sanitized.endsWith(".lock")) {
|
||||
throw new Error("Branch name cannot end with '.lock'");
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
export function validateRepositoryName(name: string): string {
|
||||
const sanitized = name.trim().toLowerCase();
|
||||
if (!sanitized) {
|
||||
throw new Error("Repository name cannot be empty");
|
||||
}
|
||||
if (!/^[a-z0-9_.-]+$/.test(sanitized)) {
|
||||
throw new Error(
|
||||
"Repository name can only contain lowercase letters, numbers, hyphens, periods, and underscores"
|
||||
);
|
||||
}
|
||||
if (sanitized.startsWith(".") || sanitized.endsWith(".")) {
|
||||
throw new Error("Repository name cannot start or end with a period");
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
export function validateOwnerName(owner: string): string {
|
||||
const sanitized = owner.trim().toLowerCase();
|
||||
if (!sanitized) {
|
||||
throw new Error("Owner name cannot be empty");
|
||||
}
|
||||
if (!/^[a-z0-9](?:[a-z0-9]|-(?=[a-z0-9])){0,38}$/.test(sanitized)) {
|
||||
throw new Error(
|
||||
"Owner name must start with a letter or number and can contain up to 39 characters"
|
||||
);
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
/**
 * Probe the branches API to see whether `branch` exists in `owner`/`repo`.
 * A 404 from the API means "does not exist"; any other error is re-thrown.
 */
export async function checkBranchExists(
  owner: string,
  repo: string,
  branch: string
): Promise<boolean> {
  try {
    await githubRequest(
      `https://api.github.com/repos/${owner}/${repo}/branches/${branch}`
    );
    return true;
  } catch (error) {
    // Only a 404 (resource not found) maps to "branch is absent".
    if (error && typeof error === "object" && "status" in error && error.status === 404) {
      return false;
    }
    throw error;
  }
}
/**
 * Probe the users API to see whether `username` exists.
 * A 404 from the API means "does not exist"; any other error is re-thrown.
 */
export async function checkUserExists(username: string): Promise<boolean> {
  try {
    await githubRequest(`https://api.github.com/users/${username}`);
    return true;
  } catch (error) {
    // Only a 404 (resource not found) maps to "user is absent".
    if (error && typeof error === "object" && "status" in error && error.status === 404) {
      return false;
    }
    throw error;
  }
}
@@ -1,3 +0,0 @@
|
||||
// Keep this file as a bare `export const VERSION = "..."`; the
// .github/workflows/version-check.yml workflow parses exactly this format.
export const VERSION = "0.6.2";
@@ -1,517 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import { z } from 'zod';
|
||||
import { zodToJsonSchema } from 'zod-to-json-schema';
|
||||
import fetch, { Request, Response } from 'node-fetch';
|
||||
|
||||
import * as repository from './operations/repository.js';
|
||||
import * as files from './operations/files.js';
|
||||
import * as issues from './operations/issues.js';
|
||||
import * as pulls from './operations/pulls.js';
|
||||
import * as branches from './operations/branches.js';
|
||||
import * as search from './operations/search.js';
|
||||
import * as commits from './operations/commits.js';
|
||||
import {
|
||||
GitHubError,
|
||||
GitHubValidationError,
|
||||
GitHubResourceNotFoundError,
|
||||
GitHubAuthenticationError,
|
||||
GitHubPermissionError,
|
||||
GitHubRateLimitError,
|
||||
GitHubConflictError,
|
||||
isGitHubError,
|
||||
} from './common/errors.js';
|
||||
import { VERSION } from "./common/version.js";
|
||||
|
||||
// If fetch doesn't exist in global scope, add it.
// Uses the node-fetch import above; the double cast bridges node-fetch's type
// to the built-in fetch signature.
if (!globalThis.fetch) {
  globalThis.fetch = fetch as unknown as typeof global.fetch;
}
|
||||
|
||||
// MCP server instance. Only the `tools` capability is advertised; the actual
// tool list and dispatch are registered via setRequestHandler below.
const server = new Server(
  {
    name: "github-mcp-server",
    version: VERSION,
  },
  {
    capabilities: {
      tools: {},
    },
  }
);
|
||||
|
||||
function formatGitHubError(error: GitHubError): string {
|
||||
let message = `GitHub API Error: ${error.message}`;
|
||||
|
||||
if (error instanceof GitHubValidationError) {
|
||||
message = `Validation Error: ${error.message}`;
|
||||
if (error.response) {
|
||||
message += `\nDetails: ${JSON.stringify(error.response)}`;
|
||||
}
|
||||
} else if (error instanceof GitHubResourceNotFoundError) {
|
||||
message = `Not Found: ${error.message}`;
|
||||
} else if (error instanceof GitHubAuthenticationError) {
|
||||
message = `Authentication Failed: ${error.message}`;
|
||||
} else if (error instanceof GitHubPermissionError) {
|
||||
message = `Permission Denied: ${error.message}`;
|
||||
} else if (error instanceof GitHubRateLimitError) {
|
||||
message = `Rate Limit Exceeded: ${error.message}\nResets at: ${error.resetAt.toISOString()}`;
|
||||
} else if (error instanceof GitHubConflictError) {
|
||||
message = `Conflict: ${error.message}`;
|
||||
}
|
||||
|
||||
return message;
|
||||
}
|
||||
|
||||
// Advertise the full tool catalog. Each tool's JSON input schema is generated
// from its zod schema so validation and the advertised schema stay in sync.
server.setRequestHandler(ListToolsRequestSchema, async () => {
  return {
    tools: [
      {
        name: "create_or_update_file",
        description: "Create or update a single file in a GitHub repository",
        inputSchema: zodToJsonSchema(files.CreateOrUpdateFileSchema),
      },
      {
        name: "search_repositories",
        description: "Search for GitHub repositories",
        inputSchema: zodToJsonSchema(repository.SearchRepositoriesSchema),
      },
      {
        name: "create_repository",
        description: "Create a new GitHub repository in your account",
        inputSchema: zodToJsonSchema(repository.CreateRepositoryOptionsSchema),
      },
      {
        name: "get_file_contents",
        description: "Get the contents of a file or directory from a GitHub repository",
        inputSchema: zodToJsonSchema(files.GetFileContentsSchema),
      },
      {
        name: "push_files",
        description: "Push multiple files to a GitHub repository in a single commit",
        inputSchema: zodToJsonSchema(files.PushFilesSchema),
      },
      {
        name: "create_issue",
        description: "Create a new issue in a GitHub repository",
        inputSchema: zodToJsonSchema(issues.CreateIssueSchema),
      },
      {
        name: "create_pull_request",
        description: "Create a new pull request in a GitHub repository",
        inputSchema: zodToJsonSchema(pulls.CreatePullRequestSchema),
      },
      {
        name: "fork_repository",
        description: "Fork a GitHub repository to your account or specified organization",
        inputSchema: zodToJsonSchema(repository.ForkRepositorySchema),
      },
      {
        name: "create_branch",
        description: "Create a new branch in a GitHub repository",
        inputSchema: zodToJsonSchema(branches.CreateBranchSchema),
      },
      {
        name: "list_commits",
        description: "Get list of commits of a branch in a GitHub repository",
        inputSchema: zodToJsonSchema(commits.ListCommitsSchema)
      },
      {
        name: "list_issues",
        description: "List issues in a GitHub repository with filtering options",
        inputSchema: zodToJsonSchema(issues.ListIssuesOptionsSchema)
      },
      {
        name: "update_issue",
        description: "Update an existing issue in a GitHub repository",
        inputSchema: zodToJsonSchema(issues.UpdateIssueOptionsSchema)
      },
      {
        name: "add_issue_comment",
        description: "Add a comment to an existing issue",
        inputSchema: zodToJsonSchema(issues.IssueCommentSchema)
      },
      {
        name: "search_code",
        description: "Search for code across GitHub repositories",
        inputSchema: zodToJsonSchema(search.SearchCodeSchema),
      },
      {
        name: "search_issues",
        description: "Search for issues and pull requests across GitHub repositories",
        inputSchema: zodToJsonSchema(search.SearchIssuesSchema),
      },
      {
        name: "search_users",
        description: "Search for users on GitHub",
        inputSchema: zodToJsonSchema(search.SearchUsersSchema),
      },
      {
        name: "get_issue",
        description: "Get details of a specific issue in a GitHub repository.",
        inputSchema: zodToJsonSchema(issues.GetIssueSchema)
      },
      {
        name: "get_pull_request",
        description: "Get details of a specific pull request",
        inputSchema: zodToJsonSchema(pulls.GetPullRequestSchema)
      },
      {
        name: "list_pull_requests",
        description: "List and filter repository pull requests",
        inputSchema: zodToJsonSchema(pulls.ListPullRequestsSchema)
      },
      {
        name: "create_pull_request_review",
        description: "Create a review on a pull request",
        inputSchema: zodToJsonSchema(pulls.CreatePullRequestReviewSchema)
      },
      {
        name: "merge_pull_request",
        description: "Merge a pull request",
        inputSchema: zodToJsonSchema(pulls.MergePullRequestSchema)
      },
      {
        name: "get_pull_request_files",
        description: "Get the list of files changed in a pull request",
        inputSchema: zodToJsonSchema(pulls.GetPullRequestFilesSchema)
      },
      {
        name: "get_pull_request_status",
        description: "Get the combined status of all status checks for a pull request",
        inputSchema: zodToJsonSchema(pulls.GetPullRequestStatusSchema)
      },
      {
        name: "update_pull_request_branch",
        description: "Update a pull request branch with the latest changes from the base branch",
        inputSchema: zodToJsonSchema(pulls.UpdatePullRequestBranchSchema)
      },
      {
        name: "get_pull_request_comments",
        description: "Get the review comments on a pull request",
        inputSchema: zodToJsonSchema(pulls.GetPullRequestCommentsSchema)
      },
      {
        name: "get_pull_request_reviews",
        description: "Get the reviews on a pull request",
        inputSchema: zodToJsonSchema(pulls.GetPullRequestReviewsSchema)
      }
    ],
  };
});
|
||||
|
||||
// Dispatch a tool invocation: validate the arguments against the tool's zod
// schema, call the matching operations helper, and return the result
// serialized as pretty-printed JSON text content.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  try {
    if (!request.params.arguments) {
      throw new Error("Arguments are required");
    }

    switch (request.params.name) {
      case "fork_repository": {
        const args = repository.ForkRepositorySchema.parse(request.params.arguments);
        const fork = await repository.forkRepository(args.owner, args.repo, args.organization);
        return {
          content: [{ type: "text", text: JSON.stringify(fork, null, 2) }],
        };
      }

      case "create_branch": {
        const args = branches.CreateBranchSchema.parse(request.params.arguments);
        const branch = await branches.createBranchFromRef(
          args.owner,
          args.repo,
          args.branch,
          args.from_branch
        );
        return {
          content: [{ type: "text", text: JSON.stringify(branch, null, 2) }],
        };
      }

      case "search_repositories": {
        const args = repository.SearchRepositoriesSchema.parse(request.params.arguments);
        const results = await repository.searchRepositories(
          args.query,
          args.page,
          args.perPage
        );
        return {
          content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
        };
      }

      case "create_repository": {
        const args = repository.CreateRepositoryOptionsSchema.parse(request.params.arguments);
        const result = await repository.createRepository(args);
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }

      case "get_file_contents": {
        const args = files.GetFileContentsSchema.parse(request.params.arguments);
        const contents = await files.getFileContents(
          args.owner,
          args.repo,
          args.path,
          args.branch
        );
        return {
          content: [{ type: "text", text: JSON.stringify(contents, null, 2) }],
        };
      }

      case "create_or_update_file": {
        const args = files.CreateOrUpdateFileSchema.parse(request.params.arguments);
        const result = await files.createOrUpdateFile(
          args.owner,
          args.repo,
          args.path,
          args.content,
          args.message,
          args.branch,
          args.sha
        );
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }

      case "push_files": {
        const args = files.PushFilesSchema.parse(request.params.arguments);
        const result = await files.pushFiles(
          args.owner,
          args.repo,
          args.branch,
          args.files,
          args.message
        );
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }

      case "create_issue": {
        const args = issues.CreateIssueSchema.parse(request.params.arguments);
        const { owner, repo, ...options } = args;

        // NOTE(review): verbose [DEBUG] stderr logging left in place below;
        // consider gating it behind an env flag.
        try {
          console.error(`[DEBUG] Attempting to create issue in ${owner}/${repo}`);
          console.error(`[DEBUG] Issue options:`, JSON.stringify(options, null, 2));

          const issue = await issues.createIssue(owner, repo, options);

          console.error(`[DEBUG] Issue created successfully`);
          return {
            content: [{ type: "text", text: JSON.stringify(issue, null, 2) }],
          };
        } catch (err) {
          // Type guard for Error objects
          const error = err instanceof Error ? err : new Error(String(err));

          console.error(`[ERROR] Failed to create issue:`, error);

          // A 404 on issue creation usually means the repo itself was wrong,
          // so give the caller a checklist instead of the raw API error.
          if (error instanceof GitHubResourceNotFoundError) {
            throw new Error(
              `Repository '${owner}/${repo}' not found. Please verify:\n` +
              `1. The repository exists\n` +
              `2. You have correct access permissions\n` +
              `3. The owner and repository names are spelled correctly`
            );
          }

          // Safely access error properties
          throw new Error(
            `Failed to create issue: ${error.message}${
              error.stack ? `\nStack: ${error.stack}` : ''
            }`
          );
        }
      }

      case "create_pull_request": {
        const args = pulls.CreatePullRequestSchema.parse(request.params.arguments);
        const pullRequest = await pulls.createPullRequest(args);
        return {
          content: [{ type: "text", text: JSON.stringify(pullRequest, null, 2) }],
        };
      }

      case "search_code": {
        const args = search.SearchCodeSchema.parse(request.params.arguments);
        const results = await search.searchCode(args);
        return {
          content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
        };
      }

      case "search_issues": {
        const args = search.SearchIssuesSchema.parse(request.params.arguments);
        const results = await search.searchIssues(args);
        return {
          content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
        };
      }

      case "search_users": {
        const args = search.SearchUsersSchema.parse(request.params.arguments);
        const results = await search.searchUsers(args);
        return {
          content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
        };
      }

      case "list_issues": {
        const args = issues.ListIssuesOptionsSchema.parse(request.params.arguments);
        const { owner, repo, ...options } = args;
        const result = await issues.listIssues(owner, repo, options);
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }

      case "update_issue": {
        const args = issues.UpdateIssueOptionsSchema.parse(request.params.arguments);
        const { owner, repo, issue_number, ...options } = args;
        const result = await issues.updateIssue(owner, repo, issue_number, options);
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }

      case "add_issue_comment": {
        const args = issues.IssueCommentSchema.parse(request.params.arguments);
        const { owner, repo, issue_number, body } = args;
        const result = await issues.addIssueComment(owner, repo, issue_number, body);
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }

      case "list_commits": {
        const args = commits.ListCommitsSchema.parse(request.params.arguments);
        const results = await commits.listCommits(
          args.owner,
          args.repo,
          args.page,
          args.perPage,
          args.sha
        );
        return {
          content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
        };
      }

      case "get_issue": {
        const args = issues.GetIssueSchema.parse(request.params.arguments);
        const issue = await issues.getIssue(args.owner, args.repo, args.issue_number);
        return {
          content: [{ type: "text", text: JSON.stringify(issue, null, 2) }],
        };
      }

      case "get_pull_request": {
        const args = pulls.GetPullRequestSchema.parse(request.params.arguments);
        const pullRequest = await pulls.getPullRequest(args.owner, args.repo, args.pull_number);
        return {
          content: [{ type: "text", text: JSON.stringify(pullRequest, null, 2) }],
        };
      }

      case "list_pull_requests": {
        const args = pulls.ListPullRequestsSchema.parse(request.params.arguments);
        const { owner, repo, ...options } = args;
        const pullRequests = await pulls.listPullRequests(owner, repo, options);
        return {
          content: [{ type: "text", text: JSON.stringify(pullRequests, null, 2) }],
        };
      }

      case "create_pull_request_review": {
        const args = pulls.CreatePullRequestReviewSchema.parse(request.params.arguments);
        const { owner, repo, pull_number, ...options } = args;
        const review = await pulls.createPullRequestReview(owner, repo, pull_number, options);
        return {
          content: [{ type: "text", text: JSON.stringify(review, null, 2) }],
        };
      }

      case "merge_pull_request": {
        const args = pulls.MergePullRequestSchema.parse(request.params.arguments);
        const { owner, repo, pull_number, ...options } = args;
        const result = await pulls.mergePullRequest(owner, repo, pull_number, options);
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }

      case "get_pull_request_files": {
        const args = pulls.GetPullRequestFilesSchema.parse(request.params.arguments);
        const files = await pulls.getPullRequestFiles(args.owner, args.repo, args.pull_number);
        return {
          content: [{ type: "text", text: JSON.stringify(files, null, 2) }],
        };
      }

      case "get_pull_request_status": {
        const args = pulls.GetPullRequestStatusSchema.parse(request.params.arguments);
        const status = await pulls.getPullRequestStatus(args.owner, args.repo, args.pull_number);
        return {
          content: [{ type: "text", text: JSON.stringify(status, null, 2) }],
        };
      }

      case "update_pull_request_branch": {
        const args = pulls.UpdatePullRequestBranchSchema.parse(request.params.arguments);
        const { owner, repo, pull_number, expected_head_sha } = args;
        await pulls.updatePullRequestBranch(owner, repo, pull_number, expected_head_sha);
        return {
          content: [{ type: "text", text: JSON.stringify({ success: true }, null, 2) }],
        };
      }

      case "get_pull_request_comments": {
        const args = pulls.GetPullRequestCommentsSchema.parse(request.params.arguments);
        const comments = await pulls.getPullRequestComments(args.owner, args.repo, args.pull_number);
        return {
          content: [{ type: "text", text: JSON.stringify(comments, null, 2) }],
        };
      }

      case "get_pull_request_reviews": {
        const args = pulls.GetPullRequestReviewsSchema.parse(request.params.arguments);
        const reviews = await pulls.getPullRequestReviews(args.owner, args.repo, args.pull_number);
        return {
          content: [{ type: "text", text: JSON.stringify(reviews, null, 2) }],
        };
      }

      default:
        throw new Error(`Unknown tool: ${request.params.name}`);
    }
  } catch (error) {
    // Surface schema validation failures with the offending field details.
    if (error instanceof z.ZodError) {
      throw new Error(`Invalid input: ${JSON.stringify(error.errors)}`);
    }
    // Translate typed GitHub API errors into readable messages.
    if (isGitHubError(error)) {
      throw new Error(formatGitHubError(error));
    }
    throw error;
  }
});
|
||||
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("GitHub MCP Server running on stdio");
|
||||
}
|
||||
|
||||
// Entry point: start the server and exit non-zero on any fatal startup error.
runServer().catch((error) => {
  console.error("Fatal error in main():", error);
  process.exit(1);
});
|
||||
@@ -1,112 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { githubRequest } from "../common/utils.js";
|
||||
import { GitHubReferenceSchema } from "../common/types.js";
|
||||
|
||||
// Schema definitions
|
||||
// Low-level payload for the git refs API: short branch name plus target SHA.
export const CreateBranchOptionsSchema = z.object({
  ref: z.string(),
  sha: z.string(),
});

// Tool input for creating a branch; from_branch falls back to the
// repository's default branch when omitted.
export const CreateBranchSchema = z.object({
  owner: z.string().describe("Repository owner (username or organization)"),
  repo: z.string().describe("Repository name"),
  branch: z.string().describe("Name for the new branch"),
  from_branch: z.string().optional().describe("Optional: source branch to create from (defaults to the repository's default branch)"),
});

// Type exports
export type CreateBranchOptions = z.infer<typeof CreateBranchOptionsSchema>;
|
||||
|
||||
// Function implementations
|
||||
// Resolve the commit SHA at the tip of the repository's default branch.
// Tries 'main' first, then falls back to 'master' if that lookup fails.
// NOTE(review): repositories whose default branch is neither 'main' nor
// 'master' are not handled — the 'master' request's error propagates as-is.
export async function getDefaultBranchSHA(owner: string, repo: string): Promise<string> {
  try {
    const response = await githubRequest(
      `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/main`
    );
    const data = GitHubReferenceSchema.parse(response);
    return data.object.sha;
  } catch (error) {
    // 'main' missing (or request failed) — try 'master'.
    const masterResponse = await githubRequest(
      `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/master`
    );
    if (!masterResponse) {
      throw new Error("Could not find default branch (tried 'main' and 'master')");
    }
    const data = GitHubReferenceSchema.parse(masterResponse);
    return data.object.sha;
  }
}
|
||||
|
||||
export async function createBranch(
|
||||
owner: string,
|
||||
repo: string,
|
||||
options: CreateBranchOptions
|
||||
): Promise<z.infer<typeof GitHubReferenceSchema>> {
|
||||
const fullRef = `refs/heads/${options.ref}`;
|
||||
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/git/refs`,
|
||||
{
|
||||
method: "POST",
|
||||
body: {
|
||||
ref: fullRef,
|
||||
sha: options.sha,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return GitHubReferenceSchema.parse(response);
|
||||
}
|
||||
|
||||
export async function getBranchSHA(
|
||||
owner: string,
|
||||
repo: string,
|
||||
branch: string
|
||||
): Promise<string> {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`
|
||||
);
|
||||
|
||||
const data = GitHubReferenceSchema.parse(response);
|
||||
return data.object.sha;
|
||||
}
|
||||
|
||||
export async function createBranchFromRef(
|
||||
owner: string,
|
||||
repo: string,
|
||||
newBranch: string,
|
||||
fromBranch?: string
|
||||
): Promise<z.infer<typeof GitHubReferenceSchema>> {
|
||||
let sha: string;
|
||||
if (fromBranch) {
|
||||
sha = await getBranchSHA(owner, repo, fromBranch);
|
||||
} else {
|
||||
sha = await getDefaultBranchSHA(owner, repo);
|
||||
}
|
||||
|
||||
return createBranch(owner, repo, {
|
||||
ref: newBranch,
|
||||
sha,
|
||||
});
|
||||
}
|
||||
|
||||
export async function updateBranch(
|
||||
owner: string,
|
||||
repo: string,
|
||||
branch: string,
|
||||
sha: string
|
||||
): Promise<z.infer<typeof GitHubReferenceSchema>> {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`,
|
||||
{
|
||||
method: "PATCH",
|
||||
body: {
|
||||
sha,
|
||||
force: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return GitHubReferenceSchema.parse(response);
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { githubRequest, buildUrl } from "../common/utils.js";
|
||||
|
||||
// Tool input for listing commits; sha selects a branch/commit to start from,
// page/perPage paginate the result.
export const ListCommitsSchema = z.object({
  owner: z.string(),
  repo: z.string(),
  sha: z.string().optional(),
  page: z.number().optional(),
  perPage: z.number().optional()
});
|
||||
|
||||
export async function listCommits(
|
||||
owner: string,
|
||||
repo: string,
|
||||
page?: number,
|
||||
perPage?: number,
|
||||
sha?: string
|
||||
) {
|
||||
return githubRequest(
|
||||
buildUrl(`https://api.github.com/repos/${owner}/${repo}/commits`, {
|
||||
page: page?.toString(),
|
||||
per_page: perPage?.toString(),
|
||||
sha
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -1,219 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { githubRequest } from "../common/utils.js";
|
||||
import {
|
||||
GitHubContentSchema,
|
||||
GitHubAuthorSchema,
|
||||
GitHubTreeSchema,
|
||||
GitHubCommitSchema,
|
||||
GitHubReferenceSchema,
|
||||
GitHubFileContentSchema,
|
||||
} from "../common/types.js";
|
||||
|
||||
// Schema definitions
|
||||
// A single file to write in a multi-file push.
export const FileOperationSchema = z.object({
  path: z.string(),
  content: z.string(),
});

// Tool input for create_or_update_file.
export const CreateOrUpdateFileSchema = z.object({
  owner: z.string().describe("Repository owner (username or organization)"),
  repo: z.string().describe("Repository name"),
  path: z.string().describe("Path where to create/update the file"),
  content: z.string().describe("Content of the file"),
  message: z.string().describe("Commit message"),
  branch: z.string().describe("Branch to create/update the file in"),
  sha: z.string().optional().describe("SHA of the file being replaced (required when updating existing files)"),
});

// Tool input for get_file_contents.
export const GetFileContentsSchema = z.object({
  owner: z.string().describe("Repository owner (username or organization)"),
  repo: z.string().describe("Repository name"),
  path: z.string().describe("Path to the file or directory"),
  branch: z.string().optional().describe("Branch to get contents from"),
});

// Tool input for push_files (multi-file single-commit push).
export const PushFilesSchema = z.object({
  owner: z.string().describe("Repository owner (username or organization)"),
  repo: z.string().describe("Repository name"),
  branch: z.string().describe("Branch to push to (e.g., 'main' or 'master')"),
  files: z.array(FileOperationSchema).describe("Array of files to push"),
  message: z.string().describe("Commit message"),
});

// Shape of the contents-API response to a create/update request.
export const GitHubCreateUpdateFileResponseSchema = z.object({
  content: GitHubFileContentSchema.nullable(),
  commit: z.object({
    sha: z.string(),
    node_id: z.string(),
    url: z.string(),
    html_url: z.string(),
    author: GitHubAuthorSchema,
    committer: GitHubAuthorSchema,
    message: z.string(),
    tree: z.object({
      sha: z.string(),
      url: z.string(),
    }),
    parents: z.array(
      z.object({
        sha: z.string(),
        url: z.string(),
        html_url: z.string(),
      })
    ),
  }),
});

// Type exports
export type FileOperation = z.infer<typeof FileOperationSchema>;
export type GitHubCreateUpdateFileResponse = z.infer<typeof GitHubCreateUpdateFileResponseSchema>;
|
||||
|
||||
// Function implementations
|
||||
export async function getFileContents(
|
||||
owner: string,
|
||||
repo: string,
|
||||
path: string,
|
||||
branch?: string
|
||||
) {
|
||||
let url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`;
|
||||
if (branch) {
|
||||
url += `?ref=${branch}`;
|
||||
}
|
||||
|
||||
const response = await githubRequest(url);
|
||||
const data = GitHubContentSchema.parse(response);
|
||||
|
||||
// If it's a file, decode the content
|
||||
if (!Array.isArray(data) && data.content) {
|
||||
data.content = Buffer.from(data.content, "base64").toString("utf8");
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
// Create a new file or update an existing one via the contents API.
// When no sha is supplied, the current file (if any) is looked up first so
// updates of existing files do not fail for lack of a blob SHA.
export async function createOrUpdateFile(
  owner: string,
  repo: string,
  path: string,
  content: string,
  message: string,
  branch: string,
  sha?: string
) {
  // The contents API requires base64-encoded file bodies.
  const encodedContent = Buffer.from(content).toString("base64");

  let currentSha = sha;
  if (!currentSha) {
    try {
      const existingFile = await getFileContents(owner, repo, path, branch);
      // Arrays mean the path is a directory; only single files have a sha.
      if (!Array.isArray(existingFile)) {
        currentSha = existingFile.sha;
      }
    } catch (error) {
      // Missing file is the create case — proceed without a sha.
      console.error("Note: File does not exist in branch, will create new file");
    }
  }

  const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`;
  const body = {
    message,
    content: encodedContent,
    branch,
    // Only include sha when updating an existing file.
    ...(currentSha ? { sha: currentSha } : {}),
  };

  const response = await githubRequest(url, {
    method: "PUT",
    body,
  });

  return GitHubCreateUpdateFileResponseSchema.parse(response);
}
|
||||
|
||||
async function createTree(
|
||||
owner: string,
|
||||
repo: string,
|
||||
files: FileOperation[],
|
||||
baseTree?: string
|
||||
) {
|
||||
const tree = files.map((file) => ({
|
||||
path: file.path,
|
||||
mode: "100644" as const,
|
||||
type: "blob" as const,
|
||||
content: file.content,
|
||||
}));
|
||||
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/git/trees`,
|
||||
{
|
||||
method: "POST",
|
||||
body: {
|
||||
tree,
|
||||
base_tree: baseTree,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return GitHubTreeSchema.parse(response);
|
||||
}
|
||||
|
||||
async function createCommit(
|
||||
owner: string,
|
||||
repo: string,
|
||||
message: string,
|
||||
tree: string,
|
||||
parents: string[]
|
||||
) {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/git/commits`,
|
||||
{
|
||||
method: "POST",
|
||||
body: {
|
||||
message,
|
||||
tree,
|
||||
parents,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return GitHubCommitSchema.parse(response);
|
||||
}
|
||||
|
||||
async function updateReference(
|
||||
owner: string,
|
||||
repo: string,
|
||||
ref: string,
|
||||
sha: string
|
||||
) {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/git/refs/${ref}`,
|
||||
{
|
||||
method: "PATCH",
|
||||
body: {
|
||||
sha,
|
||||
force: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return GitHubReferenceSchema.parse(response);
|
||||
}
|
||||
|
||||
export async function pushFiles(
|
||||
owner: string,
|
||||
repo: string,
|
||||
branch: string,
|
||||
files: FileOperation[],
|
||||
message: string
|
||||
) {
|
||||
const refResponse = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`
|
||||
);
|
||||
|
||||
const ref = GitHubReferenceSchema.parse(refResponse);
|
||||
const commitSha = ref.object.sha;
|
||||
|
||||
const tree = await createTree(owner, repo, files, commitSha);
|
||||
const commit = await createCommit(owner, repo, message, tree.sha, [commitSha]);
|
||||
return await updateReference(owner, repo, `heads/${branch}`, commit.sha);
|
||||
}
|
||||
@@ -1,118 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { githubRequest, buildUrl } from "../common/utils.js";
|
||||
|
||||
// Tool input for fetching a single issue.
export const GetIssueSchema = z.object({
  owner: z.string(),
  repo: z.string(),
  issue_number: z.number(),
});

// Tool input for commenting on an issue.
export const IssueCommentSchema = z.object({
  owner: z.string(),
  repo: z.string(),
  issue_number: z.number(),
  body: z.string(),
});

// Issue fields sent to the API when creating an issue (no repo coordinates).
export const CreateIssueOptionsSchema = z.object({
  title: z.string(),
  body: z.string().optional(),
  assignees: z.array(z.string()).optional(),
  milestone: z.number().optional(),
  labels: z.array(z.string()).optional(),
});

// Tool input for create_issue: repo coordinates plus the issue fields above.
export const CreateIssueSchema = z.object({
  owner: z.string(),
  repo: z.string(),
  ...CreateIssueOptionsSchema.shape,
});

// Tool input for list_issues with the API's filter/sort/pagination options.
export const ListIssuesOptionsSchema = z.object({
  owner: z.string(),
  repo: z.string(),
  direction: z.enum(["asc", "desc"]).optional(),
  labels: z.array(z.string()).optional(),
  page: z.number().optional(),
  per_page: z.number().optional(),
  since: z.string().optional(),
  sort: z.enum(["created", "updated", "comments"]).optional(),
  state: z.enum(["open", "closed", "all"]).optional(),
});

// Tool input for update_issue; all mutable fields are optional.
export const UpdateIssueOptionsSchema = z.object({
  owner: z.string(),
  repo: z.string(),
  issue_number: z.number(),
  title: z.string().optional(),
  body: z.string().optional(),
  assignees: z.array(z.string()).optional(),
  milestone: z.number().optional(),
  labels: z.array(z.string()).optional(),
  state: z.enum(["open", "closed"]).optional(),
});
|
||||
|
||||
export async function getIssue(owner: string, repo: string, issue_number: number) {
|
||||
return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`);
|
||||
}
|
||||
|
||||
export async function addIssueComment(
|
||||
owner: string,
|
||||
repo: string,
|
||||
issue_number: number,
|
||||
body: string
|
||||
) {
|
||||
return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}/comments`, {
|
||||
method: "POST",
|
||||
body: { body },
|
||||
});
|
||||
}
|
||||
|
||||
export async function createIssue(
|
||||
owner: string,
|
||||
repo: string,
|
||||
options: z.infer<typeof CreateIssueOptionsSchema>
|
||||
) {
|
||||
return githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/issues`,
|
||||
{
|
||||
method: "POST",
|
||||
body: options,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export async function listIssues(
|
||||
owner: string,
|
||||
repo: string,
|
||||
options: Omit<z.infer<typeof ListIssuesOptionsSchema>, "owner" | "repo">
|
||||
) {
|
||||
const urlParams: Record<string, string | undefined> = {
|
||||
direction: options.direction,
|
||||
labels: options.labels?.join(","),
|
||||
page: options.page?.toString(),
|
||||
per_page: options.per_page?.toString(),
|
||||
since: options.since,
|
||||
sort: options.sort,
|
||||
state: options.state
|
||||
};
|
||||
|
||||
return githubRequest(
|
||||
buildUrl(`https://api.github.com/repos/${owner}/${repo}/issues`, urlParams)
|
||||
);
|
||||
}
|
||||
|
||||
export async function updateIssue(
|
||||
owner: string,
|
||||
repo: string,
|
||||
issue_number: number,
|
||||
options: Omit<z.infer<typeof UpdateIssueOptionsSchema>, "owner" | "repo" | "issue_number">
|
||||
) {
|
||||
return githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`,
|
||||
{
|
||||
method: "PATCH",
|
||||
body: options,
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -1,311 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { githubRequest } from "../common/utils.js";
|
||||
import {
|
||||
GitHubPullRequestSchema,
|
||||
GitHubIssueAssigneeSchema,
|
||||
GitHubRepositorySchema,
|
||||
} from "../common/types.js";
|
||||
|
||||
// Schema definitions
|
||||
export const PullRequestFileSchema = z.object({
|
||||
sha: z.string(),
|
||||
filename: z.string(),
|
||||
status: z.enum(['added', 'removed', 'modified', 'renamed', 'copied', 'changed', 'unchanged']),
|
||||
additions: z.number(),
|
||||
deletions: z.number(),
|
||||
changes: z.number(),
|
||||
blob_url: z.string(),
|
||||
raw_url: z.string(),
|
||||
contents_url: z.string(),
|
||||
patch: z.string().optional()
|
||||
});
|
||||
|
||||
export const StatusCheckSchema = z.object({
|
||||
url: z.string(),
|
||||
state: z.enum(['error', 'failure', 'pending', 'success']),
|
||||
description: z.string().nullable(),
|
||||
target_url: z.string().nullable(),
|
||||
context: z.string(),
|
||||
created_at: z.string(),
|
||||
updated_at: z.string()
|
||||
});
|
||||
|
||||
export const CombinedStatusSchema = z.object({
|
||||
state: z.enum(['error', 'failure', 'pending', 'success']),
|
||||
statuses: z.array(StatusCheckSchema),
|
||||
sha: z.string(),
|
||||
total_count: z.number()
|
||||
});
|
||||
|
||||
export const PullRequestCommentSchema = z.object({
|
||||
url: z.string(),
|
||||
id: z.number(),
|
||||
node_id: z.string(),
|
||||
pull_request_review_id: z.number().nullable(),
|
||||
diff_hunk: z.string(),
|
||||
path: z.string().nullable(),
|
||||
position: z.number().nullable(),
|
||||
original_position: z.number().nullable(),
|
||||
commit_id: z.string(),
|
||||
original_commit_id: z.string(),
|
||||
user: GitHubIssueAssigneeSchema,
|
||||
body: z.string(),
|
||||
created_at: z.string(),
|
||||
updated_at: z.string(),
|
||||
html_url: z.string(),
|
||||
pull_request_url: z.string(),
|
||||
author_association: z.string(),
|
||||
_links: z.object({
|
||||
self: z.object({ href: z.string() }),
|
||||
html: z.object({ href: z.string() }),
|
||||
pull_request: z.object({ href: z.string() })
|
||||
})
|
||||
});
|
||||
|
||||
export const PullRequestReviewSchema = z.object({
|
||||
id: z.number(),
|
||||
node_id: z.string(),
|
||||
user: GitHubIssueAssigneeSchema,
|
||||
body: z.string().nullable(),
|
||||
state: z.enum(['APPROVED', 'CHANGES_REQUESTED', 'COMMENTED', 'DISMISSED', 'PENDING']),
|
||||
html_url: z.string(),
|
||||
pull_request_url: z.string(),
|
||||
commit_id: z.string(),
|
||||
submitted_at: z.string().nullable(),
|
||||
author_association: z.string()
|
||||
});
|
||||
|
||||
// Input schemas
|
||||
export const CreatePullRequestSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
title: z.string().describe("Pull request title"),
|
||||
body: z.string().optional().describe("Pull request body/description"),
|
||||
head: z.string().describe("The name of the branch where your changes are implemented"),
|
||||
base: z.string().describe("The name of the branch you want the changes pulled into"),
|
||||
draft: z.boolean().optional().describe("Whether to create the pull request as a draft"),
|
||||
maintainer_can_modify: z.boolean().optional().describe("Whether maintainers can modify the pull request")
|
||||
});
|
||||
|
||||
export const GetPullRequestSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number")
|
||||
});
|
||||
|
||||
export const ListPullRequestsSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
state: z.enum(['open', 'closed', 'all']).optional().describe("State of the pull requests to return"),
|
||||
head: z.string().optional().describe("Filter by head user or head organization and branch name"),
|
||||
base: z.string().optional().describe("Filter by base branch name"),
|
||||
sort: z.enum(['created', 'updated', 'popularity', 'long-running']).optional().describe("What to sort results by"),
|
||||
direction: z.enum(['asc', 'desc']).optional().describe("The direction of the sort"),
|
||||
per_page: z.number().optional().describe("Results per page (max 100)"),
|
||||
page: z.number().optional().describe("Page number of the results")
|
||||
});
|
||||
|
||||
export const CreatePullRequestReviewSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number"),
|
||||
commit_id: z.string().optional().describe("The SHA of the commit that needs a review"),
|
||||
body: z.string().describe("The body text of the review"),
|
||||
event: z.enum(['APPROVE', 'REQUEST_CHANGES', 'COMMENT']).describe("The review action to perform"),
|
||||
comments: z.array(
|
||||
z.union([
|
||||
z.object({
|
||||
path: z.string().describe("The relative path to the file being commented on"),
|
||||
position: z.number().describe("The position in the diff where you want to add a review comment"),
|
||||
body: z.string().describe("Text of the review comment")
|
||||
}),
|
||||
z.object({
|
||||
path: z.string().describe("The relative path to the file being commented on"),
|
||||
line: z.number().describe("The line number in the file where you want to add a review comment"),
|
||||
body: z.string().describe("Text of the review comment")
|
||||
})
|
||||
])
|
||||
).optional().describe("Comments to post as part of the review (specify either position or line, not both)")
|
||||
});
|
||||
|
||||
export const MergePullRequestSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number"),
|
||||
commit_title: z.string().optional().describe("Title for the automatic commit message"),
|
||||
commit_message: z.string().optional().describe("Extra detail to append to automatic commit message"),
|
||||
merge_method: z.enum(['merge', 'squash', 'rebase']).optional().describe("Merge method to use")
|
||||
});
|
||||
|
||||
export const GetPullRequestFilesSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number")
|
||||
});
|
||||
|
||||
export const GetPullRequestStatusSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number")
|
||||
});
|
||||
|
||||
export const UpdatePullRequestBranchSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number"),
|
||||
expected_head_sha: z.string().optional().describe("The expected SHA of the pull request's HEAD ref")
|
||||
});
|
||||
|
||||
export const GetPullRequestCommentsSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number")
|
||||
});
|
||||
|
||||
export const GetPullRequestReviewsSchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
pull_number: z.number().describe("Pull request number")
|
||||
});
|
||||
|
||||
// Function implementations
|
||||
export async function createPullRequest(
|
||||
params: z.infer<typeof CreatePullRequestSchema>
|
||||
): Promise<z.infer<typeof GitHubPullRequestSchema>> {
|
||||
const { owner, repo, ...options } = CreatePullRequestSchema.parse(params);
|
||||
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls`,
|
||||
{
|
||||
method: "POST",
|
||||
body: options,
|
||||
}
|
||||
);
|
||||
|
||||
return GitHubPullRequestSchema.parse(response);
|
||||
}
|
||||
|
||||
export async function getPullRequest(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number
|
||||
): Promise<z.infer<typeof GitHubPullRequestSchema>> {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}`
|
||||
);
|
||||
return GitHubPullRequestSchema.parse(response);
|
||||
}
|
||||
|
||||
export async function listPullRequests(
|
||||
owner: string,
|
||||
repo: string,
|
||||
options: Omit<z.infer<typeof ListPullRequestsSchema>, 'owner' | 'repo'>
|
||||
): Promise<z.infer<typeof GitHubPullRequestSchema>[]> {
|
||||
const url = new URL(`https://api.github.com/repos/${owner}/${repo}/pulls`);
|
||||
|
||||
if (options.state) url.searchParams.append('state', options.state);
|
||||
if (options.head) url.searchParams.append('head', options.head);
|
||||
if (options.base) url.searchParams.append('base', options.base);
|
||||
if (options.sort) url.searchParams.append('sort', options.sort);
|
||||
if (options.direction) url.searchParams.append('direction', options.direction);
|
||||
if (options.per_page) url.searchParams.append('per_page', options.per_page.toString());
|
||||
if (options.page) url.searchParams.append('page', options.page.toString());
|
||||
|
||||
const response = await githubRequest(url.toString());
|
||||
return z.array(GitHubPullRequestSchema).parse(response);
|
||||
}
|
||||
|
||||
export async function createPullRequestReview(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number,
|
||||
options: Omit<z.infer<typeof CreatePullRequestReviewSchema>, 'owner' | 'repo' | 'pull_number'>
|
||||
): Promise<z.infer<typeof PullRequestReviewSchema>> {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/reviews`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: options,
|
||||
}
|
||||
);
|
||||
return PullRequestReviewSchema.parse(response);
|
||||
}
|
||||
|
||||
export async function mergePullRequest(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number,
|
||||
options: Omit<z.infer<typeof MergePullRequestSchema>, 'owner' | 'repo' | 'pull_number'>
|
||||
): Promise<any> {
|
||||
return githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/merge`,
|
||||
{
|
||||
method: 'PUT',
|
||||
body: options,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export async function getPullRequestFiles(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number
|
||||
): Promise<z.infer<typeof PullRequestFileSchema>[]> {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/files`
|
||||
);
|
||||
return z.array(PullRequestFileSchema).parse(response);
|
||||
}
|
||||
|
||||
export async function updatePullRequestBranch(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number,
|
||||
expectedHeadSha?: string
|
||||
): Promise<void> {
|
||||
await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/update-branch`,
|
||||
{
|
||||
method: "PUT",
|
||||
body: expectedHeadSha ? { expected_head_sha: expectedHeadSha } : undefined,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export async function getPullRequestComments(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number
|
||||
): Promise<z.infer<typeof PullRequestCommentSchema>[]> {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/comments`
|
||||
);
|
||||
return z.array(PullRequestCommentSchema).parse(response);
|
||||
}
|
||||
|
||||
export async function getPullRequestReviews(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number
|
||||
): Promise<z.infer<typeof PullRequestReviewSchema>[]> {
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/reviews`
|
||||
);
|
||||
return z.array(PullRequestReviewSchema).parse(response);
|
||||
}
|
||||
|
||||
export async function getPullRequestStatus(
|
||||
owner: string,
|
||||
repo: string,
|
||||
pullNumber: number
|
||||
): Promise<z.infer<typeof CombinedStatusSchema>> {
|
||||
// First get the PR to get the head SHA
|
||||
const pr = await getPullRequest(owner, repo, pullNumber);
|
||||
const sha = pr.head.sha;
|
||||
|
||||
// Then get the combined status for that SHA
|
||||
const response = await githubRequest(
|
||||
`https://api.github.com/repos/${owner}/${repo}/commits/${sha}/status`
|
||||
);
|
||||
return CombinedStatusSchema.parse(response);
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { githubRequest } from "../common/utils.js";
|
||||
import { GitHubRepositorySchema, GitHubSearchResponseSchema } from "../common/types.js";
|
||||
|
||||
// Schema definitions
|
||||
export const CreateRepositoryOptionsSchema = z.object({
|
||||
name: z.string().describe("Repository name"),
|
||||
description: z.string().optional().describe("Repository description"),
|
||||
private: z.boolean().optional().describe("Whether the repository should be private"),
|
||||
autoInit: z.boolean().optional().describe("Initialize with README.md"),
|
||||
});
|
||||
|
||||
export const SearchRepositoriesSchema = z.object({
|
||||
query: z.string().describe("Search query (see GitHub search syntax)"),
|
||||
page: z.number().optional().describe("Page number for pagination (default: 1)"),
|
||||
perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"),
|
||||
});
|
||||
|
||||
export const ForkRepositorySchema = z.object({
|
||||
owner: z.string().describe("Repository owner (username or organization)"),
|
||||
repo: z.string().describe("Repository name"),
|
||||
organization: z.string().optional().describe("Optional: organization to fork to (defaults to your personal account)"),
|
||||
});
|
||||
|
||||
// Type exports
|
||||
export type CreateRepositoryOptions = z.infer<typeof CreateRepositoryOptionsSchema>;
|
||||
|
||||
// Function implementations
|
||||
export async function createRepository(options: CreateRepositoryOptions) {
|
||||
const response = await githubRequest("https://api.github.com/user/repos", {
|
||||
method: "POST",
|
||||
body: options,
|
||||
});
|
||||
return GitHubRepositorySchema.parse(response);
|
||||
}
|
||||
|
||||
export async function searchRepositories(
|
||||
query: string,
|
||||
page: number = 1,
|
||||
perPage: number = 30
|
||||
) {
|
||||
const url = new URL("https://api.github.com/search/repositories");
|
||||
url.searchParams.append("q", query);
|
||||
url.searchParams.append("page", page.toString());
|
||||
url.searchParams.append("per_page", perPage.toString());
|
||||
|
||||
const response = await githubRequest(url.toString());
|
||||
return GitHubSearchResponseSchema.parse(response);
|
||||
}
|
||||
|
||||
export async function forkRepository(
|
||||
owner: string,
|
||||
repo: string,
|
||||
organization?: string
|
||||
) {
|
||||
const url = organization
|
||||
? `https://api.github.com/repos/${owner}/${repo}/forks?organization=${organization}`
|
||||
: `https://api.github.com/repos/${owner}/${repo}/forks`;
|
||||
|
||||
const response = await githubRequest(url, { method: "POST" });
|
||||
return GitHubRepositorySchema.extend({
|
||||
parent: GitHubRepositorySchema,
|
||||
source: GitHubRepositorySchema,
|
||||
}).parse(response);
|
||||
}
|
||||
@@ -1,45 +0,0 @@
|
||||
import { z } from "zod";
|
||||
import { githubRequest, buildUrl } from "../common/utils.js";
|
||||
|
||||
export const SearchOptions = z.object({
|
||||
q: z.string(),
|
||||
order: z.enum(["asc", "desc"]).optional(),
|
||||
page: z.number().min(1).optional(),
|
||||
per_page: z.number().min(1).max(100).optional(),
|
||||
});
|
||||
|
||||
export const SearchUsersOptions = SearchOptions.extend({
|
||||
sort: z.enum(["followers", "repositories", "joined"]).optional(),
|
||||
});
|
||||
|
||||
export const SearchIssuesOptions = SearchOptions.extend({
|
||||
sort: z.enum([
|
||||
"comments",
|
||||
"reactions",
|
||||
"reactions-+1",
|
||||
"reactions--1",
|
||||
"reactions-smile",
|
||||
"reactions-thinking_face",
|
||||
"reactions-heart",
|
||||
"reactions-tada",
|
||||
"interactions",
|
||||
"created",
|
||||
"updated",
|
||||
]).optional(),
|
||||
});
|
||||
|
||||
export const SearchCodeSchema = SearchOptions;
|
||||
export const SearchUsersSchema = SearchUsersOptions;
|
||||
export const SearchIssuesSchema = SearchIssuesOptions;
|
||||
|
||||
export async function searchCode(params: z.infer<typeof SearchCodeSchema>) {
|
||||
return githubRequest(buildUrl("https://api.github.com/search/code", params));
|
||||
}
|
||||
|
||||
export async function searchIssues(params: z.infer<typeof SearchIssuesSchema>) {
|
||||
return githubRequest(buildUrl("https://api.github.com/search/issues", params));
|
||||
}
|
||||
|
||||
export async function searchUsers(params: z.infer<typeof SearchUsersSchema>) {
|
||||
return githubRequest(buildUrl("https://api.github.com/search/users", params));
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-github",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for using the GitHub API",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-github": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.0.1",
|
||||
"@types/node": "^22",
|
||||
"@types/node-fetch": "^2.6.12",
|
||||
"node-fetch": "^3.3.2",
|
||||
"universal-user-agent": "^7.0.2",
|
||||
"zod": "^3.22.4",
|
||||
"zod-to-json-schema": "^3.23.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder
|
||||
|
||||
COPY src/gitlab /app
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev
|
||||
|
||||
FROM node:22.12-alpine AS release
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/dist /app/dist
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,172 +0,0 @@
|
||||
# GitLab MCP Server
|
||||
|
||||
MCP Server for the GitLab API, enabling project management, file operations, and more.
|
||||
|
||||
### Features
|
||||
|
||||
- **Automatic Branch Creation**: When creating/updating files or pushing changes, branches are automatically created if they don't exist
|
||||
- **Comprehensive Error Handling**: Clear error messages for common issues
|
||||
- **Git History Preservation**: Operations maintain proper Git history without force pushing
|
||||
- **Batch Operations**: Support for both single-file and multi-file operations
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
1. `create_or_update_file`
|
||||
- Create or update a single file in a project
|
||||
- Inputs:
|
||||
- `project_id` (string): Project ID or URL-encoded path
|
||||
- `file_path` (string): Path where to create/update the file
|
||||
- `content` (string): Content of the file
|
||||
- `commit_message` (string): Commit message
|
||||
- `branch` (string): Branch to create/update the file in
|
||||
- `previous_path` (optional string): Path of the file to move/rename
|
||||
- Returns: File content and commit details
|
||||
|
||||
2. `push_files`
|
||||
- Push multiple files in a single commit
|
||||
- Inputs:
|
||||
- `project_id` (string): Project ID or URL-encoded path
|
||||
- `branch` (string): Branch to push to
|
||||
- `files` (array): Files to push, each with `file_path` and `content`
|
||||
- `commit_message` (string): Commit message
|
||||
- Returns: Updated branch reference
|
||||
|
||||
3. `search_repositories`
|
||||
- Search for GitLab projects
|
||||
- Inputs:
|
||||
- `search` (string): Search query
|
||||
- `page` (optional number): Page number for pagination
|
||||
- `per_page` (optional number): Results per page (default 20)
|
||||
- Returns: Project search results
|
||||
|
||||
4. `create_repository`
|
||||
- Create a new GitLab project
|
||||
- Inputs:
|
||||
- `name` (string): Project name
|
||||
- `description` (optional string): Project description
|
||||
- `visibility` (optional string): 'private', 'internal', or 'public'
|
||||
- `initialize_with_readme` (optional boolean): Initialize with README
|
||||
- Returns: Created project details
|
||||
|
||||
5. `get_file_contents`
|
||||
- Get contents of a file or directory
|
||||
- Inputs:
|
||||
- `project_id` (string): Project ID or URL-encoded path
|
||||
- `file_path` (string): Path to file/directory
|
||||
- `ref` (optional string): Branch/tag/commit to get contents from
|
||||
- Returns: File/directory contents
|
||||
|
||||
6. `create_issue`
|
||||
- Create a new issue
|
||||
- Inputs:
|
||||
- `project_id` (string): Project ID or URL-encoded path
|
||||
- `title` (string): Issue title
|
||||
- `description` (optional string): Issue description
|
||||
- `assignee_ids` (optional number[]): User IDs to assign
|
||||
- `labels` (optional string[]): Labels to add
|
||||
- `milestone_id` (optional number): Milestone ID
|
||||
- Returns: Created issue details
|
||||
|
||||
7. `create_merge_request`
|
||||
- Create a new merge request
|
||||
- Inputs:
|
||||
- `project_id` (string): Project ID or URL-encoded path
|
||||
- `title` (string): MR title
|
||||
- `description` (optional string): MR description
|
||||
- `source_branch` (string): Branch containing changes
|
||||
- `target_branch` (string): Branch to merge into
|
||||
- `draft` (optional boolean): Create as draft MR
|
||||
- `allow_collaboration` (optional boolean): Allow commits from upstream members
|
||||
- Returns: Created merge request details
|
||||
|
||||
8. `fork_repository`
|
||||
- Fork a project
|
||||
- Inputs:
|
||||
- `project_id` (string): Project ID or URL-encoded path
|
||||
- `namespace` (optional string): Namespace to fork to
|
||||
- Returns: Forked project details
|
||||
|
||||
9. `create_branch`
|
||||
- Create a new branch
|
||||
- Inputs:
|
||||
- `project_id` (string): Project ID or URL-encoded path
|
||||
- `branch` (string): Name for new branch
|
||||
- `ref` (optional string): Source branch/commit for new branch
|
||||
- Returns: Created branch reference
|
||||
|
||||
## Setup
|
||||
|
||||
### Personal Access Token
|
||||
[Create a GitLab Personal Access Token](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html) with appropriate permissions:
|
||||
- Go to User Settings > Access Tokens in GitLab
|
||||
- Select the required scopes:
|
||||
- `api` for full API access
|
||||
- `read_api` for read-only access
|
||||
- `read_repository` and `write_repository` for repository operations
|
||||
- Create the token and save it securely
|
||||
|
||||
### Usage with Claude Desktop
|
||||
Add the following to your `claude_desktop_config.json`:
|
||||
|
||||
#### Docker
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"gitlab": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"--rm",
|
||||
"-i",
|
||||
"-e",
|
||||
"GITLAB_PERSONAL_ACCESS_TOKEN",
|
||||
"-e",
|
||||
"GITLAB_API_URL",
|
||||
"mcp/gitlab"
|
||||
],
|
||||
"env": {
|
||||
"GITLAB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>",
|
||||
"GITLAB_API_URL": "https://gitlab.com/api/v4" // Optional, for self-hosted instances
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"gitlab": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-gitlab"
|
||||
],
|
||||
"env": {
|
||||
"GITLAB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>",
|
||||
"GITLAB_API_URL": "https://gitlab.com/api/v4" // Optional, for self-hosted instances
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Build
|
||||
|
||||
Docker build:
|
||||
|
||||
```bash
|
||||
docker build -t vonwig/gitlab:mcp -f src/gitlab/Dockerfile .
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
- `GITLAB_PERSONAL_ACCESS_TOKEN`: Your GitLab personal access token (required)
|
||||
- `GITLAB_API_URL`: Base URL for GitLab API (optional, defaults to `https://gitlab.com/api/v4`)
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,534 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import fetch from "node-fetch";
|
||||
import { z } from 'zod';
|
||||
import { zodToJsonSchema } from 'zod-to-json-schema';
|
||||
import {
|
||||
GitLabForkSchema,
|
||||
GitLabReferenceSchema,
|
||||
GitLabRepositorySchema,
|
||||
GitLabIssueSchema,
|
||||
GitLabMergeRequestSchema,
|
||||
GitLabContentSchema,
|
||||
GitLabCreateUpdateFileResponseSchema,
|
||||
GitLabSearchResponseSchema,
|
||||
GitLabTreeSchema,
|
||||
GitLabCommitSchema,
|
||||
CreateRepositoryOptionsSchema,
|
||||
CreateIssueOptionsSchema,
|
||||
CreateMergeRequestOptionsSchema,
|
||||
CreateBranchOptionsSchema,
|
||||
CreateOrUpdateFileSchema,
|
||||
SearchRepositoriesSchema,
|
||||
CreateRepositorySchema,
|
||||
GetFileContentsSchema,
|
||||
PushFilesSchema,
|
||||
CreateIssueSchema,
|
||||
CreateMergeRequestSchema,
|
||||
ForkRepositorySchema,
|
||||
CreateBranchSchema,
|
||||
type GitLabFork,
|
||||
type GitLabReference,
|
||||
type GitLabRepository,
|
||||
type GitLabIssue,
|
||||
type GitLabMergeRequest,
|
||||
type GitLabContent,
|
||||
type GitLabCreateUpdateFileResponse,
|
||||
type GitLabSearchResponse,
|
||||
type GitLabTree,
|
||||
type GitLabCommit,
|
||||
type FileOperation,
|
||||
} from './schemas.js';
|
||||
|
||||
const server = new Server({
|
||||
name: "gitlab-mcp-server",
|
||||
version: "0.5.1",
|
||||
}, {
|
||||
capabilities: {
|
||||
tools: {}
|
||||
}
|
||||
});
|
||||
|
||||
const GITLAB_PERSONAL_ACCESS_TOKEN = process.env.GITLAB_PERSONAL_ACCESS_TOKEN;
|
||||
const GITLAB_API_URL = process.env.GITLAB_API_URL || 'https://gitlab.com/api/v4';
|
||||
|
||||
if (!GITLAB_PERSONAL_ACCESS_TOKEN) {
|
||||
console.error("GITLAB_PERSONAL_ACCESS_TOKEN environment variable is not set");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
async function forkProject(
|
||||
projectId: string,
|
||||
namespace?: string
|
||||
): Promise<GitLabFork> {
|
||||
const url = `${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/fork`;
|
||||
const queryParams = namespace ? `?namespace=${encodeURIComponent(namespace)}` : '';
|
||||
|
||||
const response = await fetch(url + queryParams, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabForkSchema.parse(await response.json());
|
||||
}
|
||||
|
||||
async function createBranch(
|
||||
projectId: string,
|
||||
options: z.infer<typeof CreateBranchOptionsSchema>
|
||||
): Promise<GitLabReference> {
|
||||
const response = await fetch(
|
||||
`${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/repository/branches`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
branch: options.name,
|
||||
ref: options.ref
|
||||
})
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabReferenceSchema.parse(await response.json());
|
||||
}
|
||||
|
||||
async function getDefaultBranchRef(projectId: string): Promise<string> {
|
||||
const response = await fetch(
|
||||
`${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}`,
|
||||
{
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const project = GitLabRepositorySchema.parse(await response.json());
|
||||
return project.default_branch;
|
||||
}
|
||||
|
||||
async function getFileContents(
|
||||
projectId: string,
|
||||
filePath: string,
|
||||
ref?: string
|
||||
): Promise<GitLabContent> {
|
||||
const encodedPath = encodeURIComponent(filePath);
|
||||
let url = `${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/repository/files/${encodedPath}`;
|
||||
if (ref) {
|
||||
url += `?ref=${encodeURIComponent(ref)}`;
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = GitLabContentSchema.parse(await response.json());
|
||||
|
||||
if (!Array.isArray(data) && data.content) {
|
||||
data.content = Buffer.from(data.content, 'base64').toString('utf8');
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
async function createIssue(
|
||||
projectId: string,
|
||||
options: z.infer<typeof CreateIssueOptionsSchema>
|
||||
): Promise<GitLabIssue> {
|
||||
const response = await fetch(
|
||||
`${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/issues`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
title: options.title,
|
||||
description: options.description,
|
||||
assignee_ids: options.assignee_ids,
|
||||
milestone_id: options.milestone_id,
|
||||
labels: options.labels?.join(',')
|
||||
})
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabIssueSchema.parse(await response.json());
|
||||
}
|
||||
|
||||
async function createMergeRequest(
|
||||
projectId: string,
|
||||
options: z.infer<typeof CreateMergeRequestOptionsSchema>
|
||||
): Promise<GitLabMergeRequest> {
|
||||
const response = await fetch(
|
||||
`${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/merge_requests`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
title: options.title,
|
||||
description: options.description,
|
||||
source_branch: options.source_branch,
|
||||
target_branch: options.target_branch,
|
||||
allow_collaboration: options.allow_collaboration,
|
||||
draft: options.draft
|
||||
})
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabMergeRequestSchema.parse(await response.json());
|
||||
}
|
||||
|
||||
async function createOrUpdateFile(
|
||||
projectId: string,
|
||||
filePath: string,
|
||||
content: string,
|
||||
commitMessage: string,
|
||||
branch: string,
|
||||
previousPath?: string
|
||||
): Promise<GitLabCreateUpdateFileResponse> {
|
||||
const encodedPath = encodeURIComponent(filePath);
|
||||
const url = `${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/repository/files/${encodedPath}`;
|
||||
|
||||
const body = {
|
||||
branch,
|
||||
content,
|
||||
commit_message: commitMessage,
|
||||
...(previousPath ? { previous_path: previousPath } : {})
|
||||
};
|
||||
|
||||
// Check if file exists
|
||||
let method = "POST";
|
||||
try {
|
||||
await getFileContents(projectId, filePath, branch);
|
||||
method = "PUT";
|
||||
} catch (error) {
|
||||
// File doesn't exist, use POST
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
method,
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(body)
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabCreateUpdateFileResponseSchema.parse(await response.json());
|
||||
}
|
||||
|
||||
async function createTree(
|
||||
projectId: string,
|
||||
files: FileOperation[],
|
||||
ref?: string
|
||||
): Promise<GitLabTree> {
|
||||
const response = await fetch(
|
||||
`${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/repository/tree`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
files: files.map(file => ({
|
||||
file_path: file.path,
|
||||
content: file.content
|
||||
})),
|
||||
...(ref ? { ref } : {})
|
||||
})
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabTreeSchema.parse(await response.json());
|
||||
}
|
||||
|
||||
async function createCommit(
|
||||
projectId: string,
|
||||
message: string,
|
||||
branch: string,
|
||||
actions: FileOperation[]
|
||||
): Promise<GitLabCommit> {
|
||||
const response = await fetch(
|
||||
`${GITLAB_API_URL}/projects/${encodeURIComponent(projectId)}/repository/commits`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
branch,
|
||||
commit_message: message,
|
||||
actions: actions.map(action => ({
|
||||
action: "create",
|
||||
file_path: action.path,
|
||||
content: action.content
|
||||
}))
|
||||
})
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabCommitSchema.parse(await response.json());
|
||||
}
|
||||
|
||||
async function searchProjects(
|
||||
query: string,
|
||||
page: number = 1,
|
||||
perPage: number = 20
|
||||
): Promise<GitLabSearchResponse> {
|
||||
const url = new URL(`${GITLAB_API_URL}/projects`);
|
||||
url.searchParams.append("search", query);
|
||||
url.searchParams.append("page", page.toString());
|
||||
url.searchParams.append("per_page", perPage.toString());
|
||||
|
||||
const response = await fetch(url.toString(), {
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const projects = await response.json();
|
||||
return GitLabSearchResponseSchema.parse({
|
||||
count: parseInt(response.headers.get("X-Total") || "0"),
|
||||
items: projects
|
||||
});
|
||||
}
|
||||
|
||||
async function createRepository(
|
||||
options: z.infer<typeof CreateRepositoryOptionsSchema>
|
||||
): Promise<GitLabRepository> {
|
||||
const response = await fetch(`${GITLAB_API_URL}/projects`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${GITLAB_PERSONAL_ACCESS_TOKEN}`,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: options.name,
|
||||
description: options.description,
|
||||
visibility: options.visibility,
|
||||
initialize_with_readme: options.initialize_with_readme
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitLab API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return GitLabRepositorySchema.parse(await response.json());
|
||||
}
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: "create_or_update_file",
|
||||
description: "Create or update a single file in a GitLab project",
|
||||
inputSchema: zodToJsonSchema(CreateOrUpdateFileSchema)
|
||||
},
|
||||
{
|
||||
name: "search_repositories",
|
||||
description: "Search for GitLab projects",
|
||||
inputSchema: zodToJsonSchema(SearchRepositoriesSchema)
|
||||
},
|
||||
{
|
||||
name: "create_repository",
|
||||
description: "Create a new GitLab project",
|
||||
inputSchema: zodToJsonSchema(CreateRepositorySchema)
|
||||
},
|
||||
{
|
||||
name: "get_file_contents",
|
||||
description: "Get the contents of a file or directory from a GitLab project",
|
||||
inputSchema: zodToJsonSchema(GetFileContentsSchema)
|
||||
},
|
||||
{
|
||||
name: "push_files",
|
||||
description: "Push multiple files to a GitLab project in a single commit",
|
||||
inputSchema: zodToJsonSchema(PushFilesSchema)
|
||||
},
|
||||
{
|
||||
name: "create_issue",
|
||||
description: "Create a new issue in a GitLab project",
|
||||
inputSchema: zodToJsonSchema(CreateIssueSchema)
|
||||
},
|
||||
{
|
||||
name: "create_merge_request",
|
||||
description: "Create a new merge request in a GitLab project",
|
||||
inputSchema: zodToJsonSchema(CreateMergeRequestSchema)
|
||||
},
|
||||
{
|
||||
name: "fork_repository",
|
||||
description: "Fork a GitLab project to your account or specified namespace",
|
||||
inputSchema: zodToJsonSchema(ForkRepositorySchema)
|
||||
},
|
||||
{
|
||||
name: "create_branch",
|
||||
description: "Create a new branch in a GitLab project",
|
||||
inputSchema: zodToJsonSchema(CreateBranchSchema)
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
try {
|
||||
if (!request.params.arguments) {
|
||||
throw new Error("Arguments are required");
|
||||
}
|
||||
|
||||
switch (request.params.name) {
|
||||
case "fork_repository": {
|
||||
const args = ForkRepositorySchema.parse(request.params.arguments);
|
||||
const fork = await forkProject(args.project_id, args.namespace);
|
||||
return { content: [{ type: "text", text: JSON.stringify(fork, null, 2) }] };
|
||||
}
|
||||
|
||||
case "create_branch": {
|
||||
const args = CreateBranchSchema.parse(request.params.arguments);
|
||||
let ref = args.ref;
|
||||
if (!ref) {
|
||||
ref = await getDefaultBranchRef(args.project_id);
|
||||
}
|
||||
|
||||
const branch = await createBranch(args.project_id, {
|
||||
name: args.branch,
|
||||
ref
|
||||
});
|
||||
|
||||
return { content: [{ type: "text", text: JSON.stringify(branch, null, 2) }] };
|
||||
}
|
||||
|
||||
case "search_repositories": {
|
||||
const args = SearchRepositoriesSchema.parse(request.params.arguments);
|
||||
const results = await searchProjects(args.search, args.page, args.per_page);
|
||||
return { content: [{ type: "text", text: JSON.stringify(results, null, 2) }] };
|
||||
}
|
||||
|
||||
case "create_repository": {
|
||||
const args = CreateRepositorySchema.parse(request.params.arguments);
|
||||
const repository = await createRepository(args);
|
||||
return { content: [{ type: "text", text: JSON.stringify(repository, null, 2) }] };
|
||||
}
|
||||
|
||||
case "get_file_contents": {
|
||||
const args = GetFileContentsSchema.parse(request.params.arguments);
|
||||
const contents = await getFileContents(args.project_id, args.file_path, args.ref);
|
||||
return { content: [{ type: "text", text: JSON.stringify(contents, null, 2) }] };
|
||||
}
|
||||
|
||||
case "create_or_update_file": {
|
||||
const args = CreateOrUpdateFileSchema.parse(request.params.arguments);
|
||||
const result = await createOrUpdateFile(
|
||||
args.project_id,
|
||||
args.file_path,
|
||||
args.content,
|
||||
args.commit_message,
|
||||
args.branch,
|
||||
args.previous_path
|
||||
);
|
||||
return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
|
||||
}
|
||||
|
||||
case "push_files": {
|
||||
const args = PushFilesSchema.parse(request.params.arguments);
|
||||
const result = await createCommit(
|
||||
args.project_id,
|
||||
args.commit_message,
|
||||
args.branch,
|
||||
args.files.map(f => ({ path: f.file_path, content: f.content }))
|
||||
);
|
||||
return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
|
||||
}
|
||||
|
||||
case "create_issue": {
|
||||
const args = CreateIssueSchema.parse(request.params.arguments);
|
||||
const { project_id, ...options } = args;
|
||||
const issue = await createIssue(project_id, options);
|
||||
return { content: [{ type: "text", text: JSON.stringify(issue, null, 2) }] };
|
||||
}
|
||||
|
||||
case "create_merge_request": {
|
||||
const args = CreateMergeRequestSchema.parse(request.params.arguments);
|
||||
const { project_id, ...options } = args;
|
||||
const mergeRequest = await createMergeRequest(project_id, options);
|
||||
return { content: [{ type: "text", text: JSON.stringify(mergeRequest, null, 2) }] };
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
throw new Error(`Invalid arguments: ${error.errors.map(e => `${e.path.join('.')}: ${e.message}`).join(', ')}`);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("GitLab MCP Server running on stdio");
|
||||
}
|
||||
|
||||
runServer().catch((error) => {
|
||||
console.error("Fatal error in main():", error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,31 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-gitlab",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for using the GitLab API",
|
||||
"license": "MIT",
|
||||
"author": "GitLab, PBC (https://gitlab.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-gitlab": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.0.1",
|
||||
"@types/node-fetch": "^2.6.12",
|
||||
"node-fetch": "^3.3.2",
|
||||
"zod-to-json-schema": "^3.23.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,325 +0,0 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
// Base schemas for common types
|
||||
export const GitLabAuthorSchema = z.object({
|
||||
name: z.string(),
|
||||
email: z.string(),
|
||||
date: z.string()
|
||||
});
|
||||
|
||||
// Repository related schemas
|
||||
export const GitLabOwnerSchema = z.object({
|
||||
username: z.string(), // Changed from login to match GitLab API
|
||||
id: z.number(),
|
||||
avatar_url: z.string(),
|
||||
web_url: z.string(), // Changed from html_url to match GitLab API
|
||||
name: z.string(), // Added as GitLab includes full name
|
||||
state: z.string() // Added as GitLab includes user state
|
||||
});
|
||||
|
||||
export const GitLabRepositorySchema = z.object({
|
||||
id: z.number(),
|
||||
name: z.string(),
|
||||
path_with_namespace: z.string(), // Changed from full_name to match GitLab API
|
||||
visibility: z.string(), // Changed from private to match GitLab API
|
||||
owner: GitLabOwnerSchema.optional(),
|
||||
web_url: z.string(), // Changed from html_url to match GitLab API
|
||||
description: z.string().nullable(),
|
||||
fork: z.boolean().optional(),
|
||||
ssh_url_to_repo: z.string(), // Changed from ssh_url to match GitLab API
|
||||
http_url_to_repo: z.string(), // Changed from clone_url to match GitLab API
|
||||
created_at: z.string(),
|
||||
last_activity_at: z.string(), // Changed from updated_at to match GitLab API
|
||||
default_branch: z.string()
|
||||
});
|
||||
|
||||
// File content schemas
|
||||
export const GitLabFileContentSchema = z.object({
|
||||
file_name: z.string(), // Changed from name to match GitLab API
|
||||
file_path: z.string(), // Changed from path to match GitLab API
|
||||
size: z.number(),
|
||||
encoding: z.string(),
|
||||
content: z.string(),
|
||||
content_sha256: z.string(), // Changed from sha to match GitLab API
|
||||
ref: z.string(), // Added as GitLab requires branch reference
|
||||
blob_id: z.string(), // Added to match GitLab API
|
||||
last_commit_id: z.string() // Added to match GitLab API
|
||||
});
|
||||
|
||||
export const GitLabDirectoryContentSchema = z.object({
|
||||
name: z.string(),
|
||||
path: z.string(),
|
||||
type: z.string(),
|
||||
mode: z.string(),
|
||||
id: z.string(), // Changed from sha to match GitLab API
|
||||
web_url: z.string() // Changed from html_url to match GitLab API
|
||||
});
|
||||
|
||||
export const GitLabContentSchema = z.union([
|
||||
GitLabFileContentSchema,
|
||||
z.array(GitLabDirectoryContentSchema)
|
||||
]);
|
||||
|
||||
// Operation schemas
|
||||
export const FileOperationSchema = z.object({
|
||||
path: z.string(),
|
||||
content: z.string()
|
||||
});
|
||||
|
||||
// Tree and commit schemas
|
||||
export const GitLabTreeEntrySchema = z.object({
|
||||
id: z.string(), // Changed from sha to match GitLab API
|
||||
name: z.string(),
|
||||
type: z.enum(['blob', 'tree']),
|
||||
path: z.string(),
|
||||
mode: z.string()
|
||||
});
|
||||
|
||||
export const GitLabTreeSchema = z.object({
|
||||
id: z.string(), // Changed from sha to match GitLab API
|
||||
tree: z.array(GitLabTreeEntrySchema)
|
||||
});
|
||||
|
||||
export const GitLabCommitSchema = z.object({
|
||||
id: z.string(), // Changed from sha to match GitLab API
|
||||
short_id: z.string(), // Added to match GitLab API
|
||||
title: z.string(), // Changed from message to match GitLab API
|
||||
author_name: z.string(),
|
||||
author_email: z.string(),
|
||||
authored_date: z.string(),
|
||||
committer_name: z.string(),
|
||||
committer_email: z.string(),
|
||||
committed_date: z.string(),
|
||||
web_url: z.string(), // Changed from html_url to match GitLab API
|
||||
parent_ids: z.array(z.string()) // Changed from parents to match GitLab API
|
||||
});
|
||||
|
||||
// Reference schema
|
||||
export const GitLabReferenceSchema = z.object({
|
||||
name: z.string(), // Changed from ref to match GitLab API
|
||||
commit: z.object({
|
||||
id: z.string(), // Changed from sha to match GitLab API
|
||||
web_url: z.string() // Changed from url to match GitLab API
|
||||
})
|
||||
});
|
||||
|
||||
// Input schemas for operations
|
||||
export const CreateRepositoryOptionsSchema = z.object({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
visibility: z.enum(['private', 'internal', 'public']).optional(), // Changed from private to match GitLab API
|
||||
initialize_with_readme: z.boolean().optional() // Changed from auto_init to match GitLab API
|
||||
});
|
||||
|
||||
export const CreateIssueOptionsSchema = z.object({
|
||||
title: z.string(),
|
||||
description: z.string().optional(), // Changed from body to match GitLab API
|
||||
assignee_ids: z.array(z.number()).optional(), // Changed from assignees to match GitLab API
|
||||
milestone_id: z.number().optional(), // Changed from milestone to match GitLab API
|
||||
labels: z.array(z.string()).optional()
|
||||
});
|
||||
|
||||
export const CreateMergeRequestOptionsSchema = z.object({ // Changed from CreatePullRequestOptionsSchema
|
||||
title: z.string(),
|
||||
description: z.string().optional(), // Changed from body to match GitLab API
|
||||
source_branch: z.string(), // Changed from head to match GitLab API
|
||||
target_branch: z.string(), // Changed from base to match GitLab API
|
||||
allow_collaboration: z.boolean().optional(), // Changed from maintainer_can_modify to match GitLab API
|
||||
draft: z.boolean().optional()
|
||||
});
|
||||
|
||||
export const CreateBranchOptionsSchema = z.object({
|
||||
name: z.string(), // Changed from ref to match GitLab API
|
||||
ref: z.string() // The source branch/commit for the new branch
|
||||
});
|
||||
|
||||
// Response schemas for operations
|
||||
export const GitLabCreateUpdateFileResponseSchema = z.object({
|
||||
file_path: z.string(),
|
||||
branch: z.string(),
|
||||
commit_id: z.string(), // Changed from sha to match GitLab API
|
||||
content: GitLabFileContentSchema.optional()
|
||||
});
|
||||
|
||||
export const GitLabSearchResponseSchema = z.object({
|
||||
count: z.number(), // Changed from total_count to match GitLab API
|
||||
items: z.array(GitLabRepositorySchema)
|
||||
});
|
||||
|
||||
// Fork related schemas
|
||||
export const GitLabForkParentSchema = z.object({
|
||||
name: z.string(),
|
||||
path_with_namespace: z.string(), // Changed from full_name to match GitLab API
|
||||
owner: z.object({
|
||||
username: z.string(), // Changed from login to match GitLab API
|
||||
id: z.number(),
|
||||
avatar_url: z.string()
|
||||
}),
|
||||
web_url: z.string() // Changed from html_url to match GitLab API
|
||||
});
|
||||
|
||||
export const GitLabForkSchema = GitLabRepositorySchema.extend({
|
||||
forked_from_project: GitLabForkParentSchema // Changed from parent to match GitLab API
|
||||
});
|
||||
|
||||
// Issue related schemas
|
||||
export const GitLabLabelSchema = z.object({
|
||||
id: z.number(),
|
||||
name: z.string(),
|
||||
color: z.string(),
|
||||
description: z.string().optional()
|
||||
});
|
||||
|
||||
export const GitLabUserSchema = z.object({
|
||||
username: z.string(), // Changed from login to match GitLab API
|
||||
id: z.number(),
|
||||
name: z.string(),
|
||||
avatar_url: z.string(),
|
||||
web_url: z.string() // Changed from html_url to match GitLab API
|
||||
});
|
||||
|
||||
export const GitLabMilestoneSchema = z.object({
|
||||
id: z.number(),
|
||||
iid: z.number(), // Added to match GitLab API
|
||||
title: z.string(),
|
||||
description: z.string(),
|
||||
state: z.string(),
|
||||
web_url: z.string() // Changed from html_url to match GitLab API
|
||||
});
|
||||
|
||||
export const GitLabIssueSchema = z.object({
|
||||
id: z.number(),
|
||||
iid: z.number(), // Added to match GitLab API
|
||||
project_id: z.number(), // Added to match GitLab API
|
||||
title: z.string(),
|
||||
description: z.string(), // Changed from body to match GitLab API
|
||||
state: z.string(),
|
||||
author: GitLabUserSchema,
|
||||
assignees: z.array(GitLabUserSchema),
|
||||
labels: z.array(GitLabLabelSchema),
|
||||
milestone: GitLabMilestoneSchema.nullable(),
|
||||
created_at: z.string(),
|
||||
updated_at: z.string(),
|
||||
closed_at: z.string().nullable(),
|
||||
web_url: z.string() // Changed from html_url to match GitLab API
|
||||
});
|
||||
|
||||
// Merge Request related schemas (equivalent to Pull Request)
|
||||
export const GitLabMergeRequestDiffRefSchema = z.object({
|
||||
base_sha: z.string(),
|
||||
head_sha: z.string(),
|
||||
start_sha: z.string()
|
||||
});
|
||||
|
||||
export const GitLabMergeRequestSchema = z.object({
|
||||
id: z.number(),
|
||||
iid: z.number(), // Added to match GitLab API
|
||||
project_id: z.number(), // Added to match GitLab API
|
||||
title: z.string(),
|
||||
description: z.string(), // Changed from body to match GitLab API
|
||||
state: z.string(),
|
||||
merged: z.boolean().optional(),
|
||||
author: GitLabUserSchema,
|
||||
assignees: z.array(GitLabUserSchema),
|
||||
source_branch: z.string(), // Changed from head to match GitLab API
|
||||
target_branch: z.string(), // Changed from base to match GitLab API
|
||||
diff_refs: GitLabMergeRequestDiffRefSchema.nullable(),
|
||||
web_url: z.string(), // Changed from html_url to match GitLab API
|
||||
created_at: z.string(),
|
||||
updated_at: z.string(),
|
||||
merged_at: z.string().nullable(),
|
||||
closed_at: z.string().nullable(),
|
||||
merge_commit_sha: z.string().nullable()
|
||||
});
|
||||
|
||||
// API Operation Parameter Schemas
|
||||
const ProjectParamsSchema = z.object({
|
||||
project_id: z.string().describe("Project ID or URL-encoded path") // Changed from owner/repo to match GitLab API
|
||||
});
|
||||
|
||||
export const CreateOrUpdateFileSchema = ProjectParamsSchema.extend({
|
||||
file_path: z.string().describe("Path where to create/update the file"),
|
||||
content: z.string().describe("Content of the file"),
|
||||
commit_message: z.string().describe("Commit message"),
|
||||
branch: z.string().describe("Branch to create/update the file in"),
|
||||
previous_path: z.string().optional()
|
||||
.describe("Path of the file to move/rename")
|
||||
});
|
||||
|
||||
export const SearchRepositoriesSchema = z.object({
|
||||
search: z.string().describe("Search query"), // Changed from query to match GitLab API
|
||||
page: z.number().optional().describe("Page number for pagination (default: 1)"),
|
||||
per_page: z.number().optional().describe("Number of results per page (default: 20)")
|
||||
});
|
||||
|
||||
export const CreateRepositorySchema = z.object({
|
||||
name: z.string().describe("Repository name"),
|
||||
description: z.string().optional().describe("Repository description"),
|
||||
visibility: z.enum(['private', 'internal', 'public']).optional()
|
||||
.describe("Repository visibility level"),
|
||||
initialize_with_readme: z.boolean().optional()
|
||||
.describe("Initialize with README.md")
|
||||
});
|
||||
|
||||
export const GetFileContentsSchema = ProjectParamsSchema.extend({
|
||||
file_path: z.string().describe("Path to the file or directory"),
|
||||
ref: z.string().optional().describe("Branch/tag/commit to get contents from")
|
||||
});
|
||||
|
||||
export const PushFilesSchema = ProjectParamsSchema.extend({
|
||||
branch: z.string().describe("Branch to push to"),
|
||||
files: z.array(z.object({
|
||||
file_path: z.string().describe("Path where to create the file"),
|
||||
content: z.string().describe("Content of the file")
|
||||
})).describe("Array of files to push"),
|
||||
commit_message: z.string().describe("Commit message")
|
||||
});
|
||||
|
||||
export const CreateIssueSchema = ProjectParamsSchema.extend({
|
||||
title: z.string().describe("Issue title"),
|
||||
description: z.string().optional().describe("Issue description"),
|
||||
assignee_ids: z.array(z.number()).optional().describe("Array of user IDs to assign"),
|
||||
labels: z.array(z.string()).optional().describe("Array of label names"),
|
||||
milestone_id: z.number().optional().describe("Milestone ID to assign")
|
||||
});
|
||||
|
||||
export const CreateMergeRequestSchema = ProjectParamsSchema.extend({
|
||||
title: z.string().describe("Merge request title"),
|
||||
description: z.string().optional().describe("Merge request description"),
|
||||
source_branch: z.string().describe("Branch containing changes"),
|
||||
target_branch: z.string().describe("Branch to merge into"),
|
||||
draft: z.boolean().optional().describe("Create as draft merge request"),
|
||||
allow_collaboration: z.boolean().optional()
|
||||
.describe("Allow commits from upstream members")
|
||||
});
|
||||
|
||||
export const ForkRepositorySchema = ProjectParamsSchema.extend({
|
||||
namespace: z.string().optional()
|
||||
.describe("Namespace to fork to (full path)")
|
||||
});
|
||||
|
||||
export const CreateBranchSchema = ProjectParamsSchema.extend({
|
||||
branch: z.string().describe("Name for the new branch"),
|
||||
ref: z.string().optional()
|
||||
.describe("Source branch/commit for new branch")
|
||||
});
|
||||
|
||||
// Export types
|
||||
export type GitLabAuthor = z.infer<typeof GitLabAuthorSchema>;
|
||||
export type GitLabFork = z.infer<typeof GitLabForkSchema>;
|
||||
export type GitLabIssue = z.infer<typeof GitLabIssueSchema>;
|
||||
export type GitLabMergeRequest = z.infer<typeof GitLabMergeRequestSchema>;
|
||||
export type GitLabRepository = z.infer<typeof GitLabRepositorySchema>;
|
||||
export type GitLabFileContent = z.infer<typeof GitLabFileContentSchema>;
|
||||
export type GitLabDirectoryContent = z.infer<typeof GitLabDirectoryContentSchema>;
|
||||
export type GitLabContent = z.infer<typeof GitLabContentSchema>;
|
||||
export type FileOperation = z.infer<typeof FileOperationSchema>;
|
||||
export type GitLabTree = z.infer<typeof GitLabTreeSchema>;
|
||||
export type GitLabCommit = z.infer<typeof GitLabCommitSchema>;
|
||||
export type GitLabReference = z.infer<typeof GitLabReferenceSchema>;
|
||||
export type CreateRepositoryOptions = z.infer<typeof CreateRepositoryOptionsSchema>;
|
||||
export type CreateIssueOptions = z.infer<typeof CreateIssueOptionsSchema>;
|
||||
export type CreateMergeRequestOptions = z.infer<typeof CreateMergeRequestOptionsSchema>;
|
||||
export type CreateBranchOptions = z.infer<typeof CreateBranchOptionsSchema>;
|
||||
export type GitLabCreateUpdateFileResponse = z.infer<typeof GitLabCreateUpdateFileResponseSchema>;
|
||||
export type GitLabSearchResponse = z.infer<typeof GitLabSearchResponseSchema>;
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder

# Must be entire project because `prepare` script is run during `npm install` and requires all files.
COPY src/google-maps /app
COPY tsconfig.json /tsconfig.json

WORKDIR /app

# Full install runs the `prepare` script (tsc build). The cache mount keeps
# the npm cache on the build host instead of in an image layer.
RUN --mount=type=cache,target=/root/.npm npm install

FROM node:22-alpine AS release

COPY --from=builder /app/dist /app/dist
COPY --from=builder /app/package.json /app/package.json
COPY --from=builder /app/package-lock.json /app/package-lock.json

ENV NODE_ENV=production

WORKDIR /app

# Production deps only. `--omit=dev` is the correct npm spelling; the former
# `--omit-dev` is not a recognized npm flag and silently installed dev deps.
# (The redundant prune step previously run in the builder stage was removed:
# its output was never copied into this stage.)
RUN npm ci --ignore-scripts --omit=dev

# Run as the unprivileged `node` user that ships with the official image.
USER node

ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,114 +0,0 @@
|
||||
# Google Maps MCP Server
|
||||
|
||||
MCP Server for the Google Maps API.
|
||||
|
||||
## Tools
|
||||
|
||||
1. `maps_geocode`
|
||||
- Convert address to coordinates
|
||||
- Input: `address` (string)
|
||||
- Returns: location, formatted_address, place_id
|
||||
|
||||
2. `maps_reverse_geocode`
|
||||
- Convert coordinates to address
|
||||
- Inputs:
|
||||
- `latitude` (number)
|
||||
- `longitude` (number)
|
||||
- Returns: formatted_address, place_id, address_components
|
||||
|
||||
3. `maps_search_places`
|
||||
- Search for places using text query
|
||||
- Inputs:
|
||||
- `query` (string)
|
||||
- `location` (optional): { latitude: number, longitude: number }
|
||||
- `radius` (optional): number (meters, max 50000)
|
||||
- Returns: array of places with names, addresses, locations
|
||||
|
||||
4. `maps_place_details`
|
||||
- Get detailed information about a place
|
||||
- Input: `place_id` (string)
|
||||
- Returns: name, address, contact info, ratings, reviews, opening hours
|
||||
|
||||
5. `maps_distance_matrix`
|
||||
- Calculate distances and times between points
|
||||
- Inputs:
|
||||
- `origins` (string[])
|
||||
- `destinations` (string[])
|
||||
- `mode` (optional): "driving" | "walking" | "bicycling" | "transit"
|
||||
- Returns: distances and durations matrix
|
||||
|
||||
6. `maps_elevation`
|
||||
- Get elevation data for locations
|
||||
- Input: `locations` (array of {latitude, longitude})
|
||||
- Returns: elevation data for each point
|
||||
|
||||
7. `maps_directions`
|
||||
- Get directions between points
|
||||
- Inputs:
|
||||
- `origin` (string)
|
||||
- `destination` (string)
|
||||
- `mode` (optional): "driving" | "walking" | "bicycling" | "transit"
|
||||
- Returns: route details with steps, distance, duration
|
||||
|
||||
## Setup
|
||||
|
||||
### API Key
|
||||
Get a Google Maps API key by following the instructions [here](https://developers.google.com/maps/documentation/javascript/get-api-key#create-api-keys).
|
||||
|
||||
### Usage with Claude Desktop
|
||||
|
||||
Add the following to your `claude_desktop_config.json`:
|
||||
|
||||
#### Docker
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"google-maps": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"-i",
|
||||
"--rm",
|
||||
"-e",
|
||||
"GOOGLE_MAPS_API_KEY",
|
||||
"mcp/google-maps"
|
||||
],
|
||||
"env": {
|
||||
"GOOGLE_MAPS_API_KEY": "<YOUR_API_KEY>"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"google-maps": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-google-maps"
|
||||
],
|
||||
"env": {
|
||||
"GOOGLE_MAPS_API_KEY": "<YOUR_API_KEY>"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Build
|
||||
|
||||
Docker build:
|
||||
|
||||
```bash
|
||||
docker build -t mcp/google-maps -f src/google-maps/Dockerfile .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,678 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
// Response interfaces
|
||||
interface GoogleMapsResponse {
|
||||
status: string;
|
||||
error_message?: string;
|
||||
}
|
||||
|
||||
interface GeocodeResponse extends GoogleMapsResponse {
|
||||
results: Array<{
|
||||
place_id: string;
|
||||
formatted_address: string;
|
||||
geometry: {
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
}
|
||||
};
|
||||
address_components: Array<{
|
||||
long_name: string;
|
||||
short_name: string;
|
||||
types: string[];
|
||||
}>;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface PlacesSearchResponse extends GoogleMapsResponse {
|
||||
results: Array<{
|
||||
name: string;
|
||||
place_id: string;
|
||||
formatted_address: string;
|
||||
geometry: {
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
}
|
||||
};
|
||||
rating?: number;
|
||||
types: string[];
|
||||
}>;
|
||||
}
|
||||
|
||||
interface PlaceDetailsResponse extends GoogleMapsResponse {
|
||||
result: {
|
||||
name: string;
|
||||
place_id: string;
|
||||
formatted_address: string;
|
||||
formatted_phone_number?: string;
|
||||
website?: string;
|
||||
rating?: number;
|
||||
reviews?: Array<{
|
||||
author_name: string;
|
||||
rating: number;
|
||||
text: string;
|
||||
time: number;
|
||||
}>;
|
||||
opening_hours?: {
|
||||
weekday_text: string[];
|
||||
open_now: boolean;
|
||||
};
|
||||
geometry: {
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
}
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
interface DistanceMatrixResponse extends GoogleMapsResponse {
|
||||
origin_addresses: string[];
|
||||
destination_addresses: string[];
|
||||
rows: Array<{
|
||||
elements: Array<{
|
||||
status: string;
|
||||
duration: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
distance: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
}>;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface ElevationResponse extends GoogleMapsResponse {
|
||||
results: Array<{
|
||||
elevation: number;
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
};
|
||||
resolution: number;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface DirectionsResponse extends GoogleMapsResponse {
|
||||
routes: Array<{
|
||||
summary: string;
|
||||
legs: Array<{
|
||||
distance: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
duration: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
steps: Array<{
|
||||
html_instructions: string;
|
||||
distance: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
duration: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
travel_mode: string;
|
||||
}>;
|
||||
}>;
|
||||
}>;
|
||||
}
|
||||
|
||||
function getApiKey(): string {
|
||||
const apiKey = process.env.GOOGLE_MAPS_API_KEY;
|
||||
if (!apiKey) {
|
||||
console.error("GOOGLE_MAPS_API_KEY environment variable is not set");
|
||||
process.exit(1);
|
||||
}
|
||||
return apiKey;
|
||||
}
|
||||
|
||||
const GOOGLE_MAPS_API_KEY = getApiKey();
|
||||
|
||||
// Tool definitions
|
||||
const GEOCODE_TOOL: Tool = {
|
||||
name: "maps_geocode",
|
||||
description: "Convert an address into geographic coordinates",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
address: {
|
||||
type: "string",
|
||||
description: "The address to geocode"
|
||||
}
|
||||
},
|
||||
required: ["address"]
|
||||
}
|
||||
};
|
||||
|
||||
const REVERSE_GEOCODE_TOOL: Tool = {
|
||||
name: "maps_reverse_geocode",
|
||||
description: "Convert coordinates into an address",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
latitude: {
|
||||
type: "number",
|
||||
description: "Latitude coordinate"
|
||||
},
|
||||
longitude: {
|
||||
type: "number",
|
||||
description: "Longitude coordinate"
|
||||
}
|
||||
},
|
||||
required: ["latitude", "longitude"]
|
||||
}
|
||||
};
|
||||
|
||||
const SEARCH_PLACES_TOOL: Tool = {
|
||||
name: "maps_search_places",
|
||||
description: "Search for places using Google Places API",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: {
|
||||
type: "string",
|
||||
description: "Search query"
|
||||
},
|
||||
location: {
|
||||
type: "object",
|
||||
properties: {
|
||||
latitude: { type: "number" },
|
||||
longitude: { type: "number" }
|
||||
},
|
||||
description: "Optional center point for the search"
|
||||
},
|
||||
radius: {
|
||||
type: "number",
|
||||
description: "Search radius in meters (max 50000)"
|
||||
}
|
||||
},
|
||||
required: ["query"]
|
||||
}
|
||||
};
|
||||
|
||||
const PLACE_DETAILS_TOOL: Tool = {
|
||||
name: "maps_place_details",
|
||||
description: "Get detailed information about a specific place",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
place_id: {
|
||||
type: "string",
|
||||
description: "The place ID to get details for"
|
||||
}
|
||||
},
|
||||
required: ["place_id"]
|
||||
}
|
||||
};
|
||||
|
||||
const DISTANCE_MATRIX_TOOL: Tool = {
|
||||
name: "maps_distance_matrix",
|
||||
description: "Calculate travel distance and time for multiple origins and destinations",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
origins: {
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
description: "Array of origin addresses or coordinates"
|
||||
},
|
||||
destinations: {
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
description: "Array of destination addresses or coordinates"
|
||||
},
|
||||
mode: {
|
||||
type: "string",
|
||||
description: "Travel mode (driving, walking, bicycling, transit)",
|
||||
enum: ["driving", "walking", "bicycling", "transit"]
|
||||
}
|
||||
},
|
||||
required: ["origins", "destinations"]
|
||||
}
|
||||
};
|
||||
|
||||
const ELEVATION_TOOL: Tool = {
|
||||
name: "maps_elevation",
|
||||
description: "Get elevation data for locations on the earth",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
locations: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "object",
|
||||
properties: {
|
||||
latitude: { type: "number" },
|
||||
longitude: { type: "number" }
|
||||
},
|
||||
required: ["latitude", "longitude"]
|
||||
},
|
||||
description: "Array of locations to get elevation for"
|
||||
}
|
||||
},
|
||||
required: ["locations"]
|
||||
}
|
||||
};
|
||||
|
||||
const DIRECTIONS_TOOL: Tool = {
|
||||
name: "maps_directions",
|
||||
description: "Get directions between two points",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
origin: {
|
||||
type: "string",
|
||||
description: "Starting point address or coordinates"
|
||||
},
|
||||
destination: {
|
||||
type: "string",
|
||||
description: "Ending point address or coordinates"
|
||||
},
|
||||
mode: {
|
||||
type: "string",
|
||||
description: "Travel mode (driving, walking, bicycling, transit)",
|
||||
enum: ["driving", "walking", "bicycling", "transit"]
|
||||
}
|
||||
},
|
||||
required: ["origin", "destination"]
|
||||
}
|
||||
};
|
||||
|
||||
const MAPS_TOOLS = [
|
||||
GEOCODE_TOOL,
|
||||
REVERSE_GEOCODE_TOOL,
|
||||
SEARCH_PLACES_TOOL,
|
||||
PLACE_DETAILS_TOOL,
|
||||
DISTANCE_MATRIX_TOOL,
|
||||
ELEVATION_TOOL,
|
||||
DIRECTIONS_TOOL,
|
||||
] as const;
|
||||
|
||||
// API handlers
|
||||
async function handleGeocode(address: string) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/geocode/json");
|
||||
url.searchParams.append("address", address);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as GeocodeResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Geocoding failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
location: data.results[0].geometry.location,
|
||||
formatted_address: data.results[0].formatted_address,
|
||||
place_id: data.results[0].place_id
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
};
|
||||
}
|
||||
|
||||
async function handleReverseGeocode(latitude: number, longitude: number) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/geocode/json");
|
||||
url.searchParams.append("latlng", `${latitude},${longitude}`);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as GeocodeResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Reverse geocoding failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
formatted_address: data.results[0].formatted_address,
|
||||
place_id: data.results[0].place_id,
|
||||
address_components: data.results[0].address_components
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
};
|
||||
}
|
||||
|
||||
async function handlePlaceSearch(
|
||||
query: string,
|
||||
location?: { latitude: number; longitude: number },
|
||||
radius?: number
|
||||
) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/place/textsearch/json");
|
||||
url.searchParams.append("query", query);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
if (location) {
|
||||
url.searchParams.append("location", `${location.latitude},${location.longitude}`);
|
||||
}
|
||||
if (radius) {
|
||||
url.searchParams.append("radius", radius.toString());
|
||||
}
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as PlacesSearchResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Place search failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
places: data.results.map((place) => ({
|
||||
name: place.name,
|
||||
formatted_address: place.formatted_address,
|
||||
location: place.geometry.location,
|
||||
place_id: place.place_id,
|
||||
rating: place.rating,
|
||||
types: place.types
|
||||
}))
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
};
|
||||
}
|
||||
|
||||
async function handlePlaceDetails(place_id: string) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/place/details/json");
|
||||
url.searchParams.append("place_id", place_id);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as PlaceDetailsResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Place details request failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
name: data.result.name,
|
||||
formatted_address: data.result.formatted_address,
|
||||
location: data.result.geometry.location,
|
||||
formatted_phone_number: data.result.formatted_phone_number,
|
||||
website: data.result.website,
|
||||
rating: data.result.rating,
|
||||
reviews: data.result.reviews,
|
||||
opening_hours: data.result.opening_hours
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
};
|
||||
}
|
||||
async function handleDistanceMatrix(
|
||||
origins: string[],
|
||||
destinations: string[],
|
||||
mode: "driving" | "walking" | "bicycling" | "transit" = "driving"
|
||||
) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/distancematrix/json");
|
||||
url.searchParams.append("origins", origins.join("|"));
|
||||
url.searchParams.append("destinations", destinations.join("|"));
|
||||
url.searchParams.append("mode", mode);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as DistanceMatrixResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Distance matrix request failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
origin_addresses: data.origin_addresses,
|
||||
destination_addresses: data.destination_addresses,
|
||||
results: data.rows.map((row) => ({
|
||||
elements: row.elements.map((element) => ({
|
||||
status: element.status,
|
||||
duration: element.duration,
|
||||
distance: element.distance
|
||||
}))
|
||||
}))
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
};
|
||||
}
|
||||
|
||||
async function handleElevation(locations: Array<{ latitude: number; longitude: number }>) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/elevation/json");
|
||||
const locationString = locations
|
||||
.map((loc) => `${loc.latitude},${loc.longitude}`)
|
||||
.join("|");
|
||||
url.searchParams.append("locations", locationString);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as ElevationResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Elevation request failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
results: data.results.map((result) => ({
|
||||
elevation: result.elevation,
|
||||
location: result.location,
|
||||
resolution: result.resolution
|
||||
}))
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
};
|
||||
}
|
||||
|
||||
async function handleDirections(
|
||||
origin: string,
|
||||
destination: string,
|
||||
mode: "driving" | "walking" | "bicycling" | "transit" = "driving"
|
||||
) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/directions/json");
|
||||
url.searchParams.append("origin", origin);
|
||||
url.searchParams.append("destination", destination);
|
||||
url.searchParams.append("mode", mode);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as DirectionsResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Directions request failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
routes: data.routes.map((route) => ({
|
||||
summary: route.summary,
|
||||
distance: route.legs[0].distance,
|
||||
duration: route.legs[0].duration,
|
||||
steps: route.legs[0].steps.map((step) => ({
|
||||
instructions: step.html_instructions,
|
||||
distance: step.distance,
|
||||
duration: step.duration,
|
||||
travel_mode: step.travel_mode
|
||||
}))
|
||||
}))
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
};
|
||||
}
|
||||
|
||||
// Server setup
|
||||
const server = new Server(
|
||||
{
|
||||
name: "mcp-server/google-maps",
|
||||
version: "0.1.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// Set up request handlers
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: MAPS_TOOLS,
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
try {
|
||||
switch (request.params.name) {
|
||||
case "maps_geocode": {
|
||||
const { address } = request.params.arguments as { address: string };
|
||||
return await handleGeocode(address);
|
||||
}
|
||||
|
||||
case "maps_reverse_geocode": {
|
||||
const { latitude, longitude } = request.params.arguments as {
|
||||
latitude: number;
|
||||
longitude: number;
|
||||
};
|
||||
return await handleReverseGeocode(latitude, longitude);
|
||||
}
|
||||
|
||||
case "maps_search_places": {
|
||||
const { query, location, radius } = request.params.arguments as {
|
||||
query: string;
|
||||
location?: { latitude: number; longitude: number };
|
||||
radius?: number;
|
||||
};
|
||||
return await handlePlaceSearch(query, location, radius);
|
||||
}
|
||||
|
||||
case "maps_place_details": {
|
||||
const { place_id } = request.params.arguments as { place_id: string };
|
||||
return await handlePlaceDetails(place_id);
|
||||
}
|
||||
|
||||
case "maps_distance_matrix": {
|
||||
const { origins, destinations, mode } = request.params.arguments as {
|
||||
origins: string[];
|
||||
destinations: string[];
|
||||
mode?: "driving" | "walking" | "bicycling" | "transit";
|
||||
};
|
||||
return await handleDistanceMatrix(origins, destinations, mode);
|
||||
}
|
||||
|
||||
case "maps_elevation": {
|
||||
const { locations } = request.params.arguments as {
|
||||
locations: Array<{ latitude: number; longitude: number }>;
|
||||
};
|
||||
return await handleElevation(locations);
|
||||
}
|
||||
|
||||
case "maps_directions": {
|
||||
const { origin, destination, mode } = request.params.arguments as {
|
||||
origin: string;
|
||||
destination: string;
|
||||
mode?: "driving" | "walking" | "bicycling" | "transit";
|
||||
};
|
||||
return await handleDirections(origin, destination, mode);
|
||||
}
|
||||
|
||||
default:
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Unknown tool: ${request.params.name}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Error: ${error instanceof Error ? error.message : String(error)}`
|
||||
}],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("Google Maps MCP Server running on stdio");
|
||||
}
|
||||
|
||||
runServer().catch((error) => {
|
||||
console.error("Fatal error running server:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,30 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-google-maps",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for using the Google Maps API",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-google-maps": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.0.1",
|
||||
"@types/node-fetch": "^2.6.12",
|
||||
"node-fetch": "^3.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
# Knowledge Graph Memory Server
|
||||
|
||||
A basic implementation of persistent memory using a local knowledge graph. This lets Claude remember information about the user across chats.
|
||||
|
||||
## Core Concepts
|
||||
@@ -181,6 +182,60 @@ The server can be configured using the following environment variables:
|
||||
|
||||
- `MEMORY_FILE_PATH`: Path to the memory storage JSON file (default: `memory.json` in the server directory)
|
||||
|
||||
# VS Code Installation Instructions
|
||||
|
||||
For quick installation, use one of the one-click installation buttons below:
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D&quality=insiders)
|
||||
|
||||
[](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D&quality=insiders)
|
||||
|
||||
For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`.
|
||||
|
||||
Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
|
||||
|
||||
> Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
|
||||
|
||||
#### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"memory": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-memory"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Docker
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"memory": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"-i",
|
||||
"-v",
|
||||
"claude-memory:/app/dist",
|
||||
"--rm",
|
||||
"mcp/memory"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### System Prompt
|
||||
|
||||
The prompt for utilizing memory depends on the use case. Changing the prompt will help the model determine the frequency and types of memories created.
|
||||
@@ -210,7 +265,7 @@ Follow these steps for each interaction:
|
||||
- If any new information was gathered during the interaction, update your memory as follows:
|
||||
a) Create entities for recurring organizations, people, and significant events
|
||||
b) Connect them to the current entities using relations
|
||||
b) Store facts about them as observations
|
||||
c) Store facts about them as observations
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
@@ -189,7 +189,7 @@ const knowledgeGraphManager = new KnowledgeGraphManager();
|
||||
// The server instance and tools exposed to Claude
|
||||
const server = new Server({
|
||||
name: "memory-server",
|
||||
version: "1.0.0",
|
||||
version: "0.6.3",
|
||||
}, {
|
||||
capabilities: {
|
||||
tools: {},
|
||||
@@ -416,4 +416,4 @@ async function main() {
|
||||
main().catch((error) => {
|
||||
console.error("Fatal error in main():", error);
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
FROM node:22.12-alpine AS builder
|
||||
|
||||
COPY src/postgres /app
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev
|
||||
|
||||
FROM node:22-alpine AS release
|
||||
|
||||
COPY --from=builder /app/dist /app/dist
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,77 +0,0 @@
|
||||
# PostgreSQL
|
||||
|
||||
A Model Context Protocol server that provides read-only access to PostgreSQL databases. This server enables LLMs to inspect database schemas and execute read-only queries.
|
||||
|
||||
## Components
|
||||
|
||||
### Tools
|
||||
|
||||
- **query**
|
||||
- Execute read-only SQL queries against the connected database
|
||||
- Input: `sql` (string): The SQL query to execute
|
||||
- All queries are executed within a READ ONLY transaction
|
||||
|
||||
### Resources
|
||||
|
||||
The server provides schema information for each table in the database:
|
||||
|
||||
- **Table Schemas** (`postgres://<host>/<table>/schema`)
|
||||
- JSON schema information for each table
|
||||
- Includes column names and data types
|
||||
- Automatically discovered from database metadata
|
||||
|
||||
## Usage with Claude Desktop
|
||||
|
||||
To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`:
|
||||
|
||||
### Docker
|
||||
|
||||
* when running docker on macos, use host.docker.internal if the server is running on the host network (eg localhost)
|
||||
* username/password can be added to the postgresql url with `postgresql://user:password@host:port/db-name`
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"-i",
|
||||
"--rm",
|
||||
"mcp/postgres",
|
||||
"postgresql://host.docker.internal:5432/mydb"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-postgres",
|
||||
"postgresql://localhost/mydb"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Replace `/mydb` with your database name.
|
||||
|
||||
## Building
|
||||
|
||||
Docker:
|
||||
|
||||
```sh
|
||||
docker build -t mcp/postgres -f src/postgres/Dockerfile .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,143 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListResourcesRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
ReadResourceRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import pg from "pg";
|
||||
|
||||
const server = new Server(
|
||||
{
|
||||
name: "example-servers/postgres",
|
||||
version: "0.1.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
resources: {},
|
||||
tools: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
if (args.length === 0) {
|
||||
console.error("Please provide a database URL as a command-line argument");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const databaseUrl = args[0];
|
||||
|
||||
const resourceBaseUrl = new URL(databaseUrl);
|
||||
resourceBaseUrl.protocol = "postgres:";
|
||||
resourceBaseUrl.password = "";
|
||||
|
||||
const pool = new pg.Pool({
|
||||
connectionString: databaseUrl,
|
||||
});
|
||||
|
||||
const SCHEMA_PATH = "schema";
|
||||
|
||||
server.setRequestHandler(ListResourcesRequestSchema, async () => {
|
||||
const client = await pool.connect();
|
||||
try {
|
||||
const result = await client.query(
|
||||
"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'",
|
||||
);
|
||||
return {
|
||||
resources: result.rows.map((row) => ({
|
||||
uri: new URL(`${row.table_name}/${SCHEMA_PATH}`, resourceBaseUrl).href,
|
||||
mimeType: "application/json",
|
||||
name: `"${row.table_name}" database schema`,
|
||||
})),
|
||||
};
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
});
|
||||
|
||||
server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
|
||||
const resourceUrl = new URL(request.params.uri);
|
||||
|
||||
const pathComponents = resourceUrl.pathname.split("/");
|
||||
const schema = pathComponents.pop();
|
||||
const tableName = pathComponents.pop();
|
||||
|
||||
if (schema !== SCHEMA_PATH) {
|
||||
throw new Error("Invalid resource URI");
|
||||
}
|
||||
|
||||
const client = await pool.connect();
|
||||
try {
|
||||
const result = await client.query(
|
||||
"SELECT column_name, data_type FROM information_schema.columns WHERE table_name = $1",
|
||||
[tableName],
|
||||
);
|
||||
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri: request.params.uri,
|
||||
mimeType: "application/json",
|
||||
text: JSON.stringify(result.rows, null, 2),
|
||||
},
|
||||
],
|
||||
};
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
});
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: "query",
|
||||
description: "Run a read-only SQL query",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
sql: { type: "string" },
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === "query") {
|
||||
const sql = request.params.arguments?.sql as string;
|
||||
|
||||
const client = await pool.connect();
|
||||
try {
|
||||
await client.query("BEGIN TRANSACTION READ ONLY");
|
||||
const result = await client.query(sql);
|
||||
return {
|
||||
content: [{ type: "text", text: JSON.stringify(result.rows, null, 2) }],
|
||||
isError: false,
|
||||
};
|
||||
} catch (error) {
|
||||
throw error;
|
||||
} finally {
|
||||
client
|
||||
.query("ROLLBACK")
|
||||
.catch((error) =>
|
||||
console.warn("Could not roll back transaction:", error),
|
||||
);
|
||||
|
||||
client.release();
|
||||
}
|
||||
}
|
||||
throw new Error(`Unknown tool: ${request.params.name}`);
|
||||
});
|
||||
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
}
|
||||
|
||||
runServer().catch(console.error);
|
||||
@@ -1,30 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-postgres",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for interacting with PostgreSQL databases",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-postgres": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.0.1",
|
||||
"pg": "^8.13.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/pg": "^8.11.10",
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
FROM node:22-bookworm-slim
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
# for arm64 support we need to install chromium provided by debian
|
||||
# npm ERR! The chromium binary is not available for arm64.
|
||||
# https://github.com/puppeteer/puppeteer/issues/7740
|
||||
|
||||
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD true
|
||||
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y wget gnupg && \
|
||||
apt-get install -y fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-kacst fonts-freefont-ttf libxss1 \
|
||||
libgtk2.0-0 libnss3 libatk-bridge2.0-0 libdrm2 libxkbcommon0 libgbm1 libasound2 && \
|
||||
apt-get install -y chromium && \
|
||||
apt-get clean
|
||||
|
||||
COPY src/puppeteer /project
|
||||
COPY tsconfig.json /tsconfig.json
|
||||
|
||||
WORKDIR /project
|
||||
|
||||
RUN npm install
|
||||
|
||||
ENTRYPOINT ["node", "dist/index.js"]
|
||||
@@ -1,143 +0,0 @@
|
||||
# Puppeteer
|
||||
|
||||
A Model Context Protocol server that provides browser automation capabilities using Puppeteer. This server enables LLMs to interact with web pages, take screenshots, and execute JavaScript in a real browser environment.
|
||||
|
||||
## Components
|
||||
|
||||
### Tools
|
||||
|
||||
- **puppeteer_navigate**
|
||||
- Navigate to any URL in the browser
|
||||
- Inputs:
|
||||
- `url` (string, required): URL to navigate to
|
||||
- `launchOptions` (object, optional): PuppeteerJS LaunchOptions. Default null. If changed and not null, browser restarts. Example: `{ headless: true, args: ['--user-data-dir="C:/Data"'] }`
|
||||
- `allowDangerous` (boolean, optional): Allow dangerous LaunchOptions that reduce security. When false, dangerous args like `--no-sandbox`, `--disable-web-security` will throw errors. Default false.
|
||||
|
||||
- **puppeteer_screenshot**
|
||||
- Capture screenshots of the entire page or specific elements
|
||||
- Inputs:
|
||||
- `name` (string, required): Name for the screenshot
|
||||
- `selector` (string, optional): CSS selector for element to screenshot
|
||||
- `width` (number, optional, default: 800): Screenshot width
|
||||
- `height` (number, optional, default: 600): Screenshot height
|
||||
|
||||
- **puppeteer_click**
|
||||
- Click elements on the page
|
||||
- Input: `selector` (string): CSS selector for element to click
|
||||
|
||||
- **puppeteer_hover**
|
||||
- Hover elements on the page
|
||||
- Input: `selector` (string): CSS selector for element to hover
|
||||
|
||||
- **puppeteer_fill**
|
||||
- Fill out input fields
|
||||
- Inputs:
|
||||
- `selector` (string): CSS selector for input field
|
||||
- `value` (string): Value to fill
|
||||
|
||||
- **puppeteer_select**
|
||||
- Select an element with SELECT tag
|
||||
- Inputs:
|
||||
- `selector` (string): CSS selector for element to select
|
||||
- `value` (string): Value to select
|
||||
|
||||
- **puppeteer_evaluate**
|
||||
- Execute JavaScript in the browser console
|
||||
- Input: `script` (string): JavaScript code to execute
|
||||
|
||||
### Resources
|
||||
|
||||
The server provides access to two types of resources:
|
||||
|
||||
1. **Console Logs** (`console://logs`)
|
||||
- Browser console output in text format
|
||||
- Includes all console messages from the browser
|
||||
|
||||
2. **Screenshots** (`screenshot://<name>`)
|
||||
- PNG images of captured screenshots
|
||||
- Accessible via the screenshot name specified during capture
|
||||
|
||||
## Key Features
|
||||
|
||||
- Browser automation
|
||||
- Console log monitoring
|
||||
- Screenshot capabilities
|
||||
- JavaScript execution
|
||||
- Basic web interaction (navigation, clicking, form filling)
|
||||
- Customizable Puppeteer launch options
|
||||
|
||||
## Configuration to use Puppeteer Server
|
||||
Here's the Claude Desktop configuration to use the Puppeter server:
|
||||
|
||||
### Docker
|
||||
|
||||
**NOTE** The docker implementation will use headless chromium, where as the NPX version will open a browser window.
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"puppeteer": {
|
||||
"command": "docker",
|
||||
"args": ["run", "-i", "--rm", "--init", "-e", "DOCKER_CONTAINER=true", "mcp/puppeteer"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"puppeteer": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@modelcontextprotocol/server-puppeteer"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Launch Options
|
||||
|
||||
You can customize Puppeteer's browser behavior in two ways:
|
||||
|
||||
1. **Environment Variable**: Set `PUPPETEER_LAUNCH_OPTIONS` with a JSON-encoded string in the MCP configuration's `env` parameter:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mcp-puppeteer": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@modelcontextprotocol/server-puppeteer"],
|
||||
"env": {
|
||||
"PUPPETEER_LAUNCH_OPTIONS": "{ \"headless\": false, \"executablePath\": \"C:/Program Files/Google/Chrome/Application/chrome.exe\", \"args\": [] }",
|
||||
"ALLOW_DANGEROUS": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. **Tool Call Arguments**: Pass `launchOptions` and `allowDangerous` parameters to the `puppeteer_navigate` tool:
|
||||
|
||||
```json
|
||||
{
|
||||
"url": "https://example.com",
|
||||
"launchOptions": {
|
||||
"headless": false,
|
||||
"defaultViewport": {"width": 1280, "height": 720}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Build
|
||||
|
||||
Docker build:
|
||||
|
||||
```bash
|
||||
docker build -t mcp/puppeteer -f src/puppeteer/Dockerfile .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,484 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListResourcesRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
ReadResourceRequestSchema,
|
||||
CallToolResult,
|
||||
TextContent,
|
||||
ImageContent,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import puppeteer, { Browser, Page } from "puppeteer";
|
||||
|
||||
// Define the tools once to avoid repetition
|
||||
const TOOLS: Tool[] = [
|
||||
{
|
||||
name: "puppeteer_navigate",
|
||||
description: "Navigate to a URL",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
url: { type: "string", description: "URL to navigate to" },
|
||||
launchOptions: { type: "object", description: "PuppeteerJS LaunchOptions. Default null. If changed and not null, browser restarts. Example: { headless: true, args: ['--no-sandbox'] }" },
|
||||
allowDangerous: { type: "boolean", description: "Allow dangerous LaunchOptions that reduce security. When false, dangerous args like --no-sandbox will throw errors. Default false." },
|
||||
},
|
||||
required: ["url"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "puppeteer_screenshot",
|
||||
description: "Take a screenshot of the current page or a specific element",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
name: { type: "string", description: "Name for the screenshot" },
|
||||
selector: { type: "string", description: "CSS selector for element to screenshot" },
|
||||
width: { type: "number", description: "Width in pixels (default: 800)" },
|
||||
height: { type: "number", description: "Height in pixels (default: 600)" },
|
||||
},
|
||||
required: ["name"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "puppeteer_click",
|
||||
description: "Click an element on the page",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
selector: { type: "string", description: "CSS selector for element to click" },
|
||||
},
|
||||
required: ["selector"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "puppeteer_fill",
|
||||
description: "Fill out an input field",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
selector: { type: "string", description: "CSS selector for input field" },
|
||||
value: { type: "string", description: "Value to fill" },
|
||||
},
|
||||
required: ["selector", "value"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "puppeteer_select",
|
||||
description: "Select an element on the page with Select tag",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
selector: { type: "string", description: "CSS selector for element to select" },
|
||||
value: { type: "string", description: "Value to select" },
|
||||
},
|
||||
required: ["selector", "value"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "puppeteer_hover",
|
||||
description: "Hover an element on the page",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
selector: { type: "string", description: "CSS selector for element to hover" },
|
||||
},
|
||||
required: ["selector"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "puppeteer_evaluate",
|
||||
description: "Execute JavaScript in the browser console",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
script: { type: "string", description: "JavaScript code to execute" },
|
||||
},
|
||||
required: ["script"],
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Global state
|
||||
let browser: Browser | null;
|
||||
let page: Page | null;
|
||||
const consoleLogs: string[] = [];
|
||||
const screenshots = new Map<string, string>();
|
||||
let previousLaunchOptions: any = null;
|
||||
|
||||
async function ensureBrowser({ launchOptions, allowDangerous }: any) {
|
||||
|
||||
const DANGEROUS_ARGS = [
|
||||
'--no-sandbox',
|
||||
'--disable-setuid-sandbox',
|
||||
'--single-process',
|
||||
'--disable-web-security',
|
||||
'--ignore-certificate-errors',
|
||||
'--disable-features=IsolateOrigins',
|
||||
'--disable-site-isolation-trials',
|
||||
'--allow-running-insecure-content'
|
||||
];
|
||||
|
||||
// Parse environment config safely
|
||||
let envConfig = {};
|
||||
try {
|
||||
envConfig = JSON.parse(process.env.PUPPETEER_LAUNCH_OPTIONS || '{}');
|
||||
} catch (error: any) {
|
||||
console.warn('Failed to parse PUPPETEER_LAUNCH_OPTIONS:', error?.message || error);
|
||||
}
|
||||
|
||||
// Deep merge environment config with user-provided options
|
||||
const mergedConfig = deepMerge(envConfig, launchOptions || {});
|
||||
|
||||
// Security validation for merged config
|
||||
if (mergedConfig?.args) {
|
||||
const dangerousArgs = mergedConfig.args?.filter?.((arg: string) => DANGEROUS_ARGS.some((dangerousArg: string) => arg.startsWith(dangerousArg)));
|
||||
if (dangerousArgs?.length > 0 && !(allowDangerous || (process.env.ALLOW_DANGEROUS === 'true'))) {
|
||||
throw new Error(`Dangerous browser arguments detected: ${dangerousArgs.join(', ')}. Fround from environment variable and tool call argument. ` +
|
||||
'Set allowDangerous: true in the tool call arguments to override.');
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
if ((browser && !browser.connected) ||
|
||||
(launchOptions && (JSON.stringify(launchOptions) != JSON.stringify(previousLaunchOptions)))) {
|
||||
await browser?.close();
|
||||
browser = null;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
browser = null;
|
||||
}
|
||||
|
||||
previousLaunchOptions = launchOptions;
|
||||
|
||||
if (!browser) {
|
||||
const npx_args = { headless: false }
|
||||
const docker_args = { headless: true, args: ["--no-sandbox", "--single-process", "--no-zygote"] }
|
||||
browser = await puppeteer.launch(deepMerge(
|
||||
process.env.DOCKER_CONTAINER ? docker_args : npx_args,
|
||||
mergedConfig
|
||||
));
|
||||
const pages = await browser.pages();
|
||||
page = pages[0];
|
||||
|
||||
page.on("console", (msg) => {
|
||||
const logEntry = `[${msg.type()}] ${msg.text()}`;
|
||||
consoleLogs.push(logEntry);
|
||||
server.notification({
|
||||
method: "notifications/resources/updated",
|
||||
params: { uri: "console://logs" },
|
||||
});
|
||||
});
|
||||
}
|
||||
return page!;
|
||||
}
|
||||
|
||||
// Deep merge utility function
|
||||
function deepMerge(target: any, source: any): any {
|
||||
const output = Object.assign({}, target);
|
||||
if (typeof target !== 'object' || typeof source !== 'object') return source;
|
||||
|
||||
for (const key of Object.keys(source)) {
|
||||
const targetVal = target[key];
|
||||
const sourceVal = source[key];
|
||||
if (Array.isArray(targetVal) && Array.isArray(sourceVal)) {
|
||||
// Deduplicate args/ignoreDefaultArgs, prefer source values
|
||||
output[key] = [...new Set([
|
||||
...(key === 'args' || key === 'ignoreDefaultArgs' ?
|
||||
targetVal.filter((arg: string) => !sourceVal.some((launchArg: string) => arg.startsWith('--') && launchArg.startsWith(arg.split('=')[0]))) :
|
||||
targetVal),
|
||||
...sourceVal
|
||||
])];
|
||||
} else if (sourceVal instanceof Object && key in target) {
|
||||
output[key] = deepMerge(targetVal, sourceVal);
|
||||
} else {
|
||||
output[key] = sourceVal;
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
mcpHelper: {
|
||||
logs: string[],
|
||||
originalConsole: Partial<typeof console>,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function handleToolCall(name: string, args: any): Promise<CallToolResult> {
|
||||
const page = await ensureBrowser(args);
|
||||
|
||||
switch (name) {
|
||||
case "puppeteer_navigate":
|
||||
await page.goto(args.url);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Navigated to ${args.url}`,
|
||||
}],
|
||||
isError: false,
|
||||
};
|
||||
|
||||
case "puppeteer_screenshot": {
|
||||
const width = args.width ?? 800;
|
||||
const height = args.height ?? 600;
|
||||
await page.setViewport({ width, height });
|
||||
|
||||
const screenshot = await (args.selector ?
|
||||
(await page.$(args.selector))?.screenshot({ encoding: "base64" }) :
|
||||
page.screenshot({ encoding: "base64", fullPage: false }));
|
||||
|
||||
if (!screenshot) {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: args.selector ? `Element not found: ${args.selector}` : "Screenshot failed",
|
||||
}],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
screenshots.set(args.name, screenshot as string);
|
||||
server.notification({
|
||||
method: "notifications/resources/list_changed",
|
||||
});
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Screenshot '${args.name}' taken at ${width}x${height}`,
|
||||
} as TextContent,
|
||||
{
|
||||
type: "image",
|
||||
data: screenshot,
|
||||
mimeType: "image/png",
|
||||
} as ImageContent,
|
||||
],
|
||||
isError: false,
|
||||
};
|
||||
}
|
||||
|
||||
case "puppeteer_click":
|
||||
try {
|
||||
await page.click(args.selector);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Clicked: ${args.selector}`,
|
||||
}],
|
||||
isError: false,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Failed to click ${args.selector}: ${(error as Error).message}`,
|
||||
}],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
case "puppeteer_fill":
|
||||
try {
|
||||
await page.waitForSelector(args.selector);
|
||||
await page.type(args.selector, args.value);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Filled ${args.selector} with: ${args.value}`,
|
||||
}],
|
||||
isError: false,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Failed to fill ${args.selector}: ${(error as Error).message}`,
|
||||
}],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
case "puppeteer_select":
|
||||
try {
|
||||
await page.waitForSelector(args.selector);
|
||||
await page.select(args.selector, args.value);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Selected ${args.selector} with: ${args.value}`,
|
||||
}],
|
||||
isError: false,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Failed to select ${args.selector}: ${(error as Error).message}`,
|
||||
}],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
case "puppeteer_hover":
|
||||
try {
|
||||
await page.waitForSelector(args.selector);
|
||||
await page.hover(args.selector);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Hovered ${args.selector}`,
|
||||
}],
|
||||
isError: false,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Failed to hover ${args.selector}: ${(error as Error).message}`,
|
||||
}],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
case "puppeteer_evaluate":
|
||||
try {
|
||||
await page.evaluate(() => {
|
||||
window.mcpHelper = {
|
||||
logs: [],
|
||||
originalConsole: { ...console },
|
||||
};
|
||||
|
||||
['log', 'info', 'warn', 'error'].forEach(method => {
|
||||
(console as any)[method] = (...args: any[]) => {
|
||||
window.mcpHelper.logs.push(`[${method}] ${args.join(' ')}`);
|
||||
(window.mcpHelper.originalConsole as any)[method](...args);
|
||||
};
|
||||
});
|
||||
});
|
||||
|
||||
const result = await page.evaluate(args.script);
|
||||
|
||||
const logs = await page.evaluate(() => {
|
||||
Object.assign(console, window.mcpHelper.originalConsole);
|
||||
const logs = window.mcpHelper.logs;
|
||||
delete (window as any).mcpHelper;
|
||||
return logs;
|
||||
});
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Execution result:\n${JSON.stringify(result, null, 2)}\n\nConsole output:\n${logs.join('\n')}`,
|
||||
},
|
||||
],
|
||||
isError: false,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Script execution failed: ${(error as Error).message}`,
|
||||
}],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
default:
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Unknown tool: ${name}`,
|
||||
}],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const server = new Server(
|
||||
{
|
||||
name: "example-servers/puppeteer",
|
||||
version: "0.1.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
resources: {},
|
||||
tools: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
|
||||
// Setup request handlers
|
||||
server.setRequestHandler(ListResourcesRequestSchema, async () => ({
|
||||
resources: [
|
||||
{
|
||||
uri: "console://logs",
|
||||
mimeType: "text/plain",
|
||||
name: "Browser console logs",
|
||||
},
|
||||
...Array.from(screenshots.keys()).map(name => ({
|
||||
uri: `screenshot://${name}`,
|
||||
mimeType: "image/png",
|
||||
name: `Screenshot: ${name}`,
|
||||
})),
|
||||
],
|
||||
}));
|
||||
|
||||
server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
|
||||
const uri = request.params.uri.toString();
|
||||
|
||||
if (uri === "console://logs") {
|
||||
return {
|
||||
contents: [{
|
||||
uri,
|
||||
mimeType: "text/plain",
|
||||
text: consoleLogs.join("\n"),
|
||||
}],
|
||||
};
|
||||
}
|
||||
|
||||
if (uri.startsWith("screenshot://")) {
|
||||
const name = uri.split("://")[1];
|
||||
const screenshot = screenshots.get(name);
|
||||
if (screenshot) {
|
||||
return {
|
||||
contents: [{
|
||||
uri,
|
||||
mimeType: "image/png",
|
||||
blob: screenshot,
|
||||
}],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Resource not found: ${uri}`);
|
||||
});
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: TOOLS,
|
||||
}));
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) =>
|
||||
handleToolCall(request.params.name, request.params.arguments ?? {})
|
||||
);
|
||||
|
||||
async function runServer() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
}
|
||||
|
||||
runServer().catch(console.error);
|
||||
|
||||
process.stdin.on("close", () => {
|
||||
console.error("Puppeteer MCP Server closed");
|
||||
server.close();
|
||||
});
|
||||
@@ -1,29 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-puppeteer",
|
||||
"version": "0.6.2",
|
||||
"description": "MCP server for browser automation using Puppeteer",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"mcp-server-puppeteer": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x dist/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.0.1",
|
||||
"puppeteer": "^23.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./dist",
|
||||
"rootDir": "."
|
||||
},
|
||||
"include": [
|
||||
"./**/*.ts"
|
||||
]
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
FROM node:22.12-alpine as builder
|
||||
|
||||
COPY src/redis /app
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.npm npm install
|
||||
|
||||
RUN npm run build
|
||||
|
||||
FROM node:22-alpine AS release
|
||||
|
||||
COPY --from=builder /app/build /app/build
|
||||
COPY --from=builder /app/package.json /app/package.json
|
||||
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN npm ci --ignore-scripts --omit-dev
|
||||
|
||||
ENTRYPOINT ["node", "build/index.js"]
|
||||
@@ -1,105 +0,0 @@
|
||||
# Redis
|
||||
|
||||
A Model Context Protocol server that provides access to Redis databases. This server enables LLMs to interact with Redis key-value stores through a set of standardized tools.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. Redis server must be installed and running
|
||||
- [Download Redis](https://redis.io/download)
|
||||
- For Windows users: Use [Windows Subsystem for Linux (WSL)](https://redis.io/docs/getting-started/installation/install-redis-on-windows/) or [Memurai](https://www.memurai.com/) (Redis-compatible Windows server)
|
||||
- Default port: 6379
|
||||
|
||||
## Common Issues & Solutions
|
||||
|
||||
### Connection Errors
|
||||
|
||||
**ECONNREFUSED**
|
||||
- **Cause**: Redis server is not running or unreachable
|
||||
- **Solution**:
|
||||
- Verify Redis is running: `redis-cli ping` should return "PONG"
|
||||
- Check Redis service status: `systemctl status redis` (Linux) or `brew services list` (macOS)
|
||||
- Ensure correct port (default 6379) is not blocked by firewall
|
||||
- Verify Redis URL format: `redis://hostname:port`
|
||||
|
||||
### Server Behavior
|
||||
|
||||
- The server implements exponential backoff with a maximum of 5 retries
|
||||
- Initial retry delay: 1 second, maximum delay: 30 seconds
|
||||
- Server will exit after max retries to prevent infinite reconnection loops
|
||||
|
||||
## Components
|
||||
|
||||
### Tools
|
||||
|
||||
- **set**
|
||||
- Set a Redis key-value pair with optional expiration
|
||||
- Input:
|
||||
- `key` (string): Redis key
|
||||
- `value` (string): Value to store
|
||||
- `expireSeconds` (number, optional): Expiration time in seconds
|
||||
|
||||
- **get**
|
||||
- Get value by key from Redis
|
||||
- Input: `key` (string): Redis key to retrieve
|
||||
|
||||
- **delete**
|
||||
- Delete one or more keys from Redis
|
||||
- Input: `key` (string | string[]): Key or array of keys to delete
|
||||
|
||||
- **list**
|
||||
- List Redis keys matching a pattern
|
||||
- Input: `pattern` (string, optional): Pattern to match keys (default: *)
|
||||
|
||||
## Usage with Claude Desktop
|
||||
|
||||
To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`:
|
||||
|
||||
### Docker
|
||||
|
||||
* when running docker on macos, use host.docker.internal if the server is running on the host network (eg localhost)
|
||||
* Redis URL can be specified as an argument, defaults to "redis://localhost:6379"
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"redis": {
|
||||
"command": "docker",
|
||||
"args": [
|
||||
"run",
|
||||
"-i",
|
||||
"--rm",
|
||||
"mcp/redis",
|
||||
"redis://host.docker.internal:6379"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### NPX
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"redis": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-redis",
|
||||
"redis://localhost:6379"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
Docker:
|
||||
|
||||
```sh
|
||||
docker build -t mcp/redis -f src/redis/Dockerfile .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||
@@ -1,31 +0,0 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-redis",
|
||||
"version": "0.1.0",
|
||||
"description": "MCP server for using Redis",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
"homepage": "https://modelcontextprotocol.io",
|
||||
"bugs": "https://github.com/modelcontextprotocol/servers/issues",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"redis": "./build/index.js"
|
||||
},
|
||||
"files": [
|
||||
"build"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && shx chmod +x build/*.js",
|
||||
"prepare": "npm run build",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.7.0",
|
||||
"@types/node": "^22.10.2",
|
||||
"@types/redis": "^4.0.10",
|
||||
"redis": "^4.7.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.7.2"
|
||||
}
|
||||
}
|
||||
@@ -1,286 +0,0 @@
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import { z } from "zod";
|
||||
import { createClient } from 'redis';
|
||||
|
||||
// Configuration
|
||||
const REDIS_URL = process.argv[2] || "redis://localhost:6379";
|
||||
const MAX_RETRIES = 5;
|
||||
const MIN_RETRY_DELAY = 1000; // 1 second
|
||||
const MAX_RETRY_DELAY = 30000; // 30 seconds
|
||||
|
||||
// Create Redis client with retry strategy
|
||||
const redisClient = createClient({
|
||||
url: REDIS_URL,
|
||||
socket: {
|
||||
reconnectStrategy: (retries) => {
|
||||
if (retries >= MAX_RETRIES) {
|
||||
console.error(`Maximum retries (${MAX_RETRIES}) reached. Giving up.`);
|
||||
return new Error('Max retries reached');
|
||||
}
|
||||
const delay = Math.min(Math.pow(2, retries) * MIN_RETRY_DELAY, MAX_RETRY_DELAY);
|
||||
console.error(`Reconnection attempt ${retries + 1}/${MAX_RETRIES} in ${delay}ms`);
|
||||
return delay;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Define Zod schemas for validation
|
||||
const SetArgumentsSchema = z.object({
|
||||
key: z.string(),
|
||||
value: z.string(),
|
||||
expireSeconds: z.number().optional(),
|
||||
});
|
||||
|
||||
const GetArgumentsSchema = z.object({
|
||||
key: z.string(),
|
||||
});
|
||||
|
||||
const DeleteArgumentsSchema = z.object({
|
||||
key: z.string().or(z.array(z.string())),
|
||||
});
|
||||
|
||||
const ListArgumentsSchema = z.object({
|
||||
pattern: z.string().default("*"),
|
||||
});
|
||||
|
||||
// Create server instance
|
||||
const server = new Server(
|
||||
{
|
||||
name: "redis",
|
||||
version: "0.0.1"
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
// List available tools
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: "set",
|
||||
description: "Set a Redis key-value pair with optional expiration",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
key: {
|
||||
type: "string",
|
||||
description: "Redis key",
|
||||
},
|
||||
value: {
|
||||
type: "string",
|
||||
description: "Value to store",
|
||||
},
|
||||
expireSeconds: {
|
||||
type: "number",
|
||||
description: "Optional expiration time in seconds",
|
||||
},
|
||||
},
|
||||
required: ["key", "value"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "get",
|
||||
description: "Get value by key from Redis",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
key: {
|
||||
type: "string",
|
||||
description: "Redis key to retrieve",
|
||||
},
|
||||
},
|
||||
required: ["key"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "delete",
|
||||
description: "Delete one or more keys from Redis",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
key: {
|
||||
oneOf: [
|
||||
{ type: "string" },
|
||||
{ type: "array", items: { type: "string" } }
|
||||
],
|
||||
description: "Key or array of keys to delete",
|
||||
},
|
||||
},
|
||||
required: ["key"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "list",
|
||||
description: "List Redis keys matching a pattern",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
pattern: {
|
||||
type: "string",
|
||||
description: "Pattern to match keys (default: *)",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
// Handle tool execution
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
const { name, arguments: args } = request.params;
|
||||
|
||||
try {
|
||||
if (name === "set") {
|
||||
const { key, value, expireSeconds } = SetArgumentsSchema.parse(args);
|
||||
|
||||
if (expireSeconds) {
|
||||
await redisClient.setEx(key, expireSeconds, value);
|
||||
} else {
|
||||
await redisClient.set(key, value);
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Successfully set key: ${key}`,
|
||||
},
|
||||
],
|
||||
};
|
||||
} else if (name === "get") {
|
||||
const { key } = GetArgumentsSchema.parse(args);
|
||||
const value = await redisClient.get(key);
|
||||
|
||||
if (value === null) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Key not found: ${key}`,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `${value}`,
|
||||
},
|
||||
],
|
||||
};
|
||||
} else if (name === "delete") {
|
||||
const { key } = DeleteArgumentsSchema.parse(args);
|
||||
|
||||
if (Array.isArray(key)) {
|
||||
await redisClient.del(key);
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Successfully deleted ${key.length} keys`,
|
||||
},
|
||||
],
|
||||
};
|
||||
} else {
|
||||
await redisClient.del(key);
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Successfully deleted key: ${key}`,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
} else if (name === "list") {
|
||||
const { pattern } = ListArgumentsSchema.parse(args);
|
||||
const keys = await redisClient.keys(pattern);
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: keys.length > 0
|
||||
? `Found keys:\n${keys.join('\n')}`
|
||||
: "No keys found matching pattern",
|
||||
},
|
||||
],
|
||||
};
|
||||
} else {
|
||||
throw new Error(`Unknown tool: ${name}`);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
throw new Error(
|
||||
`Invalid arguments: ${error.errors
|
||||
.map((e) => `${e.path.join(".")}: ${e.message}`)
|
||||
.join(", ")}`
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
// Start the server
|
||||
async function main() {
|
||||
try {
|
||||
// Set up Redis event handlers
|
||||
redisClient.on('error', (err: Error) => {
|
||||
console.error('Redis Client Error:', err);
|
||||
});
|
||||
|
||||
redisClient.on('connect', () => {
|
||||
console.error(`Connected to Redis at ${REDIS_URL}`);
|
||||
});
|
||||
|
||||
redisClient.on('reconnecting', () => {
|
||||
console.error('Attempting to reconnect to Redis...');
|
||||
});
|
||||
|
||||
redisClient.on('end', () => {
|
||||
console.error('Redis connection closed');
|
||||
});
|
||||
|
||||
// Connect to Redis
|
||||
await redisClient.connect();
|
||||
|
||||
// Set up MCP server
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
console.error("Redis MCP Server running on stdio");
|
||||
} catch (error) {
|
||||
console.error("Error during startup:", error);
|
||||
await cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup function
|
||||
async function cleanup() {
|
||||
try {
|
||||
await redisClient.quit();
|
||||
} catch (error) {
|
||||
console.error("Error during cleanup:", error);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Handle process termination
|
||||
process.on('SIGINT', cleanup);
|
||||
process.on('SIGTERM', cleanup);
|
||||
|
||||
main().catch((error) => {
|
||||
console.error("Fatal error in main():", error);
|
||||
cleanup();
|
||||
});
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "Node16",
|
||||
"moduleResolution": "Node16",
|
||||
"outDir": "./build",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
3.10
|
||||
@@ -1,37 +0,0 @@
|
||||
# syntax=docker/dockerfile:1

# Build stage: a Python image with uv pre-installed.
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv

# Install the project into `/app`
WORKDIR /app

# Compile bytecode at install time for faster container startup.
ENV UV_COMPILE_BYTECODE=1

# Copy from the cache instead of linking since it's a mounted volume.
ENV UV_LINK_MODE=copy

# Install the project's dependencies using the lockfile and settings.
# Binding only the manifests keeps this layer cached until they change.
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev --no-editable

# Then, add the rest of the project source code and install it.
# Installing separately from its dependencies allows optimal layer caching.
# (COPY, not ADD: plain local files need none of ADD's extra behaviors.)
COPY . /app
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable

# Runtime stage: minimal image carrying only the built virtualenv.
FROM python:3.12-slim-bookworm

WORKDIR /app

# Create an unprivileged user first: `COPY --chown=app:app` fails the build
# unless the named user exists in this stage's /etc/passwd, and the server
# has no need to run as root.
RUN groupadd --system app && useradd --system --gid app --home /app app

# NOTE(review): /root/.local is unreadable once USER drops to `app`;
# presumably only /app/.venv is needed at runtime — confirm and drop if so.
COPY --from=uv /root/.local /root/.local
COPY --from=uv --chown=app:app /app/.venv /app/.venv

# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"

# Run as the unprivileged user for the server process.
USER app

# when running the container, add --db-path and a bind mount to the host's db file
ENTRYPOINT ["mcp-server-sentry"]
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user