mirror of
https://github.com/modelcontextprotocol/servers.git
synced 2026-04-17 23:53:24 +02:00
Merge branch 'main' into patch-1
This commit is contained in:
221
.github/workflows/release.yml
vendored
Normal file
221
.github/workflows/release.yml
vendored
Normal file
@@ -0,0 +1,221 @@
---
# Automatic Release Creation
#
# Nightly (and on-demand) release pipeline:
#   create-metadata  -> compute last-release hash, new version, and the
#                       npm/pypi package matrices via scripts/release.py
#   update-packages  -> bump package versions, commit, tag, and push the tag
#   publish-pypi     -> build + publish each changed Python package (trusted publishing)
#   publish-npm      -> build + publish each changed npm package
#   create-release   -> create the GitHub release and trigger Docker image builds
name: Automatic Release Creation

on:
  workflow_dispatch:
  schedule:
    - cron: '0 10 * * *'

jobs:
  create-metadata:
    runs-on: ubuntu-latest
    outputs:
      hash: ${{ steps.last-release.outputs.hash }}
      version: ${{ steps.create-version.outputs.version }}
      npm_packages: ${{ steps.create-npm-packages.outputs.npm_packages }}
      pypi_packages: ${{ steps.create-pypi-packages.outputs.pypi_packages }}
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history is required so git rev-list can see all tags.
          fetch-depth: 0

      - name: Get last release hash
        id: last-release
        run: |
          # Fall back to HEAD~1 when no tag exists yet (first release).
          HASH=$(git rev-list --tags --max-count=1 || echo "HEAD~1")
          echo "hash=${HASH}" >> $GITHUB_OUTPUT
          echo "Using last release hash: ${HASH}"

      - name: Install uv
        uses: astral-sh/setup-uv@v5

      - name: Create version name
        id: create-version
        run: |
          VERSION=$(uv run --script scripts/release.py generate-version)
          echo "version $VERSION"
          echo "version=$VERSION" >> $GITHUB_OUTPUT

      - name: Create notes
        run: |
          HASH="${{ steps.last-release.outputs.hash }}"
          uv run --script scripts/release.py generate-notes --directory src/ $HASH > RELEASE_NOTES.md
          cat RELEASE_NOTES.md

      # Stash the notes as an artifact so create-release can pick them up
      # after the intervening publish jobs.
      - name: Release notes
        uses: actions/upload-artifact@v4
        with:
          name: release-notes
          path: RELEASE_NOTES.md

      - name: Create python matrix
        id: create-pypi-packages
        run: |
          HASH="${{ steps.last-release.outputs.hash }}"
          PYPI=$(uv run --script scripts/release.py generate-matrix --pypi --directory src $HASH)
          echo "pypi_packages $PYPI"
          echo "pypi_packages=$PYPI" >> $GITHUB_OUTPUT

      - name: Create npm matrix
        id: create-npm-packages
        run: |
          HASH="${{ steps.last-release.outputs.hash }}"
          NPM=$(uv run --script scripts/release.py generate-matrix --npm --directory src $HASH)
          echo "npm_packages $NPM"
          echo "npm_packages=$NPM" >> $GITHUB_OUTPUT

  update-packages:
    needs: [create-metadata]
    # Skip entirely when neither matrix has any changed packages.
    if: ${{ needs.create-metadata.outputs.npm_packages != '[]' || needs.create-metadata.outputs.pypi_packages != '[]' }}
    runs-on: ubuntu-latest
    environment: release
    outputs:
      changes_made: ${{ steps.commit.outputs.changes_made }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install uv
        uses: astral-sh/setup-uv@v5

      - name: Update packages
        run: |
          HASH="${{ needs.create-metadata.outputs.hash }}"
          uv run --script scripts/release.py update-packages --directory src/ $HASH

      - name: Configure git
        run: |
          git config --global user.name "GitHub Actions"
          git config --global user.email "actions@github.com"

      - name: Commit changes
        id: commit
        run: |
          VERSION="${{ needs.create-metadata.outputs.version }}"
          git add -u
          if git diff-index --quiet HEAD; then
            echo "changes_made=false" >> $GITHUB_OUTPUT
          else
            git commit -m 'Automatic update of packages'
            git tag -a "$VERSION" -m "Release $VERSION"
            # Only the tag is pushed; later jobs check out this tag ref.
            git push origin "$VERSION"
            echo "changes_made=true" >> $GITHUB_OUTPUT
          fi

  publish-pypi:
    needs: [update-packages, create-metadata]
    strategy:
      fail-fast: false
      matrix:
        package: ${{ fromJson(needs.create-metadata.outputs.pypi_packages) }}
    name: Build ${{ matrix.package }}
    environment: release
    permissions:
      id-token: write  # Required for trusted publishing
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # Check out the freshly pushed release tag, not the branch tip.
          ref: ${{ needs.create-metadata.outputs.version }}

      - name: Install uv
        uses: astral-sh/setup-uv@v5

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version-file: "src/${{ matrix.package }}/.python-version"

      - name: Install dependencies
        working-directory: src/${{ matrix.package }}
        run: uv sync --frozen --all-extras --dev

      - name: Run pyright
        working-directory: src/${{ matrix.package }}
        run: uv run --frozen pyright

      - name: Build package
        working-directory: src/${{ matrix.package }}
        run: uv build

      - name: Publish package to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: src/${{ matrix.package }}/dist

  publish-npm:
    needs: [update-packages, create-metadata]
    strategy:
      fail-fast: false
      matrix:
        package: ${{ fromJson(needs.create-metadata.outputs.npm_packages) }}
    name: Build ${{ matrix.package }}
    environment: release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ needs.create-metadata.outputs.version }}

      - uses: actions/setup-node@v4
        with:
          node-version: 22
          cache: npm
          registry-url: 'https://registry.npmjs.org'

      - name: Install dependencies
        working-directory: src/${{ matrix.package }}
        run: npm ci

      - name: Check if version exists on npm
        working-directory: src/${{ matrix.package }}
        run: |
          VERSION=$(jq -r .version package.json)
          if npm view --json | jq -e --arg version "$VERSION" '[.[]][0].versions | contains([$version])'; then
            echo "Version $VERSION already exists on npm"
            exit 1
          fi
          echo "Version $VERSION is new, proceeding with publish"

      - name: Build package
        working-directory: src/${{ matrix.package }}
        run: npm run build

      - name: Publish package
        working-directory: src/${{ matrix.package }}
        run: |
          npm publish --access public
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  create-release:
    needs: [update-packages, create-metadata, publish-pypi, publish-npm]
    if: needs.update-packages.outputs.changes_made == 'true'
    runs-on: ubuntu-latest
    environment: release
    permissions:
      contents: write  # Required by `gh release create`
    steps:
      - uses: actions/checkout@v4

      - name: Download release notes
        uses: actions/download-artifact@v4
        with:
          name: release-notes

      - name: Create release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          VERSION="${{ needs.create-metadata.outputs.version }}"
          gh release create "$VERSION" \
            --title "Release $VERSION" \
            --notes-file RELEASE_NOTES.md

      # Kick off downstream Docker MCP image builds in the Docker org repo.
      - name: Docker MCP images
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DOCKER_TOKEN }}
          repository: docker/labs-ai-tools-for-devs
          event-type: build-mcp-images
          client-payload: '{"ref": "${{ needs.create-metadata.outputs.version }}"}'
@@ -10,6 +10,8 @@ The repository contains reference implementations, as well as a list of communit
|
|||||||
We generally don't accept new servers into the repository. We do accept pull requests to the [README.md](./README.md)
|
We generally don't accept new servers into the repository. We do accept pull requests to the [README.md](./README.md)
|
||||||
adding a reference to your servers.
|
adding a reference to your servers.
|
||||||
|
|
||||||
|
Please keep lists in alphabetical order to minimize merge conflicts when adding new items.
|
||||||
|
|
||||||
- Check the [modelcontextprotocol.io](https://modelcontextprotocol.io) documentation
|
- Check the [modelcontextprotocol.io](https://modelcontextprotocol.io) documentation
|
||||||
- Ensure your server doesn't duplicate existing functionality
|
- Ensure your server doesn't duplicate existing functionality
|
||||||
- Consider whether your server would be generally useful to others
|
- Consider whether your server would be generally useful to others
|
||||||
|
|||||||
209
README.md
209
README.md
@@ -6,9 +6,11 @@ to community built servers and additional resources.
|
|||||||
The servers in this repository showcase the versatility and extensibility of MCP, demonstrating how it can be used to give Large Language Models (LLMs) secure, controlled access to tools and data sources.
|
The servers in this repository showcase the versatility and extensibility of MCP, demonstrating how it can be used to give Large Language Models (LLMs) secure, controlled access to tools and data sources.
|
||||||
Each MCP server is implemented with either the [Typescript MCP SDK](https://github.com/modelcontextprotocol/typescript-sdk) or [Python MCP SDK](https://github.com/modelcontextprotocol/python-sdk).
|
Each MCP server is implemented with either the [Typescript MCP SDK](https://github.com/modelcontextprotocol/typescript-sdk) or [Python MCP SDK](https://github.com/modelcontextprotocol/python-sdk).
|
||||||
|
|
||||||
|
> Note: Lists in this README are maintained in alphabetical order to minimize merge conflicts when adding new items.
|
||||||
|
|
||||||
## 🌟 Reference Servers
|
## 🌟 Reference Servers
|
||||||
|
|
||||||
These servers aim to demonstrate MCP features and the Typescript and Python SDK.
|
These servers aim to demonstrate MCP features and the TypeScript and Python SDKs.
|
||||||
|
|
||||||
- **[AWS KB Retrieval](src/aws-kb-retrieval-server)** - Retrieval from AWS Knowledge Base using Bedrock Agent Runtime
|
- **[AWS KB Retrieval](src/aws-kb-retrieval-server)** - Retrieval from AWS Knowledge Base using Bedrock Agent Runtime
|
||||||
- **[Brave Search](src/brave-search)** - Web and local search using Brave's Search API
|
- **[Brave Search](src/brave-search)** - Web and local search using Brave's Search API
|
||||||
@@ -36,22 +38,34 @@ These servers aim to demonstrate MCP features and the Typescript and Python SDK.
|
|||||||
|
|
||||||
Official integrations are maintained by companies building production ready MCP servers for their platforms.
|
Official integrations are maintained by companies building production ready MCP servers for their platforms.
|
||||||
|
|
||||||
|
- <img height="12" width="12" src="https://apify.com/favicon.ico" alt="Apify Logo" /> **[Apify](https://github.com/apify/actors-mcp-server)** - [Actors MCP Server](https://apify.com/apify/actors-mcp-server): Use 3,000+ pre-built cloud tools to extract data from websites, e-commerce, social media, search engines, maps, and more
|
||||||
- <img height="12" width="12" src="https://axiom.co/favicon.ico" alt="Axiom Logo" /> **[Axiom](https://github.com/axiomhq/mcp-server-axiom)** - Query and analyze your Axiom logs, traces, and all other event data in natural language
|
- <img height="12" width="12" src="https://axiom.co/favicon.ico" alt="Axiom Logo" /> **[Axiom](https://github.com/axiomhq/mcp-server-axiom)** - Query and analyze your Axiom logs, traces, and all other event data in natural language
|
||||||
- <img height="12" width="12" src="https://browserbase.com/favicon.ico" alt="Browserbase Logo" /> **[Browserbase](https://github.com/browserbase/mcp-server-browserbase)** - Automate browser interactions in the cloud (e.g. web navigation, data extraction, form filling, and more)
|
- <img height="12" width="12" src="https://browserbase.com/favicon.ico" alt="Browserbase Logo" /> **[Browserbase](https://github.com/browserbase/mcp-server-browserbase)** - Automate browser interactions in the cloud (e.g. web navigation, data extraction, form filling, and more)
|
||||||
- <img height="12" width="12" src="https://cdn.simpleicons.org/cloudflare" /> **[Cloudflare](https://github.com/cloudflare/mcp-server-cloudflare)** - Deploy, configure & interrogate your resources on the Cloudflare developer platform (e.g. Workers/KV/R2/D1)
|
- <img height="12" width="12" src="https://cdn.simpleicons.org/cloudflare" /> **[Cloudflare](https://github.com/cloudflare/mcp-server-cloudflare)** - Deploy, configure & interrogate your resources on the Cloudflare developer platform (e.g. Workers/KV/R2/D1)
|
||||||
- **[Raygun](https://github.com/MindscapeHQ/mcp-server-raygun)** - Interact with your crash reporting and real user monitoring data on your Raygun account
|
|
||||||
- **[Obsidian Markdown Notes](https://github.com/calclavia/mcp-obsidian)** - Read and search through your Obsidian vault or any directory containing Markdown notes
|
|
||||||
- <img height="12" width="12" src="https://e2b.dev/favicon.ico" alt="E2B Logo" /> **[E2B](https://github.com/e2b-dev/mcp-server)** - Run code in secure sandboxes hosted by [E2B](https://e2b.dev)
|
- <img height="12" width="12" src="https://e2b.dev/favicon.ico" alt="E2B Logo" /> **[E2B](https://github.com/e2b-dev/mcp-server)** - Run code in secure sandboxes hosted by [E2B](https://e2b.dev)
|
||||||
|
- <img height="12" width="12" src="https://esignatures.com/favicon.ico" alt="eSignatures Logo" /> **[eSignatures](https://github.com/esignaturescom/mcp-server-esignatures)** - Contract and template management for drafting, reviewing, and sending binding contracts.
|
||||||
|
- **[Eunomia](https://github.com/whataboutyou-ai/eunomia-MCP-server)** - Extension of the Eunomia framework that connects Eunomia instruments with MCP servers
|
||||||
- <img height="12" width="12" src="https://exa.ai/images/favicon-32x32.png" alt="Exa Logo" /> **[Exa](https://github.com/exa-labs/exa-mcp-server)** - Search Engine made for AIs by [Exa](https://exa.ai)
|
- <img height="12" width="12" src="https://exa.ai/images/favicon-32x32.png" alt="Exa Logo" /> **[Exa](https://github.com/exa-labs/exa-mcp-server)** - Search Engine made for AIs by [Exa](https://exa.ai)
|
||||||
|
- <img height="12" width="12" src="https://fireproof.storage/favicon.ico" alt="Fireproof Logo" /> **[Fireproof](https://github.com/fireproof-storage/mcp-database-server)** - Immutable ledger database with live synchronization
|
||||||
|
- <img height="12" width="12" src="https://grafana.com/favicon.ico" alt="Grafana Logo" /> **[Grafana](https://github.com/grafana/mcp-grafana)** - Search dashboards, investigate incidents and query datasources in your Grafana instance
|
||||||
|
- **[IBM wxflows](https://github.com/IBM/wxflows/tree/main/examples/mcp/javascript)** - Tool platform by IBM to build, test and deploy tools for any data source
|
||||||
|
- <img height="12" width="12" src="https://integration.app/favicon.ico" alt="Integration App Icon" /> **[Integration App](https://github.com/integration-app/mcp-server)** - Interact with any other SaaS applications on behalf of your customers.
|
||||||
- <img height="12" width="12" src="https://cdn.simpleicons.org/jetbrains" /> **[JetBrains](https://github.com/JetBrains/mcp-jetbrains)** – Work on your code with JetBrains IDEs
|
- <img height="12" width="12" src="https://cdn.simpleicons.org/jetbrains" /> **[JetBrains](https://github.com/JetBrains/mcp-jetbrains)** – Work on your code with JetBrains IDEs
|
||||||
- **[Needle](https://github.com/JANHMS/needle-mcp)** - Production-ready RAG out of the box to search and retrieve data from your own documents.
|
- <img height="12" width="12" src="https://kagi.com/favicon.ico" alt="Kagi Logo" /> **[Kagi Search](https://github.com/kagisearch/kagimcp)** - Search the web using Kagi's search API
|
||||||
- **[Neon](https://github.com/neondatabase/mcp-server-neon)** - Interact with the Neon serverless Postgres platform
|
- <img height="12" width="12" src="https://www.meilisearch.com/favicon.ico" alt="Meilisearch Logo" /> **[Meilisearch](https://github.com/meilisearch/meilisearch-mcp)** - Interact & query with Meilisearch (Full-text & semantic search API)
|
||||||
- <img height="12" width="12" src="https://neo4j.com/favicon.ico" alt="Neo4j Logo" /> **[Neo4j](https://github.com/neo4j-contrib/mcp-neo4j/)** - Neo4j graph database server (schema + read/write-cypher) and separate graph database backed memory
|
|
||||||
- <img height="12" width="12" src="https://www.tinybird.co/favicon.ico" alt="Tinybird Logo" /> **[Tinybird](https://github.com/tinybirdco/mcp-tinybird)** - Interact with Tinybird serverless ClickHouse platform
|
|
||||||
- <img height="12" width="12" src="https://pics.fatwang2.com/56912e614b35093426c515860f9f2234.svg" /> [Search1API](https://github.com/fatwang2/search1api-mcp) - One API for Search, Crawling, and Sitemaps
|
|
||||||
- <img height="12" width="12" src="https://qdrant.tech/img/brand-resources-logos/logomark.svg" /> **[Qdrant](https://github.com/qdrant/mcp-server-qdrant/)** - Implement semantic memory layer on top of the Qdrant vector search engine
|
|
||||||
- <img height="12" width="12" src="https://metoro.io/static/images/logos/Metoro.svg" /> **[Metoro](https://github.com/metoro-io/metoro-mcp-server)** - Query and interact with kubernetes environments monitored by Metoro
|
- <img height="12" width="12" src="https://metoro.io/static/images/logos/Metoro.svg" /> **[Metoro](https://github.com/metoro-io/metoro-mcp-server)** - Query and interact with kubernetes environments monitored by Metoro
|
||||||
|
- <img height="12" width="12" src="https://www.motherduck.com/favicon.ico" alt="MotherDuck Logo" /> **[MotherDuck](https://github.com/motherduckdb/mcp-server-motherduck)** - Query and analyze data with MotherDuck and local DuckDB
|
||||||
|
- <img height="12" width="12" src="https://needle-ai.com/images/needle-logo-orange-2-rounded.png" alt="Needle AI Logo" /> **[Needle](https://github.com/needle-ai/needle-mcp)** - Production-ready RAG out of the box to search and retrieve data from your own documents.
|
||||||
|
- <img height="12" width="12" src="https://neo4j.com/favicon.ico" alt="Neo4j Logo" /> **[Neo4j](https://github.com/neo4j-contrib/mcp-neo4j/)** - Neo4j graph database server (schema + read/write-cypher) and separate graph database backed memory
|
||||||
|
- **[Neon](https://github.com/neondatabase/mcp-server-neon)** - Interact with the Neon serverless Postgres platform
|
||||||
|
- <img height="12" width="12" src="https://oxylabs.io/favicon.ico" alt="Oxylabs Logo" /> **[Oxylabs](https://github.com/oxylabs/oxylabs-mcp)** - Scrape websites with Oxylabs Web API, supporting dynamic rendering and parsing for structured data extraction.
|
||||||
|
- <img height="12" width="12" src="https://qdrant.tech/img/brand-resources-logos/logomark.svg" /> **[Qdrant](https://github.com/qdrant/mcp-server-qdrant/)** - Implement semantic memory layer on top of the Qdrant vector search engine
|
||||||
|
- **[Raygun](https://github.com/MindscapeHQ/mcp-server-raygun)** - Interact with your crash reporting and real user monitoring data on your Raygun account
|
||||||
|
- <img height="12" width="12" src="https://riza.io/favicon.ico" alt="Riza logo" /> **[Riza](https://github.com/riza-io/riza-mcp)** - Arbitrary code execution and tool-use platform for LLMs by [Riza](https://riza.io)
|
||||||
|
- <img height="12" width="12" src="https://pics.fatwang2.com/56912e614b35093426c515860f9f2234.svg" /> [Search1API](https://github.com/fatwang2/search1api-mcp) - One API for Search, Crawling, and Sitemaps
|
||||||
|
- <img height="12" width="12" src="https://tavily.com/favicon.ico" alt="Tavily Logo" /> **[Tavily](https://github.com/tavily-ai/tavily-mcp)** - Search engine for AI agents (search + extract) powered by [Tavily](https://tavily.com/)
|
||||||
|
- <img height="12" width="12" src="https://www.tinybird.co/favicon.ico" alt="Tinybird Logo" /> **[Tinybird](https://github.com/tinybirdco/mcp-tinybird)** - Interact with Tinybird serverless ClickHouse platform
|
||||||
|
- <img height="12" width="12" src="https://verodat.io/assets/favicon-16x16.png" alt="Verodat Logo" /> **[Verodat](https://github.com/ThinkEvolveSolve/verodat-mcp-server)** - Interact with Verodat AI Ready Data platform
|
||||||
|
|
||||||
### 🌎 Community Servers
|
### 🌎 Community Servers
|
||||||
|
|
||||||
@@ -59,73 +73,146 @@ A growing set of community-developed and maintained servers demonstrates various
|
|||||||
|
|
||||||
> **Note:** Community servers are **untested** and should be used at **your own risk**. They are not affiliated with or endorsed by Anthropic.
|
> **Note:** Community servers are **untested** and should be used at **your own risk**. They are not affiliated with or endorsed by Anthropic.
|
||||||
|
|
||||||
- **[MCP Installer](https://github.com/anaisbetts/mcp-installer)** - An MCP server that installs other MCP servers for you.
|
- **[AWS S3](https://github.com/aws-samples/sample-mcp-server-s3)** - A sample MCP server for AWS S3 that flexibly fetches objects from S3 such as PDF documents
|
||||||
- **[NS Travel Information](https://github.com/r-huijts/ns-mcp-server)** - Access Dutch Railways (NS) real-time train travel information and disruptions through the official NS API.
|
- **[AWS](https://github.com/rishikavikondala/mcp-server-aws)** - Perform operations on your AWS resources using an LLM
|
||||||
- **[Spotify](https://github.com/varunneal/spotify-mcp)** - This MCP allows an LLM to play and use Spotify.
|
- **[Airtable](https://github.com/domdomegg/airtable-mcp-server)** - Read and write access to [Airtable](https://airtable.com/) databases, with schema inspection.
|
||||||
- **[Inoyu](https://github.com/sergehuber/inoyu-mcp-unomi-server)** - Interact with an Apache Unomi CDP customer data platform to retrieve and update customer profiles
|
- **[Airtable](https://github.com/felores/airtable-mcp)** - Airtable Model Context Protocol Server.
|
||||||
- **[Vega-Lite](https://github.com/isaacwasserman/mcp-vegalite-server)** - Generate visualizations from fetched data using the VegaLite format and renderer.
|
- **[AlphaVantage](https://github.com/calvernaz/alphavantage)** - MCP server for stock market data API [AlphaVantage](https://www.alphavantage.co)
|
||||||
- **[Snowflake](https://github.com/isaacwasserman/mcp-snowflake-server)** - This MCP server enables LLMs to interact with Snowflake databases, allowing for secure and controlled data operations.
|
- **[Anki](https://github.com/scorzeth/anki-mcp-server)** - An MCP server for interacting with your [Anki](https://apps.ankiweb.net) decks and cards.
|
||||||
- **[MySQL](https://github.com/designcomputer/mysql_mcp_server)** (by DesignComputer) - MySQL database integration in Python with configurable access controls and schema inspection
|
- **[Any Chat Completions](https://github.com/pyroprompts/any-chat-completions-mcp)** - Interact with any OpenAI SDK Compatible Chat Completions API like OpenAI, Perplexity, Groq, xAI and many more.
|
||||||
- **[MySQL](https://github.com/benborla/mcp-server-mysql)** (by benborla) - MySQL database integration in NodeJS with configurable access controls and schema inspection
|
- **[ArangoDB](https://github.com/ravenwits/mcp-server-arangodb)** - MCP Server that provides database interaction capabilities through [ArangoDB](https://arangodb.com/).
|
||||||
- **[MSSQL](https://github.com/aekanun2020/mcp-server/)** - MSSQL database integration with configurable access controls and schema inspection
|
- **[Atlassian](https://github.com/sooperset/mcp-atlassian)** - Interact with Atlassian Cloud products (Confluence and Jira) including searching/reading Confluence spaces/pages, accessing Jira issues, and project metadata.
|
||||||
- **[BigQuery](https://github.com/LucasHild/mcp-server-bigquery)** (by LucasHild) - This server enables LLMs to inspect database schemas and execute queries on BigQuery.
|
- **[BigQuery](https://github.com/LucasHild/mcp-server-bigquery)** (by LucasHild) - This server enables LLMs to inspect database schemas and execute queries on BigQuery.
|
||||||
- **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities
|
- **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities
|
||||||
- **[Todoist](https://github.com/abhiz123/todoist-mcp-server)** - Interact with Todoist to manage your tasks.
|
- **[Calendar](https://github.com/GongRzhe/Calendar-MCP-Server)** - Google Calendar integration server enabling AI assistants to manage calendar events through natural language interactions.
|
||||||
- **[Tavily search](https://github.com/RamXX/mcp-tavily)** - An MCP server for Tavily's search & news API, with explicit site inclusions/exclusions
|
- **[CFBD API](https://github.com/lenwood/cfbd-mcp-server)** - An MCP server for the [College Football Data API](https://collegefootballdata.com/).
|
||||||
- **[Linear](https://github.com/jerhadf/linear-mcp-server)** - Allows LLM to interact with Linear's API for project management, including searching, creating, and updating issues.
|
- **[ChatMCP](https://github.com/AI-QL/chat-mcp)** – An Open Source Cross-platform GUI Desktop application compatible with Linux, macOS, and Windows, enabling seamless interaction with MCP servers across dynamically selectable LLMs, by **[AIQL](https://github.com/AI-QL)**
|
||||||
- **[Playwright](https://github.com/executeautomation/mcp-playwright)** - This MCP Server will help you run browser automation and webscraping using Playwright
|
- **[ChatSum](https://github.com/mcpso/mcp-server-chatsum)** - Query and Summarize chat messages with LLM. by [mcpso](https://mcp.so)
|
||||||
- **[AWS](https://github.com/rishikavikondala/mcp-server-aws)** - Perform operations on your AWS resources using an LLM
|
- **[Chroma](https://github.com/privetin/chroma)** - Vector database server for semantic document search and metadata filtering, built on Chroma
|
||||||
- **[LlamaCloud](https://github.com/run-llama/mcp-server-llamacloud)** (by marcusschiesser) - Integrate the data stored in a managed index on [LlamaCloud](https://cloud.llamaindex.ai/)
|
- **[ClaudePost](https://github.com/ZilongXue/claude-post)** - ClaudePost enables seamless email management for Gmail, offering secure features like email search, reading, and sending.
|
||||||
- **[Any Chat Completions](https://github.com/pyroprompts/any-chat-completions-mcp)** - Interact with any OpenAI SDK Compatible Chat Completions API like OpenAI, Perplexity, Groq, xAI and many more.
|
- **[Cloudinary](https://github.com/felores/cloudinary-mcp-server)** - Cloudinary Model Context Protocol Server to upload media to Cloudinary and get back the media link and details.
|
||||||
- **[Windows CLI](https://github.com/SimonB97/win-cli-mcp-server)** - MCP server for secure command-line interactions on Windows systems, enabling controlled access to PowerShell, CMD, and Git Bash shells.
|
- **[code-sandbox-mcp](https://github.com/Automata-Labs-team/code-sandbox-mcp)** - An MCP server to create secure code sandbox environment for executing code within Docker containers.
|
||||||
- **[OpenRPC](https://github.com/shanejonas/openrpc-mpc-server)** - Interact with and discover JSON-RPC APIs via [OpenRPC](https://open-rpc.org).
|
- **[cognee-mcp](https://github.com/topoteretes/cognee/tree/main/cognee-mcp)** - GraphRAG memory server with customizable ingestion, data processing and search
|
||||||
- **[FireCrawl](https://github.com/vrknetha/mcp-server-firecrawl)** - Advanced web scraping with JavaScript rendering, PDF support, and smart rate limiting
|
- **[coin_api_mcp](https://github.com/longmans/coin_api_mcp)** - Provides access to [coinmarketcap](https://coinmarketcap.com/) cryptocurrency data.
|
||||||
- **[AlphaVantage](https://github.com/calvernaz/alphavantage)** - MCP server for stock market data API [AlphaVantage](https://www.alphavantage.co)
|
|
||||||
- **[Docker](https://github.com/ckreiling/mcp-server-docker)** - Integrate with Docker to manage containers, images, volumes, and networks.
|
|
||||||
- **[Kubernetes](https://github.com/Flux159/mcp-server-kubernetes)** - Connect to Kubernetes cluster and manage pods, deployments, and services.
|
|
||||||
- **[OpenAPI](https://github.com/snaggle-ai/openapi-mcp-server)** - Interact with [OpenAPI](https://www.openapis.org/) APIs.
|
|
||||||
- **[Pandoc](https://github.com/vivekVells/mcp-pandoc)** - MCP server for seamless document format conversion using Pandoc, supporting Markdown, HTML, and plain text, with other formats like PDF, csv and docx in development.
|
|
||||||
- **[Pinecone](https://github.com/sirmews/mcp-pinecone)** - MCP server for searching and uploading records to Pinecone. Allows for simple RAG features, leveraging Pinecone's Inference API.
|
|
||||||
- **[HuggingFace Spaces](https://github.com/evalstate/mcp-hfspace)** - Server for using HuggingFace Spaces, supporting Open Source Image, Audio, Text Models and more. Claude Desktop mode for easy integration.
|
|
||||||
- **[ChatSum](https://github.com/chatmcp/mcp-server-chatsum)** - Query and Summarize chat messages with LLM. by [mcpso](https://mcp.so)
|
|
||||||
- **[Rememberizer AI](https://github.com/skydeckai/mcp-server-rememberizer)** - An MCP server designed for interacting with the Rememberizer data source, facilitating enhanced knowledge retrieval.
|
|
||||||
- **[FlightRadar24](https://github.com/sunsetcoder/flightradar24-mcp-server)** - A Claude Desktop MCP server that helps you track flights in real-time using Flightradar24 data.
|
|
||||||
- **[X (Twitter)](https://github.com/vidhupv/x-mcp)** (by vidhupv) - Create, manage and publish X/Twitter posts directly through Claude chat.
|
|
||||||
- **[X (Twitter)](https://github.com/EnesCinr/twitter-mcp)** (by EnesCinr) - Interact with twitter API. Post tweets and search for tweets by query.
|
|
||||||
- **[RAG Web Browser](https://github.com/apify/mcp-server-rag-web-browser)** - An MCP server for Apify's RAG Web Browser Actor to perform web searches, scrape URLs, and return content in Markdown.
|
|
||||||
- **[XMind](https://github.com/apeyroux/mcp-xmind)** - Read and search through your XMind directory containing XMind files.
|
|
||||||
- **[oatpp-mcp](https://github.com/oatpp/oatpp-mcp)** - C++ MCP integration for Oat++. Use [Oat++](https://oatpp.io) to build MCP servers.
|
|
||||||
- **[Contentful-mcp](https://github.com/ivo-toby/contentful-mcp)** - Read, update, delete, publish content in your [Contentful](https://contentful.com) space(s) from this MCP Server.
|
- **[Contentful-mcp](https://github.com/ivo-toby/contentful-mcp)** - Read, update, delete, publish content in your [Contentful](https://contentful.com) space(s) from this MCP Server.
|
||||||
- **[Home Assistant](https://github.com/tevonsb/homeassistant-mcp)** - Interact with [Home Assistant](https://www.home-assistant.io/) including viewing and controlling lights, switches, sensors, and all other Home Assistant entities.
|
- **[Data Exploration](https://github.com/reading-plus-ai/mcp-server-data-exploration)** - MCP server for autonomous data exploration on .csv-based datasets, providing intelligent insights with minimal effort. NOTE: Will execute arbitrary Python code on your machine, please use with caution!
|
||||||
- **[cognee-mcp](https://github.com/topoteretes/cognee-mcp-server)** - GraphRAG memory server with customizable ingestion, data processing and search
|
- **[Dataset Viewer](https://github.com/privetin/dataset-viewer)** - Browse and analyze Hugging Face datasets with features like search, filtering, statistics, and data export
|
||||||
- **[Airtable](https://github.com/domdomegg/airtable-mcp-server)** - Read and write access to [Airtable](https://airtable.com/) databases, with schema inspection.
|
- **[DeepSeek MCP Server](https://github.com/DMontgomery40/deepseek-mcp-server)** - Model Context Protocol server integrating DeepSeek's advanced language models, in addition to [other useful API endpoints](https://github.com/DMontgomery40/deepseek-mcp-server?tab=readme-ov-file#features)
|
||||||
- **[mcp-k8s-go](https://github.com/strowk/mcp-k8s-go)** - Golang-based Kubernetes server for MCP to browse pods and their logs, events, namespaces and more. Built to be extensible.
|
- **[Descope](https://github.com/descope-sample-apps/descope-mcp-server)** - An MCP server to integrate with [Descope](https://descope.com) to search audit logs, manage users, and more.
|
||||||
- **[Notion](https://github.com/v-3/notion-server)** (by v-3) - Notion MCP integration. Search, Read, Update, and Create pages through Claude chat.
|
- **[DevRev](https://github.com/kpsunil97/devrev-mcp-server)** - An MCP server to integrate with DevRev APIs to search through your DevRev Knowledge Graph where objects can be imported from diff. sources listed [here](https://devrev.ai/docs/import#available-sources).
|
||||||
- **[Notion](https://github.com/suekou/mcp-notion-server)** (by suekou) - Interact with Notion API.
|
- **[Dify](https://github.com/YanxingLiu/dify-mcp-server)** - A simple implementation of an MCP server for dify workflows.
|
||||||
- **[TMDB](https://github.com/Laksh-star/mcp-server-tmdb)** - This MCP server integrates with The Movie Database (TMDB) API to provide movie information, search capabilities, and recommendations.
|
- **[Discord](https://github.com/v-3/discordmcp)** - A MCP server to connect to Discord guilds through a bot and read and write messages in channels
|
||||||
- **[MongoDB](https://github.com/kiliczsh/mcp-mongo-server)** - A Model Context Protocol Server for MongoDB.
|
- **[Docker](https://github.com/ckreiling/mcp-server-docker)** - Integrate with Docker to manage containers, images, volumes, and networks.
|
||||||
- **[Airtable](https://github.com/felores/airtable-mcp)** - Airtable Model Context Protocol Server.
|
- **[Drupal](https://github.com/Omedia/mcp-server-drupal)** - Server for interacting with [Drupal](https://www.drupal.org/project/mcp) using STDIO transport layer.
|
||||||
- **[Atlassian](https://github.com/sooperset/mcp-atlassian)** - Interact with Atlassian Cloud products (Confluence and Jira) including searching/reading Confluence spaces/pages, accessing Jira issues, and project metadata.
|
- **[Elasticsearch](https://github.com/cr7258/elasticsearch-mcp-server)** - MCP server implementation that provides Elasticsearch interaction.
|
||||||
|
- **[ElevenLabs](https://github.com/mamertofabian/elevenlabs-mcp-server)** - A server that integrates with ElevenLabs text-to-speech API capable of generating full voiceovers with multiple voices.
|
||||||
|
- **[Everything Search](https://github.com/mamertofabian/mcp-everything-search)** - Fast file searching capabilities across Windows (using [Everything SDK](https://www.voidtools.com/support/everything/sdk/)), macOS (using mdfind command), and Linux (using locate/plocate command).
|
||||||
|
- **[Fetch](https://github.com/zcaceres/fetch-mcp)** - A server that flexibly fetches HTML, JSON, Markdown, or plaintext.
|
||||||
|
- **[FireCrawl](https://github.com/vrknetha/mcp-server-firecrawl)** - Advanced web scraping with JavaScript rendering, PDF support, and smart rate limiting
|
||||||
|
- **[FlightRadar24](https://github.com/sunsetcoder/flightradar24-mcp-server)** - A Claude Desktop MCP server that helps you track flights in real-time using Flightradar24 data.
|
||||||
|
- **[Glean](https://github.com/longyi1207/glean-mcp-server)** - A server that uses Glean API to search and chat.
|
||||||
|
- **[Gmail](https://github.com/GongRzhe/Gmail-MCP-Server)** - A Model Context Protocol (MCP) server for Gmail integration in Claude Desktop with auto authentication support.
|
||||||
|
- **[Goal Story](https://github.com/hichana/goalstory-mcp)** - a Goal Tracker and Visualization Tool for personal and professional development.
|
||||||
|
- **[Golang Filesystem Server](https://github.com/mark3labs/mcp-filesystem-server)** - Secure file operations with configurable access controls built with Go!
|
||||||
|
- **[Google Calendar](https://github.com/v-3/google-calendar)** - Integration with Google Calendar to check schedules, find time, and add/delete events
|
||||||
|
- **[Google Calendar](https://github.com/nspady/google-calendar-mcp)** - Google Calendar MCP Server for managing Google calendar events. Also supports searching for events by attributes like title and location.
|
||||||
- **[Google Tasks](https://github.com/zcaceres/gtasks-mcp)** - Google Tasks API Model Context Protocol Server.
|
- **[Google Tasks](https://github.com/zcaceres/gtasks-mcp)** - Google Tasks API Model Context Protocol Server.
|
||||||
- **[Fetch](https://github.com/zcaceres/fetch-mcp)** - A server that flexibly fetches HTML, JSON, Markdown, or plaintext
|
- **[Home Assistant](https://github.com/tevonsb/homeassistant-mcp)** - Interact with [Home Assistant](https://www.home-assistant.io/) including viewing and controlling lights, switches, sensors, and all other Home Assistant entities.
|
||||||
|
- **[HubSpot](https://github.com/buryhuang/mcp-hubspot)** - HubSpot CRM integration for managing contacts and companies. Create and retrieve CRM data directly through Claude chat.
|
||||||
|
- **[HuggingFace Spaces](https://github.com/evalstate/mcp-hfspace)** - Server for using HuggingFace Spaces, supporting Open Source Image, Audio, Text Models and more. Claude Desktop mode for easy integration.
|
||||||
|
- **[Inoyu](https://github.com/sergehuber/inoyu-mcp-unomi-server)** - Interact with an Apache Unomi CDP customer data platform to retrieve and update customer profiles
|
||||||
|
- **[JSON](https://github.com/GongRzhe/JSON-MCP-Server)** - JSON handling and processing server with advanced query capabilities using JSONPath syntax and support for array, string, numeric, and date operations.
|
||||||
|
- **[Keycloak MCP](https://github.com/ChristophEnglisch/keycloak-model-context-protocol)** - This MCP server enables natural language interaction with Keycloak for user and realm management including creating, deleting, and listing users and realms.
|
||||||
|
- **[Kubernetes](https://github.com/Flux159/mcp-server-kubernetes)** - Connect to Kubernetes cluster and manage pods, deployments, and services.
|
||||||
|
- **[Linear](https://github.com/jerhadf/linear-mcp-server)** - Allows LLM to interact with Linear's API for project management, including searching, creating, and updating issues.
|
||||||
|
- **[LlamaCloud](https://github.com/run-llama/mcp-server-llamacloud)** (by marcusschiesser) - Integrate the data stored in a managed index on [LlamaCloud](https://cloud.llamaindex.ai/)
|
||||||
|
- **[llm-context](https://github.com/cyberchitta/llm-context.py)** - Provides a repo-packing MCP tool with configurable profiles that specify file inclusion/exclusion patterns and optional prompts.
|
||||||
|
- **[MCP Compass](https://github.com/liuyoshio/mcp-compass)** - Suggest the right MCP server for your needs
|
||||||
|
- **[MCP Installer](https://github.com/anaisbetts/mcp-installer)** - An MCP server that installs other MCP servers for you.
|
||||||
|
- **[mcp-k8s-go](https://github.com/strowk/mcp-k8s-go)** - Golang-based Kubernetes server for MCP to browse pods and their logs, events, namespaces and more. Built to be extensible.
|
||||||
|
- **[mcp-proxy](https://github.com/sparfenyuk/mcp-proxy)** - Connect to MCP servers that run on SSE transport, or expose stdio servers as an SSE server.
|
||||||
|
- **[MSSQL](https://github.com/aekanun2020/mcp-server/)** - MSSQL database integration with configurable access controls and schema inspection
|
||||||
|
- **[Markdownify](https://github.com/zcaceres/mcp-markdownify-server)** - MCP to convert almost anything to Markdown (PPTX, HTML, PDF, Youtube Transcripts and more)
|
||||||
|
- **[Minima](https://github.com/dmayboroda/minima)** - MCP server for RAG on local files
|
||||||
|
- **[MongoDB](https://github.com/kiliczsh/mcp-mongo-server)** - A Model Context Protocol Server for MongoDB.
|
||||||
|
- **[MySQL](https://github.com/benborla/mcp-server-mysql)** (by benborla) - MySQL database integration in NodeJS with configurable access controls and schema inspection
|
||||||
|
- **[MySQL](https://github.com/designcomputer/mysql_mcp_server)** (by DesignComputer) - MySQL database integration in Python with configurable access controls and schema inspection
|
||||||
|
- **[NS Travel Information](https://github.com/r-huijts/ns-mcp-server)** - Access Dutch Railways (NS) real-time train travel information and disruptions through the official NS API.
|
||||||
|
- **[Neovim](https://github.com/bigcodegen/mcp-neovim-server)** - An MCP Server for your Neovim session.
|
||||||
|
- **[Notion](https://github.com/suekou/mcp-notion-server)** (by suekou) - Interact with Notion API.
|
||||||
|
- **[Notion](https://github.com/v-3/notion-server)** (by v-3) - Notion MCP integration. Search, Read, Update, and Create pages through Claude chat.
|
||||||
|
- **[oatpp-mcp](https://github.com/oatpp/oatpp-mcp)** - C++ MCP integration for Oat++. Use [Oat++](https://oatpp.io) to build MCP servers.
|
||||||
|
- **[Obsidian Markdown Notes](https://github.com/calclavia/mcp-obsidian)** - Read and search through your Obsidian vault or any directory containing Markdown notes
|
||||||
|
- **[obsidian-mcp](https://github.com/StevenStavrakis/obsidian-mcp)** - (by Steven Stavrakis) An MCP server for Obsidian.md with tools for searching, reading, writing, and organizing notes.
|
||||||
|
- **[OpenAPI](https://github.com/snaggle-ai/openapi-mcp-server)** - Interact with [OpenAPI](https://www.openapis.org/) APIs.
|
||||||
|
- **[OpenCTI](https://github.com/Spathodea-Network/opencti-mcp)** - Interact with OpenCTI platform to retrieve threat intelligence data including reports, indicators, malware and threat actors.
|
||||||
|
- **[OpenRPC](https://github.com/shanejonas/openrpc-mpc-server)** - Interact with and discover JSON-RPC APIs via [OpenRPC](https://open-rpc.org).
|
||||||
|
- **[Open Strategy Partners Marketing Tools](https://github.com/open-strategy-partners/osp_marketing_tools)** - Content editing codes, value map, and positioning tools for product marketing.
|
||||||
|
- **[Pandoc](https://github.com/vivekVells/mcp-pandoc)** - MCP server for seamless document format conversion using Pandoc, supporting Markdown, HTML, PDF, DOCX (.docx), csv and more.
|
||||||
|
- **[Pinecone](https://github.com/sirmews/mcp-pinecone)** - MCP server for searching and uploading records to Pinecone. Allows for simple RAG features, leveraging Pinecone's Inference API.
|
||||||
|
- **[Placid.app](https://github.com/felores/placid-mcp-server)** - Generate image and video creatives using Placid.app templates
|
||||||
|
- **[Playwright](https://github.com/executeautomation/mcp-playwright)** - This MCP Server will help you run browser automation and webscraping using Playwright
|
||||||
|
- **[Postman](https://github.com/shannonlal/mcp-postman)** - MCP server for running Postman Collections locally via Newman. Allows for simple execution of Postman Server and returns the results of whether the collection passed all the tests.
|
||||||
- **[RabbitMQ](https://github.com/kenliao94/mcp-server-rabbitmq)** - The MCP server that interacts with RabbitMQ to publish and consume messages.
|
- **[RabbitMQ](https://github.com/kenliao94/mcp-server-rabbitmq)** - The MCP server that interacts with RabbitMQ to publish and consume messages.
|
||||||
|
- **[RAG Web Browser](https://github.com/apify/mcp-server-rag-web-browser)** - An MCP server for Apify's open-source RAG Web Browser [Actor](https://apify.com/apify/rag-web-browser) to perform web searches, scrape URLs, and return content in Markdown.
|
||||||
|
- **[Reaper](https://github.com/dschuler36/reaper-mcp-server)** - Interact with your [Reaper](https://www.reaper.fm/) (Digital Audio Workstation) projects.
|
||||||
|
- **[Redis](https://github.com/GongRzhe/REDIS-MCP-Server)** - Redis database operations and caching microservice server with support for key-value operations, expiration management, and pattern-based key listing.
|
||||||
|
- **[Rememberizer AI](https://github.com/skydeckai/mcp-server-rememberizer)** - An MCP server designed for interacting with the Rememberizer data source, facilitating enhanced knowledge retrieval.
|
||||||
|
- **[Rijksmuseum](https://github.com/r-huijts/rijksmuseum-mcp)** - Interface with the Rijksmuseum API to search artworks, retrieve artwork details, access image tiles, and explore user collections.
|
||||||
|
- **[Salesforce MCP](https://github.com/smn2gnt/MCP-Salesforce)** - Interact with Salesforce Data and Metadata
|
||||||
|
- **[Scholarly](https://github.com/adityak74/mcp-scholarly)** - A MCP server to search for scholarly and academic articles.
|
||||||
|
- **[SearXNG](https://github.com/ihor-sokoliuk/mcp-searxng)** - A Model Context Protocol Server for [SearXNG](https://docs.searxng.org)
|
||||||
|
- **[Snowflake](https://github.com/isaacwasserman/mcp-snowflake-server)** - This MCP server enables LLMs to interact with Snowflake databases, allowing for secure and controlled data operations.
|
||||||
|
- **[Spotify](https://github.com/varunneal/spotify-mcp)** - This MCP allows an LLM to play and use Spotify.
|
||||||
|
- **[TMDB](https://github.com/Laksh-star/mcp-server-tmdb)** - This MCP server integrates with The Movie Database (TMDB) API to provide movie information, search capabilities, and recommendations.
|
||||||
|
- **[Tavily search](https://github.com/RamXX/mcp-tavily)** - An MCP server for Tavily's search & news API, with explicit site inclusions/exclusions
|
||||||
|
- **[Todoist](https://github.com/abhiz123/todoist-mcp-server)** - Interact with Todoist to manage your tasks.
|
||||||
|
- **[Travel Planner](https://github.com/GongRzhe/TRAVEL-PLANNER-MCP-Server)** - Travel planning and itinerary management server integrating with Google Maps API for location search, place details, and route calculations.
|
||||||
|
- **[Vega-Lite](https://github.com/isaacwasserman/mcp-vegalite-server)** - Generate visualizations from fetched data using the VegaLite format and renderer.
|
||||||
|
- **[Video Editor](https://github.com/burningion/video-editing-mcp)** - A Model Context Protocol Server to add, edit, and search videos with [Video Jungle](https://www.video-jungle.com/).
|
||||||
|
- **[Windows CLI](https://github.com/SimonB97/win-cli-mcp-server)** - MCP server for secure command-line interactions on Windows systems, enabling controlled access to PowerShell, CMD, and Git Bash shells.
|
||||||
|
- **[X (Twitter)](https://github.com/EnesCinr/twitter-mcp)** (by EnesCinr) - Interact with twitter API. Post tweets and search for tweets by query.
|
||||||
|
- **[X (Twitter)](https://github.com/vidhupv/x-mcp)** (by vidhupv) - Create, manage and publish X/Twitter posts directly through Claude chat.
|
||||||
|
- **[XMind](https://github.com/apeyroux/mcp-xmind)** - Read and search through your XMind directory containing XMind files.
|
||||||
|
|
||||||
|
## 📚 Frameworks
|
||||||
|
|
||||||
|
These are high-level frameworks that make it easier to build MCP servers or clients.
|
||||||
|
|
||||||
|
### For servers
|
||||||
|
|
||||||
|
* [EasyMCP](https://github.com/zcaceres/easy-mcp/) (TypeScript)
|
||||||
|
* [FastMCP](https://github.com/punkpeye/fastmcp) (TypeScript)
|
||||||
|
* **[Foxy Contexts](https://github.com/strowk/foxy-contexts)** – A library to build MCP servers in Golang by **[strowk](https://github.com/strowk)**
|
||||||
|
|
||||||
|
### For clients
|
||||||
|
|
||||||
|
* **[codemirror-mcp](https://github.com/marimo-team/codemirror-mcp)** - CodeMirror extension that implements the Model Context Protocol (MCP) for resource mentions and prompt commands
|
||||||
|
|
||||||
## 📚 Resources
|
## 📚 Resources
|
||||||
|
|
||||||
Additional resources on MCP.
|
Additional resources on MCP.
|
||||||
|
|
||||||
|
- **[AiMCP](https://www.aimcp.info)** - A collection of MCP clients&servers to find the right mcp tools by **[Hekmon](https://github.com/hekmon8)**
|
||||||
|
- **[Awesome Crypto MCP Servers by badkk](https://github.com/badkk/awesome-crypto-mcp-servers)** - A curated list of MCP servers by **[Luke Fan](https://github.com/badkk)**
|
||||||
|
- **[Awesome MCP Servers by appcypher](https://github.com/appcypher/awesome-mcp-servers)** - A curated list of MCP servers by **[Stephen Akinyemi](https://github.com/appcypher)**
|
||||||
- **[Awesome MCP Servers by punkpeye](https://github.com/punkpeye/awesome-mcp-servers)** (**[website](https://glama.ai/mcp/servers)**) - A curated list of MCP servers by **[Frank Fiegel](https://github.com/punkpeye)**
|
- **[Awesome MCP Servers by punkpeye](https://github.com/punkpeye/awesome-mcp-servers)** (**[website](https://glama.ai/mcp/servers)**) - A curated list of MCP servers by **[Frank Fiegel](https://github.com/punkpeye)**
|
||||||
- **[Awesome MCP Servers by wong2](https://github.com/wong2/awesome-mcp-servers)** (**[website](https://mcpservers.org)**) - A curated list of MCP servers by **[wong2](https://github.com/wong2)**
|
- **[Awesome MCP Servers by wong2](https://github.com/wong2/awesome-mcp-servers)** (**[website](https://mcpservers.org)**) - A curated list of MCP servers by **[wong2](https://github.com/wong2)**
|
||||||
- **[Awesome MCP Servers by appcypher](https://github.com/appcypher/awesome-mcp-servers)** - A curated list of MCP servers by **[Stephen Akinyemi](https://github.com/appcypher)**
|
|
||||||
- **[Open-Sourced MCP Servers Directory](https://github.com/chatmcp/mcp-directory)** - A curated list of MCP servers by **[mcpso](https://mcp.so)**
|
|
||||||
- **[Discord Server](https://glama.ai/mcp/discord)** – A community discord server dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
|
- **[Discord Server](https://glama.ai/mcp/discord)** – A community discord server dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
|
||||||
- **[Smithery](https://smithery.ai/)** - A registry of MCP servers to find the right tools for your LLM agents by **[Henry Mao](https://github.com/calclavia)**
|
- **[MCP Badges](https://github.com/mcpx-dev/mcp-badges)** – Quickly highlight your MCP project with clear, eye-catching badges, by **[Ironben](https://github.com/nanbingxyz)**
|
||||||
- **[mcp-get](https://mcp-get.com)** - Command line tool for installing and managing MCP servers by **[Michael Latman](https://github.com/michaellatman)**
|
- **[MCP Servers Hub](https://github.com/apappascs/mcp-servers-hub)** (**[website](https://mcp-servers-hub-website.pages.dev/)**) - A curated list of MCP servers by **[apappascs](https://github.com/apappascs)**
|
||||||
- **[mcp-cli](https://github.com/wong2/mcp-cli)** - A CLI inspector for the Model Context Protocol by **[wong2](https://github.com/wong2)**
|
|
||||||
- **[r/mcp](https://www.reddit.com/r/mcp)** – A Reddit community dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
|
|
||||||
- **[MCP X Community](https://x.com/i/communities/1861891349609603310)** – An X community for MCP by **[Xiaoyi](https://x.com/chxy)**
|
- **[MCP X Community](https://x.com/i/communities/1861891349609603310)** – An X community for MCP by **[Xiaoyi](https://x.com/chxy)**
|
||||||
|
- **[mcp-cli](https://github.com/wong2/mcp-cli)** - A CLI inspector for the Model Context Protocol by **[wong2](https://github.com/wong2)**
|
||||||
|
- **[mcp-get](https://mcp-get.com)** - Command line tool for installing and managing MCP servers by **[Michael Latman](https://github.com/michaellatman)**
|
||||||
- **[mcp-manager](https://github.com/zueai/mcp-manager)** - Simple Web UI to install and manage MCP servers for Claude Desktop by **[Zue](https://github.com/zueai)**
|
- **[mcp-manager](https://github.com/zueai/mcp-manager)** - Simple Web UI to install and manage MCP servers for Claude Desktop by **[Zue](https://github.com/zueai)**
|
||||||
- **[MCPHub](https://github.com/Jeamee/MCPHub-Desktop)** – An Open Source MacOS & Windows GUI Desktop app for discovering, installing and managing MCP servers by **[Jeamee](https://github.com/jeamee)**
|
- **[MCPHub](https://github.com/Jeamee/MCPHub-Desktop)** – An Open Source MacOS & Windows GUI Desktop app for discovering, installing and managing MCP servers by **[Jeamee](https://github.com/jeamee)**
|
||||||
|
- **[mcp.run](https://mcp.run)** - A hosted registry and control plane to install & run secure + portable MCP Servers.
|
||||||
|
- **[Open-Sourced MCP Servers Directory](https://github.com/chatmcp/mcp-directory)** - A curated list of MCP servers by **[mcpso](https://mcp.so)**
|
||||||
|
- <img height="12" width="12" src="https://opentools.com/favicon.ico" alt="OpenTools Logo" /> **[OpenTools](https://opentools.com)** - An open registry for finding, installing, and building with MCP servers by **[opentoolsteam](https://github.com/opentoolsteam)**
|
||||||
|
- **[PulseMCP](https://www.pulsemcp.com)** ([API](https://www.pulsemcp.com/api)) - Community hub & weekly newsletter for discovering MCP servers, clients, articles, and news by **[Tadas Antanavicius](https://github.com/tadasant)**, **[Mike Coughlin](https://github.com/macoughl)**, and **[Ravina Patel](https://github.com/ravinahp)**
|
||||||
|
- **[r/mcp](https://www.reddit.com/r/mcp)** – A Reddit community dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
|
||||||
|
- **[Smithery](https://smithery.ai/)** - A registry of MCP servers to find the right tools for your LLM agents by **[Henry Mao](https://github.com/calclavia)**
|
||||||
|
- **[Toolbase](https://gettoolbase.ai)** - Desktop application that manages tools and MCP servers with just a few clicks - no coding required by **[gching](https://github.com/gching)**
|
||||||
|
|
||||||
## 🚀 Getting Started
|
## 🚀 Getting Started
|
||||||
|
|
||||||
|
|||||||
20
package-lock.json
generated
20
package-lock.json
generated
@@ -2497,9 +2497,10 @@
|
|||||||
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="
|
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="
|
||||||
},
|
},
|
||||||
"node_modules/express": {
|
"node_modules/express": {
|
||||||
"version": "4.21.1",
|
"version": "4.21.2",
|
||||||
"resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz",
|
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
|
||||||
"integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==",
|
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
|
||||||
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"accepts": "~1.3.8",
|
"accepts": "~1.3.8",
|
||||||
"array-flatten": "1.1.1",
|
"array-flatten": "1.1.1",
|
||||||
@@ -2520,7 +2521,7 @@
|
|||||||
"methods": "~1.1.2",
|
"methods": "~1.1.2",
|
||||||
"on-finished": "2.4.1",
|
"on-finished": "2.4.1",
|
||||||
"parseurl": "~1.3.3",
|
"parseurl": "~1.3.3",
|
||||||
"path-to-regexp": "0.1.10",
|
"path-to-regexp": "0.1.12",
|
||||||
"proxy-addr": "~2.0.7",
|
"proxy-addr": "~2.0.7",
|
||||||
"qs": "6.13.0",
|
"qs": "6.13.0",
|
||||||
"range-parser": "~1.2.1",
|
"range-parser": "~1.2.1",
|
||||||
@@ -2535,6 +2536,10 @@
|
|||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.10.0"
|
"node": ">= 0.10.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/express"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/extend": {
|
"node_modules/extend": {
|
||||||
@@ -3770,9 +3775,10 @@
|
|||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
"node_modules/path-to-regexp": {
|
"node_modules/path-to-regexp": {
|
||||||
"version": "0.1.10",
|
"version": "0.1.12",
|
||||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz",
|
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
|
||||||
"integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w=="
|
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
|
||||||
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/pend": {
|
"node_modules/pend": {
|
||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
|
|||||||
210
scripts/release.py
Executable file
210
scripts/release.py
Executable file
@@ -0,0 +1,210 @@
|
|||||||
|
#!/usr/bin/env uv run --script
|
||||||
|
# /// script
|
||||||
|
# requires-python = ">=3.12"
|
||||||
|
# dependencies = [
|
||||||
|
# "click>=8.1.8",
|
||||||
|
# "tomlkit>=0.13.2"
|
||||||
|
# ]
|
||||||
|
# ///
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import click
|
||||||
|
from pathlib import Path
|
||||||
|
import json
|
||||||
|
import tomlkit
|
||||||
|
import datetime
|
||||||
|
import subprocess
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Iterator, NewType, Protocol
|
||||||
|
|
||||||
|
|
||||||
|
Version = NewType("Version", str)
|
||||||
|
GitHash = NewType("GitHash", str)
|
||||||
|
|
||||||
|
|
||||||
|
class GitHashParamType(click.ParamType):
|
||||||
|
name = "git_hash"
|
||||||
|
|
||||||
|
def convert(
|
||||||
|
self, value: Any, param: click.Parameter | None, ctx: click.Context | None
|
||||||
|
) -> GitHash | None:
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not (8 <= len(value) <= 40):
|
||||||
|
self.fail(f"Git hash must be between 8 and 40 characters, got {len(value)}")
|
||||||
|
|
||||||
|
if not re.match(r"^[0-9a-fA-F]+$", value):
|
||||||
|
self.fail("Git hash must contain only hex digits (0-9, a-f)")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Verify hash exists in repo
|
||||||
|
subprocess.run(
|
||||||
|
["git", "rev-parse", "--verify", value], check=True, capture_output=True
|
||||||
|
)
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
self.fail(f"Git hash {value} not found in repository")
|
||||||
|
|
||||||
|
return GitHash(value.lower())
|
||||||
|
|
||||||
|
|
||||||
|
GIT_HASH = GitHashParamType()
|
||||||
|
|
||||||
|
|
||||||
|
class Package(Protocol):
|
||||||
|
path: Path
|
||||||
|
|
||||||
|
def package_name(self) -> str: ...
|
||||||
|
|
||||||
|
def update_version(self, version: Version) -> None: ...
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class NpmPackage:
|
||||||
|
path: Path
|
||||||
|
|
||||||
|
def package_name(self) -> str:
|
||||||
|
with open(self.path / "package.json", "r") as f:
|
||||||
|
return json.load(f)["name"]
|
||||||
|
|
||||||
|
def update_version(self, version: Version):
|
||||||
|
with open(self.path / "package.json", "r+") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
data["version"] = version
|
||||||
|
f.seek(0)
|
||||||
|
json.dump(data, f, indent=2)
|
||||||
|
f.truncate()
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class PyPiPackage:
|
||||||
|
path: Path
|
||||||
|
|
||||||
|
def package_name(self) -> str:
|
||||||
|
with open(self.path / "pyproject.toml") as f:
|
||||||
|
toml_data = tomlkit.parse(f.read())
|
||||||
|
name = toml_data.get("project", {}).get("name")
|
||||||
|
if not name:
|
||||||
|
raise Exception("No name in pyproject.toml project section")
|
||||||
|
return str(name)
|
||||||
|
|
||||||
|
def update_version(self, version: Version):
|
||||||
|
# Update version in pyproject.toml
|
||||||
|
with open(self.path / "pyproject.toml") as f:
|
||||||
|
data = tomlkit.parse(f.read())
|
||||||
|
data["project"]["version"] = version
|
||||||
|
|
||||||
|
with open(self.path / "pyproject.toml", "w") as f:
|
||||||
|
f.write(tomlkit.dumps(data))
|
||||||
|
|
||||||
|
|
||||||
|
def has_changes(path: Path, git_hash: GitHash) -> bool:
|
||||||
|
"""Check if any files changed between current state and git hash"""
|
||||||
|
try:
|
||||||
|
output = subprocess.run(
|
||||||
|
["git", "diff", "--name-only", git_hash, "--", "."],
|
||||||
|
cwd=path,
|
||||||
|
check=True,
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
changed_files = [Path(f) for f in output.stdout.splitlines()]
|
||||||
|
relevant_files = [f for f in changed_files if f.suffix in [".py", ".ts"]]
|
||||||
|
return len(relevant_files) >= 1
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def gen_version() -> Version:
|
||||||
|
"""Generate version based on current date"""
|
||||||
|
now = datetime.datetime.now()
|
||||||
|
return Version(f"{now.year}.{now.month}.{now.day}")
|
||||||
|
|
||||||
|
|
||||||
|
def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]:
|
||||||
|
for path in directory.glob("*/package.json"):
|
||||||
|
if has_changes(path.parent, git_hash):
|
||||||
|
yield NpmPackage(path.parent)
|
||||||
|
for path in directory.glob("*/pyproject.toml"):
|
||||||
|
if has_changes(path.parent, git_hash):
|
||||||
|
yield PyPiPackage(path.parent)
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def cli():
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("update-packages")
|
||||||
|
@click.option(
|
||||||
|
"--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
|
||||||
|
)
|
||||||
|
@click.argument("git_hash", type=GIT_HASH)
|
||||||
|
def update_packages(directory: Path, git_hash: GitHash) -> int:
|
||||||
|
# Detect package type
|
||||||
|
path = directory.resolve(strict=True)
|
||||||
|
version = gen_version()
|
||||||
|
|
||||||
|
for package in find_changed_packages(path, git_hash):
|
||||||
|
name = package.package_name()
|
||||||
|
package.update_version(version)
|
||||||
|
|
||||||
|
click.echo(f"{name}@{version}")
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("generate-notes")
|
||||||
|
@click.option(
|
||||||
|
"--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
|
||||||
|
)
|
||||||
|
@click.argument("git_hash", type=GIT_HASH)
|
||||||
|
def generate_notes(directory: Path, git_hash: GitHash) -> int:
|
||||||
|
# Detect package type
|
||||||
|
path = directory.resolve(strict=True)
|
||||||
|
version = gen_version()
|
||||||
|
|
||||||
|
click.echo(f"# Release : v{version}")
|
||||||
|
click.echo("")
|
||||||
|
click.echo("## Updated packages")
|
||||||
|
for package in find_changed_packages(path, git_hash):
|
||||||
|
name = package.package_name()
|
||||||
|
click.echo(f"- {name}@{version}")
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("generate-version")
|
||||||
|
def generate_version() -> int:
|
||||||
|
# Detect package type
|
||||||
|
click.echo(gen_version())
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("generate-matrix")
|
||||||
|
@click.option(
|
||||||
|
"--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
|
||||||
|
)
|
||||||
|
@click.option("--npm", is_flag=True, default=False)
|
||||||
|
@click.option("--pypi", is_flag=True, default=False)
|
||||||
|
@click.argument("git_hash", type=GIT_HASH)
|
||||||
|
def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int:
|
||||||
|
# Detect package type
|
||||||
|
path = directory.resolve(strict=True)
|
||||||
|
version = gen_version()
|
||||||
|
|
||||||
|
changes = []
|
||||||
|
for package in find_changed_packages(path, git_hash):
|
||||||
|
pkg = package.path.relative_to(path)
|
||||||
|
if npm and isinstance(package, NpmPackage):
|
||||||
|
changes.append(str(pkg))
|
||||||
|
if pypi and isinstance(package, PyPiPackage):
|
||||||
|
changes.append(str(pkg))
|
||||||
|
|
||||||
|
click.echo(json.dumps(changes))
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(cli())
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/aws-kb-retrieval-server /app
|
COPY src/aws-kb-retrieval-server /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
||||||
COPY src/brave-search /app
|
COPY src/brave-search /app
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/everart /app
|
COPY src/everart /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/everything /app
|
COPY src/everything /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
@@ -411,7 +411,7 @@ export const createServer = () => {
|
|||||||
maxTokens,
|
maxTokens,
|
||||||
);
|
);
|
||||||
return {
|
return {
|
||||||
content: [{ type: "text", text: `LLM sampling result: ${result}` }],
|
content: [{ type: "text", text: `LLM sampling result: ${result.content.text}` }],
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,9 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc && shx chmod +x dist/*.js",
|
"build": "tsc && shx chmod +x dist/*.js",
|
||||||
"prepare": "npm run build",
|
"prepare": "npm run build",
|
||||||
"watch": "tsc --watch"
|
"watch": "tsc --watch",
|
||||||
|
"start": "node dist/index.js",
|
||||||
|
"start:sse": "node dist/sse.js"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@modelcontextprotocol/sdk": "1.0.1",
|
"@modelcontextprotocol/sdk": "1.0.1",
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ classifiers = [
|
|||||||
]
|
]
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"markdownify>=0.13.1",
|
"markdownify>=0.13.1",
|
||||||
"mcp>=1.0.0",
|
"mcp>=1.1.3",
|
||||||
"protego>=0.3.1",
|
"protego>=0.3.1",
|
||||||
"pydantic>=2.0.0",
|
"pydantic>=2.0.0",
|
||||||
"readabilipy>=0.2.0",
|
"readabilipy>=0.2.0",
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ from mcp.shared.exceptions import McpError
|
|||||||
from mcp.server import Server
|
from mcp.server import Server
|
||||||
from mcp.server.stdio import stdio_server
|
from mcp.server.stdio import stdio_server
|
||||||
from mcp.types import (
|
from mcp.types import (
|
||||||
|
ErrorData,
|
||||||
GetPromptResult,
|
GetPromptResult,
|
||||||
Prompt,
|
Prompt,
|
||||||
PromptArgument,
|
PromptArgument,
|
||||||
@@ -79,15 +80,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
|
|||||||
headers={"User-Agent": user_agent},
|
headers={"User-Agent": user_agent},
|
||||||
)
|
)
|
||||||
except HTTPError:
|
except HTTPError:
|
||||||
raise McpError(
|
raise McpError(ErrorData(
|
||||||
INTERNAL_ERROR,
|
code=INTERNAL_ERROR,
|
||||||
f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
|
message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
|
||||||
)
|
))
|
||||||
if response.status_code in (401, 403):
|
if response.status_code in (401, 403):
|
||||||
raise McpError(
|
raise McpError(ErrorData(
|
||||||
INTERNAL_ERROR,
|
code=INTERNAL_ERROR,
|
||||||
f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
|
message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
|
||||||
)
|
))
|
||||||
elif 400 <= response.status_code < 500:
|
elif 400 <= response.status_code < 500:
|
||||||
return
|
return
|
||||||
robot_txt = response.text
|
robot_txt = response.text
|
||||||
@@ -96,15 +97,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
|
|||||||
)
|
)
|
||||||
robot_parser = Protego.parse(processed_robot_txt)
|
robot_parser = Protego.parse(processed_robot_txt)
|
||||||
if not robot_parser.can_fetch(str(url), user_agent):
|
if not robot_parser.can_fetch(str(url), user_agent):
|
||||||
raise McpError(
|
raise McpError(ErrorData(
|
||||||
INTERNAL_ERROR,
|
code=INTERNAL_ERROR,
|
||||||
f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
|
message=f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
|
||||||
f"<useragent>{user_agent}</useragent>\n"
|
f"<useragent>{user_agent}</useragent>\n"
|
||||||
f"<url>{url}</url>"
|
f"<url>{url}</url>"
|
||||||
f"<robots>\n{robot_txt}\n</robots>\n"
|
f"<robots>\n{robot_txt}\n</robots>\n"
|
||||||
f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
|
f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
|
||||||
f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
|
f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
|
||||||
)
|
))
|
||||||
|
|
||||||
|
|
||||||
async def fetch_url(
|
async def fetch_url(
|
||||||
@@ -124,12 +125,12 @@ async def fetch_url(
|
|||||||
timeout=30,
|
timeout=30,
|
||||||
)
|
)
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
|
raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}"))
|
||||||
if response.status_code >= 400:
|
if response.status_code >= 400:
|
||||||
raise McpError(
|
raise McpError(ErrorData(
|
||||||
INTERNAL_ERROR,
|
code=INTERNAL_ERROR,
|
||||||
f"Failed to fetch {url} - status code {response.status_code}",
|
message=f"Failed to fetch {url} - status code {response.status_code}",
|
||||||
)
|
))
|
||||||
|
|
||||||
page_raw = response.text
|
page_raw = response.text
|
||||||
|
|
||||||
@@ -221,11 +222,11 @@ Although originally you did not have internet access, and were advised to refuse
|
|||||||
try:
|
try:
|
||||||
args = Fetch(**arguments)
|
args = Fetch(**arguments)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise McpError(INVALID_PARAMS, str(e))
|
raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e)))
|
||||||
|
|
||||||
url = str(args.url)
|
url = str(args.url)
|
||||||
if not url:
|
if not url:
|
||||||
raise McpError(INVALID_PARAMS, "URL is required")
|
raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
|
||||||
|
|
||||||
if not ignore_robots_txt:
|
if not ignore_robots_txt:
|
||||||
await check_may_autonomously_fetch_url(url, user_agent_autonomous)
|
await check_may_autonomously_fetch_url(url, user_agent_autonomous)
|
||||||
@@ -233,15 +234,27 @@ Although originally you did not have internet access, and were advised to refuse
|
|||||||
content, prefix = await fetch_url(
|
content, prefix = await fetch_url(
|
||||||
url, user_agent_autonomous, force_raw=args.raw
|
url, user_agent_autonomous, force_raw=args.raw
|
||||||
)
|
)
|
||||||
if len(content) > args.max_length:
|
original_length = len(content)
|
||||||
content = content[args.start_index : args.start_index + args.max_length]
|
if args.start_index >= original_length:
|
||||||
content += f"\n\n<error>Content truncated. Call the fetch tool with a start_index of {args.start_index + args.max_length} to get more content.</error>"
|
content = "<error>No more content available.</error>"
|
||||||
|
else:
|
||||||
|
truncated_content = content[args.start_index : args.start_index + args.max_length]
|
||||||
|
if not truncated_content:
|
||||||
|
content = "<error>No more content available.</error>"
|
||||||
|
else:
|
||||||
|
content = truncated_content
|
||||||
|
actual_content_length = len(truncated_content)
|
||||||
|
remaining_content = original_length - (args.start_index + actual_content_length)
|
||||||
|
# Only add the prompt to continue fetching if there is still remaining content
|
||||||
|
if actual_content_length == args.max_length and remaining_content > 0:
|
||||||
|
next_start = args.start_index + actual_content_length
|
||||||
|
content += f"\n\n<error>Content truncated. Call the fetch tool with a start_index of {next_start} to get more content.</error>"
|
||||||
return [TextContent(type="text", text=f"{prefix}Contents of {url}:\n{content}")]
|
return [TextContent(type="text", text=f"{prefix}Contents of {url}:\n{content}")]
|
||||||
|
|
||||||
@server.get_prompt()
|
@server.get_prompt()
|
||||||
async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
|
async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
|
||||||
if not arguments or "url" not in arguments:
|
if not arguments or "url" not in arguments:
|
||||||
raise McpError(INVALID_PARAMS, "URL is required")
|
raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
|
||||||
|
|
||||||
url = arguments["url"]
|
url = arguments["url"]
|
||||||
|
|
||||||
|
|||||||
181
src/fetch/uv.lock
generated
181
src/fetch/uv.lock
generated
@@ -48,71 +48,63 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "charset-normalizer"
|
name = "charset-normalizer"
|
||||||
version = "3.4.0"
|
version = "3.4.1"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 }
|
sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363 },
|
{ url = "https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 198013 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639 },
|
{ url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451 },
|
{ url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041 },
|
{ url = "https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333 },
|
{ url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921 },
|
{ url = "https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", size = 144785 },
|
{ url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631 },
|
{ url = "https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867 },
|
{ url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273 },
|
{ url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437 },
|
{ url = "https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087 },
|
{ url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142 },
|
{ url = "https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701 },
|
{ url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191 },
|
{ url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 },
|
{ url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 },
|
{ url = "https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 },
|
{ url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 },
|
{ url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 },
|
{ url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 },
|
{ url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585 },
|
{ url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 },
|
{ url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 },
|
{ url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 },
|
{ url = "https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 },
|
{ url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 },
|
{ url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 },
|
{ url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 },
|
{ url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 },
|
{ url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 },
|
{ url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 },
|
{ url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 },
|
{ url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 },
|
{ url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 },
|
{ url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 },
|
{ url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 },
|
{ url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 },
|
{ url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 },
|
{ url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 },
|
{ url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 },
|
{ url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 },
|
{ url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 },
|
{ url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 },
|
{ url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 },
|
{ url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 },
|
{ url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 },
|
{ url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 },
|
{ url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 },
|
{ url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 },
|
{ url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 },
|
{ url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 },
|
{ url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 },
|
{ url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 },
|
||||||
{ url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 },
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -169,30 +161,31 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "httpcore"
|
name = "httpcore"
|
||||||
version = "1.0.7"
|
version = "1.0.5"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "certifi" },
|
{ name = "certifi" },
|
||||||
{ name = "h11" },
|
{ name = "h11" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 }
|
sdist = { url = "https://files.pythonhosted.org/packages/17/b0/5e8b8674f8d203335a62fdfcfa0d11ebe09e23613c3391033cbba35f7926/httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61", size = 83234 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 },
|
{ url = "https://files.pythonhosted.org/packages/78/d4/e5d7e4f2174f8a4d63c8897d79eb8fe2503f7ecc03282fee1fa2719c2704/httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5", size = 77926 },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "httpx"
|
name = "httpx"
|
||||||
version = "0.28.0"
|
version = "0.27.2"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "anyio" },
|
{ name = "anyio" },
|
||||||
{ name = "certifi" },
|
{ name = "certifi" },
|
||||||
{ name = "httpcore" },
|
{ name = "httpcore" },
|
||||||
{ name = "idna" },
|
{ name = "idna" },
|
||||||
|
{ name = "sniffio" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/10/df/676b7cf674dd1bdc71a64ad393c89879f75e4a0ab8395165b498262ae106/httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0", size = 141307 }
|
sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/8f/fb/a19866137577ba60c6d8b69498dc36be479b13ba454f691348ddf428f185/httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc", size = 73551 },
|
{ url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -310,19 +303,21 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mcp"
|
name = "mcp"
|
||||||
version = "1.0.0"
|
version = "1.2.0"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "anyio" },
|
{ name = "anyio" },
|
||||||
{ name = "httpx" },
|
{ name = "httpx" },
|
||||||
{ name = "httpx-sse" },
|
{ name = "httpx-sse" },
|
||||||
{ name = "pydantic" },
|
{ name = "pydantic" },
|
||||||
|
{ name = "pydantic-settings" },
|
||||||
{ name = "sse-starlette" },
|
{ name = "sse-starlette" },
|
||||||
{ name = "starlette" },
|
{ name = "starlette" },
|
||||||
|
{ name = "uvicorn" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/97/de/a9ec0a1b6439f90ea59f89004bb2e7ec6890dfaeef809751d9e6577dca7e/mcp-1.0.0.tar.gz", hash = "sha256:dba51ce0b5c6a80e25576f606760c49a91ee90210fed805b530ca165d3bbc9b7", size = 82891 }
|
sdist = { url = "https://files.pythonhosted.org/packages/ab/a5/b08dc846ebedae9f17ced878e6975826e90e448cd4592f532f6a88a925a7/mcp-1.2.0.tar.gz", hash = "sha256:2b06c7ece98d6ea9e6379caa38d74b432385c338fb530cb82e2c70ea7add94f5", size = 102973 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/56/89/900c0c8445ec001d3725e475fc553b0feb2e8a51be018f3bb7de51e683db/mcp-1.0.0-py3-none-any.whl", hash = "sha256:bbe70ffa3341cd4da78b5eb504958355c68381fb29971471cea1e642a2af5b8a", size = 36361 },
|
{ url = "https://files.pythonhosted.org/packages/af/84/fca78f19ac8ce6c53ba416247c71baa53a9e791e98d3c81edbc20a77d6d1/mcp-1.2.0-py3-none-any.whl", hash = "sha256:1d0e77d8c14955a5aea1f5aa1f444c8e531c09355c829b20e42f7a142bc0755f", size = 66468 },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -347,7 +342,7 @@ dev = [
|
|||||||
[package.metadata]
|
[package.metadata]
|
||||||
requires-dist = [
|
requires-dist = [
|
||||||
{ name = "markdownify", specifier = ">=0.13.1" },
|
{ name = "markdownify", specifier = ">=0.13.1" },
|
||||||
{ name = "mcp", specifier = ">=1.0.0" },
|
{ name = "mcp", specifier = ">=1.1.3" },
|
||||||
{ name = "protego", specifier = ">=0.3.1" },
|
{ name = "protego", specifier = ">=0.3.1" },
|
||||||
{ name = "pydantic", specifier = ">=2.0.0" },
|
{ name = "pydantic", specifier = ">=2.0.0" },
|
||||||
{ name = "readabilipy", specifier = ">=0.2.0" },
|
{ name = "readabilipy", specifier = ">=0.2.0" },
|
||||||
@@ -380,16 +375,16 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pydantic"
|
name = "pydantic"
|
||||||
version = "2.10.2"
|
version = "2.10.1"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "annotated-types" },
|
{ name = "annotated-types" },
|
||||||
{ name = "pydantic-core" },
|
{ name = "pydantic-core" },
|
||||||
{ name = "typing-extensions" },
|
{ name = "typing-extensions" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/41/86/a03390cb12cf64e2a8df07c267f3eb8d5035e0f9a04bb20fb79403d2a00e/pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa", size = 785401 }
|
sdist = { url = "https://files.pythonhosted.org/packages/c4/bd/7fc610993f616d2398958d0028d15eaf53bde5f80cb2edb7aa4f1feaf3a7/pydantic-2.10.1.tar.gz", hash = "sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560", size = 783717 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/d5/74/da832196702d0c56eb86b75bfa346db9238617e29b0b7ee3b8b4eccfe654/pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e", size = 456364 },
|
{ url = "https://files.pythonhosted.org/packages/e0/fc/fda48d347bd50a788dd2a0f318a52160f911b86fc2d8b4c86f4d7c9bceea/pydantic-2.10.1-py3-none-any.whl", hash = "sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e", size = 455329 },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -467,6 +462,19 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/33/72/f881b5e18fbb67cf2fb4ab253660de3c6899dbb2dba409d0b757e3559e3d/pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c", size = 2001864 },
|
{ url = "https://files.pythonhosted.org/packages/33/72/f881b5e18fbb67cf2fb4ab253660de3c6899dbb2dba409d0b757e3559e3d/pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c", size = 2001864 },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pydantic-settings"
|
||||||
|
version = "2.6.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "pydantic" },
|
||||||
|
{ name = "python-dotenv" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b5/d4/9dfbe238f45ad8b168f5c96ee49a3df0598ce18a0795a983b419949ce65b/pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0", size = 75646 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5e/f9/ff95fd7d760af42f647ea87f9b8a383d891cdb5e5dbd4613edaeb094252a/pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87", size = 28595 },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyright"
|
name = "pyright"
|
||||||
version = "1.1.389"
|
version = "1.1.389"
|
||||||
@@ -480,6 +488,15 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/1b/26/c288cabf8cfc5a27e1aa9e5029b7682c0f920b8074f45d22bf844314d66a/pyright-1.1.389-py3-none-any.whl", hash = "sha256:41e9620bba9254406dc1f621a88ceab5a88af4c826feb4f614d95691ed243a60", size = 18581 },
|
{ url = "https://files.pythonhosted.org/packages/1b/26/c288cabf8cfc5a27e1aa9e5029b7682c0f920b8074f45d22bf844314d66a/pyright-1.1.389-py3-none-any.whl", hash = "sha256:41e9620bba9254406dc1f621a88ceab5a88af4c826feb4f614d95691ed243a60", size = 18581 },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "python-dotenv"
|
||||||
|
version = "1.0.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "readabilipy"
|
name = "readabilipy"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
@@ -647,14 +664,14 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "starlette"
|
name = "starlette"
|
||||||
version = "0.41.3"
|
version = "0.41.2"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "anyio" },
|
{ name = "anyio" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/1a/4c/9b5764bd22eec91c4039ef4c55334e9187085da2d8a2df7bd570869aae18/starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", size = 2574159 }
|
sdist = { url = "https://files.pythonhosted.org/packages/3e/da/1fb4bdb72ae12b834becd7e1e7e47001d32f91ec0ce8d7bc1b618d9f0bd9/starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62", size = 2573867 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225 },
|
{ url = "https://files.pythonhosted.org/packages/54/43/f185bfd0ca1d213beb4293bed51d92254df23d8ceaf6c0e17146d508a776/starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d", size = 73259 },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -677,16 +694,16 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uvicorn"
|
name = "uvicorn"
|
||||||
version = "0.32.1"
|
version = "0.32.0"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "click" },
|
{ name = "click" },
|
||||||
{ name = "h11" },
|
{ name = "h11" },
|
||||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/6a/3c/21dba3e7d76138725ef307e3d7ddd29b763119b3aa459d02cc05fefcff75/uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175", size = 77630 }
|
sdist = { url = "https://files.pythonhosted.org/packages/e0/fc/1d785078eefd6945f3e5bab5c076e4230698046231eb0f3747bc5c8fa992/uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e", size = 77564 }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/50/c1/2d27b0a15826c2b71dcf6e2f5402181ef85acf439617bb2f1453125ce1f3/uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e", size = 63828 },
|
{ url = "https://files.pythonhosted.org/packages/eb/14/78bd0e95dd2444b6caacbca2b730671d4295ccb628ef58b81bee903629df/uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82", size = 63723 },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
|||||||
@@ -124,7 +124,7 @@ Note: all directories must be mounted to `/projects` by default.
|
|||||||
"--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro",
|
"--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro",
|
||||||
"--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt",
|
"--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt",
|
||||||
"mcp/filesystem",
|
"mcp/filesystem",
|
||||||
"/projects",
|
"/projects"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/gdrive /app
|
COPY src/gdrive /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ import {
|
|||||||
import fs from "fs";
|
import fs from "fs";
|
||||||
import { google } from "googleapis";
|
import { google } from "googleapis";
|
||||||
import path from "path";
|
import path from "path";
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
const drive = google.drive("v3");
|
const drive = google.drive("v3");
|
||||||
|
|
||||||
@@ -176,7 +177,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const credentialsPath = process.env.GDRIVE_CREDENTIALS_PATH || path.join(
|
const credentialsPath = process.env.GDRIVE_CREDENTIALS_PATH || path.join(
|
||||||
path.dirname(new URL(import.meta.url).pathname),
|
path.dirname(fileURLToPath(import.meta.url)),
|
||||||
"../../../.gdrive-server-credentials.json",
|
"../../../.gdrive-server-credentials.json",
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -184,7 +185,7 @@ async function authenticateAndSaveCredentials() {
|
|||||||
console.log("Launching auth flow…");
|
console.log("Launching auth flow…");
|
||||||
const auth = await authenticate({
|
const auth = await authenticate({
|
||||||
keyfilePath: process.env.GDRIVE_OAUTH_PATH || path.join(
|
keyfilePath: process.env.GDRIVE_OAUTH_PATH || path.join(
|
||||||
path.dirname(new URL(import.meta.url).pathname),
|
path.dirname(fileURLToPath(import.meta.url)),
|
||||||
"../../../gcp-oauth.keys.json",
|
"../../../gcp-oauth.keys.json",
|
||||||
),
|
),
|
||||||
scopes: ["https://www.googleapis.com/auth/drive.readonly"],
|
scopes: ["https://www.googleapis.com/auth/drive.readonly"],
|
||||||
|
|||||||
@@ -67,18 +67,23 @@ Please note that mcp-server-git is currently in early development. The functiona
|
|||||||
- `branch_name` (string): Name of the new branch
|
- `branch_name` (string): Name of the new branch
|
||||||
- `start_point` (string, optional): Starting point for the new branch
|
- `start_point` (string, optional): Starting point for the new branch
|
||||||
- Returns: Confirmation of branch creation
|
- Returns: Confirmation of branch creation
|
||||||
8. `git_checkout`
|
10. `git_checkout`
|
||||||
- Switches branches
|
- Switches branches
|
||||||
- Inputs:
|
- Inputs:
|
||||||
- `repo_path` (string): Path to Git repository
|
- `repo_path` (string): Path to Git repository
|
||||||
- `branch_name` (string): Name of branch to checkout
|
- `branch_name` (string): Name of branch to checkout
|
||||||
- Returns: Confirmation of branch switch
|
- Returns: Confirmation of branch switch
|
||||||
9. `git_show`
|
11. `git_show`
|
||||||
- Shows the contents of a commit
|
- Shows the contents of a commit
|
||||||
- Inputs:
|
- Inputs:
|
||||||
- `repo_path` (string): Path to Git repository
|
- `repo_path` (string): Path to Git repository
|
||||||
- `revision` (string): The revision (commit hash, branch name, tag) to show
|
- `revision` (string): The revision (commit hash, branch name, tag) to show
|
||||||
- Returns: Contents of the specified commit
|
- Returns: Contents of the specified commit
|
||||||
|
12. `git_init`
|
||||||
|
- Initializes a Git repository
|
||||||
|
- Inputs:
|
||||||
|
- `repo_path` (string): Path to directory to initialize git repo
|
||||||
|
- Returns: Confirmation of repository initialization
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
@@ -213,7 +218,7 @@ If you are doing local development, there are two ways to test your changes:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"mcpServers": {
|
"mcpServers": {
|
||||||
"brave-search": {
|
"git": {
|
||||||
"command": "docker",
|
"command": "docker",
|
||||||
"args": [
|
"args": [
|
||||||
"run",
|
"run",
|
||||||
|
|||||||
@@ -56,6 +56,9 @@ class GitShow(BaseModel):
|
|||||||
repo_path: str
|
repo_path: str
|
||||||
revision: str
|
revision: str
|
||||||
|
|
||||||
|
class GitInit(BaseModel):
|
||||||
|
repo_path: str
|
||||||
|
|
||||||
class GitTools(str, Enum):
|
class GitTools(str, Enum):
|
||||||
STATUS = "git_status"
|
STATUS = "git_status"
|
||||||
DIFF_UNSTAGED = "git_diff_unstaged"
|
DIFF_UNSTAGED = "git_diff_unstaged"
|
||||||
@@ -68,6 +71,7 @@ class GitTools(str, Enum):
|
|||||||
CREATE_BRANCH = "git_create_branch"
|
CREATE_BRANCH = "git_create_branch"
|
||||||
CHECKOUT = "git_checkout"
|
CHECKOUT = "git_checkout"
|
||||||
SHOW = "git_show"
|
SHOW = "git_show"
|
||||||
|
INIT = "git_init"
|
||||||
|
|
||||||
def git_status(repo: git.Repo) -> str:
|
def git_status(repo: git.Repo) -> str:
|
||||||
return repo.git.status()
|
return repo.git.status()
|
||||||
@@ -118,6 +122,13 @@ def git_checkout(repo: git.Repo, branch_name: str) -> str:
|
|||||||
repo.git.checkout(branch_name)
|
repo.git.checkout(branch_name)
|
||||||
return f"Switched to branch '{branch_name}'"
|
return f"Switched to branch '{branch_name}'"
|
||||||
|
|
||||||
|
def git_init(repo_path: str) -> str:
|
||||||
|
try:
|
||||||
|
repo = git.Repo.init(path=repo_path, mkdir=True)
|
||||||
|
return f"Initialized empty Git repository in {repo.git_dir}"
|
||||||
|
except Exception as e:
|
||||||
|
return f"Error initializing repository: {str(e)}"
|
||||||
|
|
||||||
def git_show(repo: git.Repo, revision: str) -> str:
|
def git_show(repo: git.Repo, revision: str) -> str:
|
||||||
commit = repo.commit(revision)
|
commit = repo.commit(revision)
|
||||||
output = [
|
output = [
|
||||||
@@ -206,6 +217,11 @@ async def serve(repository: Path | None) -> None:
|
|||||||
name=GitTools.SHOW,
|
name=GitTools.SHOW,
|
||||||
description="Shows the contents of a commit",
|
description="Shows the contents of a commit",
|
||||||
inputSchema=GitShow.schema(),
|
inputSchema=GitShow.schema(),
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name=GitTools.INIT,
|
||||||
|
description="Initialize a new Git repository",
|
||||||
|
inputSchema=GitInit.schema(),
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -241,6 +257,16 @@ async def serve(repository: Path | None) -> None:
|
|||||||
@server.call_tool()
|
@server.call_tool()
|
||||||
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
|
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
|
||||||
repo_path = Path(arguments["repo_path"])
|
repo_path = Path(arguments["repo_path"])
|
||||||
|
|
||||||
|
# Handle git init separately since it doesn't require an existing repo
|
||||||
|
if name == GitTools.INIT:
|
||||||
|
result = git_init(str(repo_path))
|
||||||
|
return [TextContent(
|
||||||
|
type="text",
|
||||||
|
text=result
|
||||||
|
)]
|
||||||
|
|
||||||
|
# For all other commands, we need an existing repo
|
||||||
repo = git.Repo(repo_path)
|
repo = git.Repo(repo_path)
|
||||||
|
|
||||||
match name:
|
match name:
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
||||||
COPY src/github /app
|
COPY src/github /app
|
||||||
|
|||||||
@@ -188,6 +188,95 @@ MCP Server for the GitHub API, enabling file operations, repository management,
|
|||||||
- `issue_number` (number): Issue number to retrieve
|
- `issue_number` (number): Issue number to retrieve
|
||||||
- Returns: Github Issue object & details
|
- Returns: Github Issue object & details
|
||||||
|
|
||||||
|
18. `get_pull_request`
|
||||||
|
- Get details of a specific pull request
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- Returns: Pull request details including diff and review status
|
||||||
|
|
||||||
|
19. `list_pull_requests`
|
||||||
|
- List and filter repository pull requests
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `state` (optional string): Filter by state ('open', 'closed', 'all')
|
||||||
|
- `head` (optional string): Filter by head user/org and branch
|
||||||
|
- `base` (optional string): Filter by base branch
|
||||||
|
- `sort` (optional string): Sort by ('created', 'updated', 'popularity', 'long-running')
|
||||||
|
- `direction` (optional string): Sort direction ('asc', 'desc')
|
||||||
|
- `per_page` (optional number): Results per page (max 100)
|
||||||
|
- `page` (optional number): Page number
|
||||||
|
- Returns: Array of pull request details
|
||||||
|
|
||||||
|
20. `create_pull_request_review`
|
||||||
|
- Create a review on a pull request
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- `body` (string): Review comment text
|
||||||
|
- `event` (string): Review action ('APPROVE', 'REQUEST_CHANGES', 'COMMENT')
|
||||||
|
- `commit_id` (optional string): SHA of commit to review
|
||||||
|
- `comments` (optional array): Line-specific comments, each with:
|
||||||
|
- `path` (string): File path
|
||||||
|
- `position` (number): Line position in diff
|
||||||
|
- `body` (string): Comment text
|
||||||
|
- Returns: Created review details
|
||||||
|
|
||||||
|
21. `merge_pull_request`
|
||||||
|
- Merge a pull request
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- `commit_title` (optional string): Title for merge commit
|
||||||
|
- `commit_message` (optional string): Extra detail for merge commit
|
||||||
|
- `merge_method` (optional string): Merge method ('merge', 'squash', 'rebase')
|
||||||
|
- Returns: Merge result details
|
||||||
|
|
||||||
|
22. `get_pull_request_files`
|
||||||
|
- Get the list of files changed in a pull request
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- Returns: Array of changed files with patch and status details
|
||||||
|
|
||||||
|
23. `get_pull_request_status`
|
||||||
|
- Get the combined status of all status checks for a pull request
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- Returns: Combined status check results and individual check details
|
||||||
|
|
||||||
|
24. `update_pull_request_branch`
|
||||||
|
- Update a pull request branch with the latest changes from the base branch (equivalent to GitHub's "Update branch" button)
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- `expected_head_sha` (optional string): The expected SHA of the pull request's HEAD ref
|
||||||
|
- Returns: Success message when branch is updated
|
||||||
|
|
||||||
|
25. `get_pull_request_comments`
|
||||||
|
- Get the review comments on a pull request
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- Returns: Array of pull request review comments with details like the comment text, author, and location in the diff
|
||||||
|
|
||||||
|
26. `get_pull_request_reviews`
|
||||||
|
- Get the reviews on a pull request
|
||||||
|
- Inputs:
|
||||||
|
- `owner` (string): Repository owner
|
||||||
|
- `repo` (string): Repository name
|
||||||
|
- `pull_number` (number): Pull request number
|
||||||
|
- Returns: Array of pull request reviews with details like the review state (APPROVED, CHANGES_REQUESTED, etc.), reviewer, and review body
|
||||||
|
|
||||||
## Search Query Syntax
|
## Search Query Syntax
|
||||||
|
|
||||||
### Code Search
|
### Code Search
|
||||||
@@ -257,13 +346,13 @@ To use this with Claude Desktop, add the following to your `claude_desktop_confi
|
|||||||
"args": [
|
"args": [
|
||||||
"-y",
|
"-y",
|
||||||
"@modelcontextprotocol/server-github"
|
"@modelcontextprotocol/server-github"
|
||||||
]
|
],
|
||||||
},
|
|
||||||
"env": {
|
"env": {
|
||||||
"GITHUB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>"
|
"GITHUB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## Build
|
## Build
|
||||||
|
|||||||
89
src/github/common/errors.ts
Normal file
89
src/github/common/errors.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
export class GitHubError extends Error {
|
||||||
|
constructor(
|
||||||
|
message: string,
|
||||||
|
public readonly status: number,
|
||||||
|
public readonly response: unknown
|
||||||
|
) {
|
||||||
|
super(message);
|
||||||
|
this.name = "GitHubError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GitHubValidationError extends GitHubError {
|
||||||
|
constructor(message: string, status: number, response: unknown) {
|
||||||
|
super(message, status, response);
|
||||||
|
this.name = "GitHubValidationError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GitHubResourceNotFoundError extends GitHubError {
|
||||||
|
constructor(resource: string) {
|
||||||
|
super(`Resource not found: ${resource}`, 404, { message: `${resource} not found` });
|
||||||
|
this.name = "GitHubResourceNotFoundError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GitHubAuthenticationError extends GitHubError {
|
||||||
|
constructor(message = "Authentication failed") {
|
||||||
|
super(message, 401, { message });
|
||||||
|
this.name = "GitHubAuthenticationError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GitHubPermissionError extends GitHubError {
|
||||||
|
constructor(message = "Insufficient permissions") {
|
||||||
|
super(message, 403, { message });
|
||||||
|
this.name = "GitHubPermissionError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GitHubRateLimitError extends GitHubError {
|
||||||
|
constructor(
|
||||||
|
message = "Rate limit exceeded",
|
||||||
|
public readonly resetAt: Date
|
||||||
|
) {
|
||||||
|
super(message, 429, { message, reset_at: resetAt.toISOString() });
|
||||||
|
this.name = "GitHubRateLimitError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GitHubConflictError extends GitHubError {
|
||||||
|
constructor(message: string) {
|
||||||
|
super(message, 409, { message });
|
||||||
|
this.name = "GitHubConflictError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isGitHubError(error: unknown): error is GitHubError {
|
||||||
|
return error instanceof GitHubError;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createGitHubError(status: number, response: any): GitHubError {
|
||||||
|
switch (status) {
|
||||||
|
case 401:
|
||||||
|
return new GitHubAuthenticationError(response?.message);
|
||||||
|
case 403:
|
||||||
|
return new GitHubPermissionError(response?.message);
|
||||||
|
case 404:
|
||||||
|
return new GitHubResourceNotFoundError(response?.message || "Resource");
|
||||||
|
case 409:
|
||||||
|
return new GitHubConflictError(response?.message || "Conflict occurred");
|
||||||
|
case 422:
|
||||||
|
return new GitHubValidationError(
|
||||||
|
response?.message || "Validation failed",
|
||||||
|
status,
|
||||||
|
response
|
||||||
|
);
|
||||||
|
case 429:
|
||||||
|
return new GitHubRateLimitError(
|
||||||
|
response?.message,
|
||||||
|
new Date(response?.reset_at || Date.now() + 60000)
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
return new GitHubError(
|
||||||
|
response?.message || "GitHub API error",
|
||||||
|
status,
|
||||||
|
response
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
259
src/github/common/types.ts
Normal file
259
src/github/common/types.ts
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
// Base schemas for common types
|
||||||
|
export const GitHubAuthorSchema = z.object({
|
||||||
|
name: z.string(),
|
||||||
|
email: z.string(),
|
||||||
|
date: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubOwnerSchema = z.object({
|
||||||
|
login: z.string(),
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
avatar_url: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
type: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubRepositorySchema = z.object({
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
name: z.string(),
|
||||||
|
full_name: z.string(),
|
||||||
|
private: z.boolean(),
|
||||||
|
owner: GitHubOwnerSchema,
|
||||||
|
html_url: z.string(),
|
||||||
|
description: z.string().nullable(),
|
||||||
|
fork: z.boolean(),
|
||||||
|
url: z.string(),
|
||||||
|
created_at: z.string(),
|
||||||
|
updated_at: z.string(),
|
||||||
|
pushed_at: z.string(),
|
||||||
|
git_url: z.string(),
|
||||||
|
ssh_url: z.string(),
|
||||||
|
clone_url: z.string(),
|
||||||
|
default_branch: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GithubFileContentLinks = z.object({
|
||||||
|
self: z.string(),
|
||||||
|
git: z.string().nullable(),
|
||||||
|
html: z.string().nullable()
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubFileContentSchema = z.object({
|
||||||
|
name: z.string(),
|
||||||
|
path: z.string(),
|
||||||
|
sha: z.string(),
|
||||||
|
size: z.number(),
|
||||||
|
url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
git_url: z.string(),
|
||||||
|
download_url: z.string(),
|
||||||
|
type: z.string(),
|
||||||
|
content: z.string().optional(),
|
||||||
|
encoding: z.string().optional(),
|
||||||
|
_links: GithubFileContentLinks
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubDirectoryContentSchema = z.object({
|
||||||
|
type: z.string(),
|
||||||
|
size: z.number(),
|
||||||
|
name: z.string(),
|
||||||
|
path: z.string(),
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
git_url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
download_url: z.string().nullable(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubContentSchema = z.union([
|
||||||
|
GitHubFileContentSchema,
|
||||||
|
z.array(GitHubDirectoryContentSchema),
|
||||||
|
]);
|
||||||
|
|
||||||
|
export const GitHubTreeEntrySchema = z.object({
|
||||||
|
path: z.string(),
|
||||||
|
mode: z.enum(["100644", "100755", "040000", "160000", "120000"]),
|
||||||
|
type: z.enum(["blob", "tree", "commit"]),
|
||||||
|
size: z.number().optional(),
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubTreeSchema = z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
tree: z.array(GitHubTreeEntrySchema),
|
||||||
|
truncated: z.boolean(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubCommitSchema = z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
node_id: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
author: GitHubAuthorSchema,
|
||||||
|
committer: GitHubAuthorSchema,
|
||||||
|
message: z.string(),
|
||||||
|
tree: z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
}),
|
||||||
|
parents: z.array(
|
||||||
|
z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
})
|
||||||
|
),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubListCommitsSchema = z.array(z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
node_id: z.string(),
|
||||||
|
commit: z.object({
|
||||||
|
author: GitHubAuthorSchema,
|
||||||
|
committer: GitHubAuthorSchema,
|
||||||
|
message: z.string(),
|
||||||
|
tree: z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string()
|
||||||
|
}),
|
||||||
|
url: z.string(),
|
||||||
|
comment_count: z.number(),
|
||||||
|
}),
|
||||||
|
url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
comments_url: z.string()
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const GitHubReferenceSchema = z.object({
|
||||||
|
ref: z.string(),
|
||||||
|
node_id: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
object: z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
type: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
// User and assignee schemas
|
||||||
|
export const GitHubIssueAssigneeSchema = z.object({
|
||||||
|
login: z.string(),
|
||||||
|
id: z.number(),
|
||||||
|
avatar_url: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Issue-related schemas
|
||||||
|
export const GitHubLabelSchema = z.object({
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
name: z.string(),
|
||||||
|
color: z.string(),
|
||||||
|
default: z.boolean(),
|
||||||
|
description: z.string().optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubMilestoneSchema = z.object({
|
||||||
|
url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
labels_url: z.string(),
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
number: z.number(),
|
||||||
|
title: z.string(),
|
||||||
|
description: z.string(),
|
||||||
|
state: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubIssueSchema = z.object({
|
||||||
|
url: z.string(),
|
||||||
|
repository_url: z.string(),
|
||||||
|
labels_url: z.string(),
|
||||||
|
comments_url: z.string(),
|
||||||
|
events_url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
number: z.number(),
|
||||||
|
title: z.string(),
|
||||||
|
user: GitHubIssueAssigneeSchema,
|
||||||
|
labels: z.array(GitHubLabelSchema),
|
||||||
|
state: z.string(),
|
||||||
|
locked: z.boolean(),
|
||||||
|
assignee: GitHubIssueAssigneeSchema.nullable(),
|
||||||
|
assignees: z.array(GitHubIssueAssigneeSchema),
|
||||||
|
milestone: GitHubMilestoneSchema.nullable(),
|
||||||
|
comments: z.number(),
|
||||||
|
created_at: z.string(),
|
||||||
|
updated_at: z.string(),
|
||||||
|
closed_at: z.string().nullable(),
|
||||||
|
body: z.string().nullable(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Search-related schemas
|
||||||
|
export const GitHubSearchResponseSchema = z.object({
|
||||||
|
total_count: z.number(),
|
||||||
|
incomplete_results: z.boolean(),
|
||||||
|
items: z.array(GitHubRepositorySchema),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Pull request schemas
|
||||||
|
export const GitHubPullRequestRefSchema = z.object({
|
||||||
|
label: z.string(),
|
||||||
|
ref: z.string(),
|
||||||
|
sha: z.string(),
|
||||||
|
user: GitHubIssueAssigneeSchema,
|
||||||
|
repo: GitHubRepositorySchema,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubPullRequestSchema = z.object({
|
||||||
|
url: z.string(),
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
diff_url: z.string(),
|
||||||
|
patch_url: z.string(),
|
||||||
|
issue_url: z.string(),
|
||||||
|
number: z.number(),
|
||||||
|
state: z.string(),
|
||||||
|
locked: z.boolean(),
|
||||||
|
title: z.string(),
|
||||||
|
user: GitHubIssueAssigneeSchema,
|
||||||
|
body: z.string().nullable(),
|
||||||
|
created_at: z.string(),
|
||||||
|
updated_at: z.string(),
|
||||||
|
closed_at: z.string().nullable(),
|
||||||
|
merged_at: z.string().nullable(),
|
||||||
|
merge_commit_sha: z.string().nullable(),
|
||||||
|
assignee: GitHubIssueAssigneeSchema.nullable(),
|
||||||
|
assignees: z.array(GitHubIssueAssigneeSchema),
|
||||||
|
requested_reviewers: z.array(GitHubIssueAssigneeSchema),
|
||||||
|
labels: z.array(GitHubLabelSchema),
|
||||||
|
head: GitHubPullRequestRefSchema,
|
||||||
|
base: GitHubPullRequestRefSchema,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Export types
|
||||||
|
export type GitHubAuthor = z.infer<typeof GitHubAuthorSchema>;
|
||||||
|
export type GitHubRepository = z.infer<typeof GitHubRepositorySchema>;
|
||||||
|
export type GitHubFileContent = z.infer<typeof GitHubFileContentSchema>;
|
||||||
|
export type GitHubDirectoryContent = z.infer<typeof GitHubDirectoryContentSchema>;
|
||||||
|
export type GitHubContent = z.infer<typeof GitHubContentSchema>;
|
||||||
|
export type GitHubTree = z.infer<typeof GitHubTreeSchema>;
|
||||||
|
export type GitHubCommit = z.infer<typeof GitHubCommitSchema>;
|
||||||
|
export type GitHubListCommits = z.infer<typeof GitHubListCommitsSchema>;
|
||||||
|
export type GitHubReference = z.infer<typeof GitHubReferenceSchema>;
|
||||||
|
export type GitHubIssueAssignee = z.infer<typeof GitHubIssueAssigneeSchema>;
|
||||||
|
export type GitHubLabel = z.infer<typeof GitHubLabelSchema>;
|
||||||
|
export type GitHubMilestone = z.infer<typeof GitHubMilestoneSchema>;
|
||||||
|
export type GitHubIssue = z.infer<typeof GitHubIssueSchema>;
|
||||||
|
export type GitHubSearchResponse = z.infer<typeof GitHubSearchResponseSchema>;
|
||||||
|
export type GitHubPullRequest = z.infer<typeof GitHubPullRequestSchema>;
|
||||||
|
export type GitHubPullRequestRef = z.infer<typeof GitHubPullRequestRefSchema>;
|
||||||
138
src/github/common/utils.ts
Normal file
138
src/github/common/utils.ts
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
import { getUserAgent } from "universal-user-agent";
|
||||||
|
import { createGitHubError } from "./errors.js";
|
||||||
|
import { VERSION } from "./version.js";
|
||||||
|
|
||||||
|
type RequestOptions = {
|
||||||
|
method?: string;
|
||||||
|
body?: unknown;
|
||||||
|
headers?: Record<string, string>;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function parseResponseBody(response: Response): Promise<unknown> {
|
||||||
|
const contentType = response.headers.get("content-type");
|
||||||
|
if (contentType?.includes("application/json")) {
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
return response.text();
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildUrl(baseUrl: string, params: Record<string, string | number | undefined>): string {
|
||||||
|
const url = new URL(baseUrl);
|
||||||
|
Object.entries(params).forEach(([key, value]) => {
|
||||||
|
if (value !== undefined) {
|
||||||
|
url.searchParams.append(key, value.toString());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return url.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
const USER_AGENT = `modelcontextprotocol/servers/github/v${VERSION} ${getUserAgent()}`;
|
||||||
|
|
||||||
|
export async function githubRequest(
|
||||||
|
url: string,
|
||||||
|
options: RequestOptions = {}
|
||||||
|
): Promise<unknown> {
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
"Accept": "application/vnd.github.v3+json",
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"User-Agent": USER_AGENT,
|
||||||
|
...options.headers,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (process.env.GITHUB_PERSONAL_ACCESS_TOKEN) {
|
||||||
|
headers["Authorization"] = `Bearer ${process.env.GITHUB_PERSONAL_ACCESS_TOKEN}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: options.method || "GET",
|
||||||
|
headers,
|
||||||
|
body: options.body ? JSON.stringify(options.body) : undefined,
|
||||||
|
});
|
||||||
|
|
||||||
|
const responseBody = await parseResponseBody(response);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw createGitHubError(response.status, responseBody);
|
||||||
|
}
|
||||||
|
|
||||||
|
return responseBody;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function validateBranchName(branch: string): string {
|
||||||
|
const sanitized = branch.trim();
|
||||||
|
if (!sanitized) {
|
||||||
|
throw new Error("Branch name cannot be empty");
|
||||||
|
}
|
||||||
|
if (sanitized.includes("..")) {
|
||||||
|
throw new Error("Branch name cannot contain '..'");
|
||||||
|
}
|
||||||
|
if (/[\s~^:?*[\\\]]/.test(sanitized)) {
|
||||||
|
throw new Error("Branch name contains invalid characters");
|
||||||
|
}
|
||||||
|
if (sanitized.startsWith("/") || sanitized.endsWith("/")) {
|
||||||
|
throw new Error("Branch name cannot start or end with '/'");
|
||||||
|
}
|
||||||
|
if (sanitized.endsWith(".lock")) {
|
||||||
|
throw new Error("Branch name cannot end with '.lock'");
|
||||||
|
}
|
||||||
|
return sanitized;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function validateRepositoryName(name: string): string {
|
||||||
|
const sanitized = name.trim().toLowerCase();
|
||||||
|
if (!sanitized) {
|
||||||
|
throw new Error("Repository name cannot be empty");
|
||||||
|
}
|
||||||
|
if (!/^[a-z0-9_.-]+$/.test(sanitized)) {
|
||||||
|
throw new Error(
|
||||||
|
"Repository name can only contain lowercase letters, numbers, hyphens, periods, and underscores"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (sanitized.startsWith(".") || sanitized.endsWith(".")) {
|
||||||
|
throw new Error("Repository name cannot start or end with a period");
|
||||||
|
}
|
||||||
|
return sanitized;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function validateOwnerName(owner: string): string {
|
||||||
|
const sanitized = owner.trim().toLowerCase();
|
||||||
|
if (!sanitized) {
|
||||||
|
throw new Error("Owner name cannot be empty");
|
||||||
|
}
|
||||||
|
if (!/^[a-z0-9](?:[a-z0-9]|-(?=[a-z0-9])){0,38}$/.test(sanitized)) {
|
||||||
|
throw new Error(
|
||||||
|
"Owner name must start with a letter or number and can contain up to 39 characters"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return sanitized;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function checkBranchExists(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
branch: string
|
||||||
|
): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/branches/${branch}`
|
||||||
|
);
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
if (error && typeof error === "object" && "status" in error && error.status === 404) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function checkUserExists(username: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await githubRequest(`https://api.github.com/users/${username}`);
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
if (error && typeof error === "object" && "status" in error && error.status === 404) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
1
src/github/common/version.ts
Normal file
1
src/github/common/version.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export const VERSION = "0.6.2";
|
||||||
File diff suppressed because it is too large
Load Diff
112
src/github/operations/branches.ts
Normal file
112
src/github/operations/branches.ts
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { githubRequest } from "../common/utils.js";
|
||||||
|
import { GitHubReferenceSchema } from "../common/types.js";
|
||||||
|
|
||||||
|
// Schema definitions
|
||||||
|
export const CreateBranchOptionsSchema = z.object({
|
||||||
|
ref: z.string(),
|
||||||
|
sha: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CreateBranchSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
branch: z.string().describe("Name for the new branch"),
|
||||||
|
from_branch: z.string().optional().describe("Optional: source branch to create from (defaults to the repository's default branch)"),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Type exports
|
||||||
|
export type CreateBranchOptions = z.infer<typeof CreateBranchOptionsSchema>;
|
||||||
|
|
||||||
|
// Function implementations
|
||||||
|
export async function getDefaultBranchSHA(owner: string, repo: string): Promise<string> {
|
||||||
|
try {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/main`
|
||||||
|
);
|
||||||
|
const data = GitHubReferenceSchema.parse(response);
|
||||||
|
return data.object.sha;
|
||||||
|
} catch (error) {
|
||||||
|
const masterResponse = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/master`
|
||||||
|
);
|
||||||
|
if (!masterResponse) {
|
||||||
|
throw new Error("Could not find default branch (tried 'main' and 'master')");
|
||||||
|
}
|
||||||
|
const data = GitHubReferenceSchema.parse(masterResponse);
|
||||||
|
return data.object.sha;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createBranch(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
options: CreateBranchOptions
|
||||||
|
): Promise<z.infer<typeof GitHubReferenceSchema>> {
|
||||||
|
const fullRef = `refs/heads/${options.ref}`;
|
||||||
|
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/refs`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
body: {
|
||||||
|
ref: fullRef,
|
||||||
|
sha: options.sha,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return GitHubReferenceSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getBranchSHA(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
branch: string
|
||||||
|
): Promise<string> {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`
|
||||||
|
);
|
||||||
|
|
||||||
|
const data = GitHubReferenceSchema.parse(response);
|
||||||
|
return data.object.sha;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createBranchFromRef(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
newBranch: string,
|
||||||
|
fromBranch?: string
|
||||||
|
): Promise<z.infer<typeof GitHubReferenceSchema>> {
|
||||||
|
let sha: string;
|
||||||
|
if (fromBranch) {
|
||||||
|
sha = await getBranchSHA(owner, repo, fromBranch);
|
||||||
|
} else {
|
||||||
|
sha = await getDefaultBranchSHA(owner, repo);
|
||||||
|
}
|
||||||
|
|
||||||
|
return createBranch(owner, repo, {
|
||||||
|
ref: newBranch,
|
||||||
|
sha,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function updateBranch(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
branch: string,
|
||||||
|
sha: string
|
||||||
|
): Promise<z.infer<typeof GitHubReferenceSchema>> {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`,
|
||||||
|
{
|
||||||
|
method: "PATCH",
|
||||||
|
body: {
|
||||||
|
sha,
|
||||||
|
force: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return GitHubReferenceSchema.parse(response);
|
||||||
|
}
|
||||||
26
src/github/operations/commits.ts
Normal file
26
src/github/operations/commits.ts
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { githubRequest, buildUrl } from "../common/utils.js";
|
||||||
|
|
||||||
|
export const ListCommitsSchema = z.object({
|
||||||
|
owner: z.string(),
|
||||||
|
repo: z.string(),
|
||||||
|
sha: z.string().optional(),
|
||||||
|
page: z.number().optional(),
|
||||||
|
perPage: z.number().optional()
|
||||||
|
});
|
||||||
|
|
||||||
|
export async function listCommits(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
page?: number,
|
||||||
|
perPage?: number,
|
||||||
|
sha?: string
|
||||||
|
) {
|
||||||
|
return githubRequest(
|
||||||
|
buildUrl(`https://api.github.com/repos/${owner}/${repo}/commits`, {
|
||||||
|
page: page?.toString(),
|
||||||
|
per_page: perPage?.toString(),
|
||||||
|
sha
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
219
src/github/operations/files.ts
Normal file
219
src/github/operations/files.ts
Normal file
@@ -0,0 +1,219 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { githubRequest } from "../common/utils.js";
|
||||||
|
import {
|
||||||
|
GitHubContentSchema,
|
||||||
|
GitHubAuthorSchema,
|
||||||
|
GitHubTreeSchema,
|
||||||
|
GitHubCommitSchema,
|
||||||
|
GitHubReferenceSchema,
|
||||||
|
GitHubFileContentSchema,
|
||||||
|
} from "../common/types.js";
|
||||||
|
|
||||||
|
// Schema definitions
|
||||||
|
export const FileOperationSchema = z.object({
|
||||||
|
path: z.string(),
|
||||||
|
content: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CreateOrUpdateFileSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
path: z.string().describe("Path where to create/update the file"),
|
||||||
|
content: z.string().describe("Content of the file"),
|
||||||
|
message: z.string().describe("Commit message"),
|
||||||
|
branch: z.string().describe("Branch to create/update the file in"),
|
||||||
|
sha: z.string().optional().describe("SHA of the file being replaced (required when updating existing files)"),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GetFileContentsSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
path: z.string().describe("Path to the file or directory"),
|
||||||
|
branch: z.string().optional().describe("Branch to get contents from"),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const PushFilesSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
branch: z.string().describe("Branch to push to (e.g., 'main' or 'master')"),
|
||||||
|
files: z.array(FileOperationSchema).describe("Array of files to push"),
|
||||||
|
message: z.string().describe("Commit message"),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GitHubCreateUpdateFileResponseSchema = z.object({
|
||||||
|
content: GitHubFileContentSchema.nullable(),
|
||||||
|
commit: z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
node_id: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
author: GitHubAuthorSchema,
|
||||||
|
committer: GitHubAuthorSchema,
|
||||||
|
message: z.string(),
|
||||||
|
tree: z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
}),
|
||||||
|
parents: z.array(
|
||||||
|
z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
})
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Type exports
|
||||||
|
export type FileOperation = z.infer<typeof FileOperationSchema>;
|
||||||
|
export type GitHubCreateUpdateFileResponse = z.infer<typeof GitHubCreateUpdateFileResponseSchema>;
|
||||||
|
|
||||||
|
// Function implementations
|
||||||
|
export async function getFileContents(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
path: string,
|
||||||
|
branch?: string
|
||||||
|
) {
|
||||||
|
let url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`;
|
||||||
|
if (branch) {
|
||||||
|
url += `?ref=${branch}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await githubRequest(url);
|
||||||
|
const data = GitHubContentSchema.parse(response);
|
||||||
|
|
||||||
|
// If it's a file, decode the content
|
||||||
|
if (!Array.isArray(data) && data.content) {
|
||||||
|
data.content = Buffer.from(data.content, "base64").toString("utf8");
|
||||||
|
}
|
||||||
|
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createOrUpdateFile(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
path: string,
|
||||||
|
content: string,
|
||||||
|
message: string,
|
||||||
|
branch: string,
|
||||||
|
sha?: string
|
||||||
|
) {
|
||||||
|
const encodedContent = Buffer.from(content).toString("base64");
|
||||||
|
|
||||||
|
let currentSha = sha;
|
||||||
|
if (!currentSha) {
|
||||||
|
try {
|
||||||
|
const existingFile = await getFileContents(owner, repo, path, branch);
|
||||||
|
if (!Array.isArray(existingFile)) {
|
||||||
|
currentSha = existingFile.sha;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Note: File does not exist in branch, will create new file");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`;
|
||||||
|
const body = {
|
||||||
|
message,
|
||||||
|
content: encodedContent,
|
||||||
|
branch,
|
||||||
|
...(currentSha ? { sha: currentSha } : {}),
|
||||||
|
};
|
||||||
|
|
||||||
|
const response = await githubRequest(url, {
|
||||||
|
method: "PUT",
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
|
||||||
|
return GitHubCreateUpdateFileResponseSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function createTree(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
files: FileOperation[],
|
||||||
|
baseTree?: string
|
||||||
|
) {
|
||||||
|
const tree = files.map((file) => ({
|
||||||
|
path: file.path,
|
||||||
|
mode: "100644" as const,
|
||||||
|
type: "blob" as const,
|
||||||
|
content: file.content,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/trees`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
body: {
|
||||||
|
tree,
|
||||||
|
base_tree: baseTree,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return GitHubTreeSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function createCommit(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
message: string,
|
||||||
|
tree: string,
|
||||||
|
parents: string[]
|
||||||
|
) {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/commits`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
body: {
|
||||||
|
message,
|
||||||
|
tree,
|
||||||
|
parents,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return GitHubCommitSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function updateReference(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
ref: string,
|
||||||
|
sha: string
|
||||||
|
) {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/refs/${ref}`,
|
||||||
|
{
|
||||||
|
method: "PATCH",
|
||||||
|
body: {
|
||||||
|
sha,
|
||||||
|
force: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return GitHubReferenceSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function pushFiles(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
branch: string,
|
||||||
|
files: FileOperation[],
|
||||||
|
message: string
|
||||||
|
) {
|
||||||
|
const refResponse = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`
|
||||||
|
);
|
||||||
|
|
||||||
|
const ref = GitHubReferenceSchema.parse(refResponse);
|
||||||
|
const commitSha = ref.object.sha;
|
||||||
|
|
||||||
|
const tree = await createTree(owner, repo, files, commitSha);
|
||||||
|
const commit = await createCommit(owner, repo, message, tree.sha, [commitSha]);
|
||||||
|
return await updateReference(owner, repo, `heads/${branch}`, commit.sha);
|
||||||
|
}
|
||||||
118
src/github/operations/issues.ts
Normal file
118
src/github/operations/issues.ts
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { githubRequest, buildUrl } from "../common/utils.js";
|
||||||
|
|
||||||
|
export const GetIssueSchema = z.object({
|
||||||
|
owner: z.string(),
|
||||||
|
repo: z.string(),
|
||||||
|
issue_number: z.number(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const IssueCommentSchema = z.object({
|
||||||
|
owner: z.string(),
|
||||||
|
repo: z.string(),
|
||||||
|
issue_number: z.number(),
|
||||||
|
body: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CreateIssueOptionsSchema = z.object({
|
||||||
|
title: z.string(),
|
||||||
|
body: z.string().optional(),
|
||||||
|
assignees: z.array(z.string()).optional(),
|
||||||
|
milestone: z.number().optional(),
|
||||||
|
labels: z.array(z.string()).optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CreateIssueSchema = z.object({
|
||||||
|
owner: z.string(),
|
||||||
|
repo: z.string(),
|
||||||
|
...CreateIssueOptionsSchema.shape,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const ListIssuesOptionsSchema = z.object({
|
||||||
|
owner: z.string(),
|
||||||
|
repo: z.string(),
|
||||||
|
direction: z.enum(["asc", "desc"]).optional(),
|
||||||
|
labels: z.array(z.string()).optional(),
|
||||||
|
page: z.number().optional(),
|
||||||
|
per_page: z.number().optional(),
|
||||||
|
since: z.string().optional(),
|
||||||
|
sort: z.enum(["created", "updated", "comments"]).optional(),
|
||||||
|
state: z.enum(["open", "closed", "all"]).optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const UpdateIssueOptionsSchema = z.object({
|
||||||
|
owner: z.string(),
|
||||||
|
repo: z.string(),
|
||||||
|
issue_number: z.number(),
|
||||||
|
title: z.string().optional(),
|
||||||
|
body: z.string().optional(),
|
||||||
|
assignees: z.array(z.string()).optional(),
|
||||||
|
milestone: z.number().optional(),
|
||||||
|
labels: z.array(z.string()).optional(),
|
||||||
|
state: z.enum(["open", "closed"]).optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export async function getIssue(owner: string, repo: string, issue_number: number) {
|
||||||
|
return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function addIssueComment(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
issue_number: number,
|
||||||
|
body: string
|
||||||
|
) {
|
||||||
|
return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}/comments`, {
|
||||||
|
method: "POST",
|
||||||
|
body: { body },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createIssue(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
options: z.infer<typeof CreateIssueOptionsSchema>
|
||||||
|
) {
|
||||||
|
return githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/issues`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
body: options,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listIssues(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
options: Omit<z.infer<typeof ListIssuesOptionsSchema>, "owner" | "repo">
|
||||||
|
) {
|
||||||
|
const urlParams: Record<string, string | undefined> = {
|
||||||
|
direction: options.direction,
|
||||||
|
labels: options.labels?.join(","),
|
||||||
|
page: options.page?.toString(),
|
||||||
|
per_page: options.per_page?.toString(),
|
||||||
|
since: options.since,
|
||||||
|
sort: options.sort,
|
||||||
|
state: options.state
|
||||||
|
};
|
||||||
|
|
||||||
|
return githubRequest(
|
||||||
|
buildUrl(`https://api.github.com/repos/${owner}/${repo}/issues`, urlParams)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function updateIssue(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
issue_number: number,
|
||||||
|
options: Omit<z.infer<typeof UpdateIssueOptionsSchema>, "owner" | "repo" | "issue_number">
|
||||||
|
) {
|
||||||
|
return githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`,
|
||||||
|
{
|
||||||
|
method: "PATCH",
|
||||||
|
body: options,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
302
src/github/operations/pulls.ts
Normal file
302
src/github/operations/pulls.ts
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { githubRequest } from "../common/utils.js";
|
||||||
|
import {
|
||||||
|
GitHubPullRequestSchema,
|
||||||
|
GitHubIssueAssigneeSchema,
|
||||||
|
GitHubRepositorySchema,
|
||||||
|
} from "../common/types.js";
|
||||||
|
|
||||||
|
// Schema definitions
|
||||||
|
export const PullRequestFileSchema = z.object({
|
||||||
|
sha: z.string(),
|
||||||
|
filename: z.string(),
|
||||||
|
status: z.enum(['added', 'removed', 'modified', 'renamed', 'copied', 'changed', 'unchanged']),
|
||||||
|
additions: z.number(),
|
||||||
|
deletions: z.number(),
|
||||||
|
changes: z.number(),
|
||||||
|
blob_url: z.string(),
|
||||||
|
raw_url: z.string(),
|
||||||
|
contents_url: z.string(),
|
||||||
|
patch: z.string().optional()
|
||||||
|
});
|
||||||
|
|
||||||
|
export const StatusCheckSchema = z.object({
|
||||||
|
url: z.string(),
|
||||||
|
state: z.enum(['error', 'failure', 'pending', 'success']),
|
||||||
|
description: z.string().nullable(),
|
||||||
|
target_url: z.string().nullable(),
|
||||||
|
context: z.string(),
|
||||||
|
created_at: z.string(),
|
||||||
|
updated_at: z.string()
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CombinedStatusSchema = z.object({
|
||||||
|
state: z.enum(['error', 'failure', 'pending', 'success']),
|
||||||
|
statuses: z.array(StatusCheckSchema),
|
||||||
|
sha: z.string(),
|
||||||
|
total_count: z.number()
|
||||||
|
});
|
||||||
|
|
||||||
|
export const PullRequestCommentSchema = z.object({
|
||||||
|
url: z.string(),
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
pull_request_review_id: z.number().nullable(),
|
||||||
|
diff_hunk: z.string(),
|
||||||
|
path: z.string().nullable(),
|
||||||
|
position: z.number().nullable(),
|
||||||
|
original_position: z.number().nullable(),
|
||||||
|
commit_id: z.string(),
|
||||||
|
original_commit_id: z.string(),
|
||||||
|
user: GitHubIssueAssigneeSchema,
|
||||||
|
body: z.string(),
|
||||||
|
created_at: z.string(),
|
||||||
|
updated_at: z.string(),
|
||||||
|
html_url: z.string(),
|
||||||
|
pull_request_url: z.string(),
|
||||||
|
author_association: z.string(),
|
||||||
|
_links: z.object({
|
||||||
|
self: z.object({ href: z.string() }),
|
||||||
|
html: z.object({ href: z.string() }),
|
||||||
|
pull_request: z.object({ href: z.string() })
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
export const PullRequestReviewSchema = z.object({
|
||||||
|
id: z.number(),
|
||||||
|
node_id: z.string(),
|
||||||
|
user: GitHubIssueAssigneeSchema,
|
||||||
|
body: z.string().nullable(),
|
||||||
|
state: z.enum(['APPROVED', 'CHANGES_REQUESTED', 'COMMENTED', 'DISMISSED', 'PENDING']),
|
||||||
|
html_url: z.string(),
|
||||||
|
pull_request_url: z.string(),
|
||||||
|
commit_id: z.string(),
|
||||||
|
submitted_at: z.string().nullable(),
|
||||||
|
author_association: z.string()
|
||||||
|
});
|
||||||
|
|
||||||
|
// Input schemas
|
||||||
|
export const CreatePullRequestSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
title: z.string().describe("Pull request title"),
|
||||||
|
body: z.string().optional().describe("Pull request body/description"),
|
||||||
|
head: z.string().describe("The name of the branch where your changes are implemented"),
|
||||||
|
base: z.string().describe("The name of the branch you want the changes pulled into"),
|
||||||
|
draft: z.boolean().optional().describe("Whether to create the pull request as a draft"),
|
||||||
|
maintainer_can_modify: z.boolean().optional().describe("Whether maintainers can modify the pull request")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GetPullRequestSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const ListPullRequestsSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
state: z.enum(['open', 'closed', 'all']).optional().describe("State of the pull requests to return"),
|
||||||
|
head: z.string().optional().describe("Filter by head user or head organization and branch name"),
|
||||||
|
base: z.string().optional().describe("Filter by base branch name"),
|
||||||
|
sort: z.enum(['created', 'updated', 'popularity', 'long-running']).optional().describe("What to sort results by"),
|
||||||
|
direction: z.enum(['asc', 'desc']).optional().describe("The direction of the sort"),
|
||||||
|
per_page: z.number().optional().describe("Results per page (max 100)"),
|
||||||
|
page: z.number().optional().describe("Page number of the results")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CreatePullRequestReviewSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number"),
|
||||||
|
commit_id: z.string().optional().describe("The SHA of the commit that needs a review"),
|
||||||
|
body: z.string().describe("The body text of the review"),
|
||||||
|
event: z.enum(['APPROVE', 'REQUEST_CHANGES', 'COMMENT']).describe("The review action to perform"),
|
||||||
|
comments: z.array(z.object({
|
||||||
|
path: z.string().describe("The relative path to the file being commented on"),
|
||||||
|
position: z.number().describe("The position in the diff where you want to add a review comment"),
|
||||||
|
body: z.string().describe("Text of the review comment")
|
||||||
|
})).optional().describe("Comments to post as part of the review")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const MergePullRequestSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number"),
|
||||||
|
commit_title: z.string().optional().describe("Title for the automatic commit message"),
|
||||||
|
commit_message: z.string().optional().describe("Extra detail to append to automatic commit message"),
|
||||||
|
merge_method: z.enum(['merge', 'squash', 'rebase']).optional().describe("Merge method to use")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GetPullRequestFilesSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GetPullRequestStatusSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const UpdatePullRequestBranchSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number"),
|
||||||
|
expected_head_sha: z.string().optional().describe("The expected SHA of the pull request's HEAD ref")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GetPullRequestCommentsSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number")
|
||||||
|
});
|
||||||
|
|
||||||
|
export const GetPullRequestReviewsSchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
pull_number: z.number().describe("Pull request number")
|
||||||
|
});
|
||||||
|
|
||||||
|
// Function implementations
|
||||||
|
export async function createPullRequest(
|
||||||
|
params: z.infer<typeof CreatePullRequestSchema>
|
||||||
|
): Promise<z.infer<typeof GitHubPullRequestSchema>> {
|
||||||
|
const { owner, repo, ...options } = CreatePullRequestSchema.parse(params);
|
||||||
|
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
body: options,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return GitHubPullRequestSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getPullRequest(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number
|
||||||
|
): Promise<z.infer<typeof GitHubPullRequestSchema>> {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}`
|
||||||
|
);
|
||||||
|
return GitHubPullRequestSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listPullRequests(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
options: Omit<z.infer<typeof ListPullRequestsSchema>, 'owner' | 'repo'>
|
||||||
|
): Promise<z.infer<typeof GitHubPullRequestSchema>[]> {
|
||||||
|
const url = new URL(`https://api.github.com/repos/${owner}/${repo}/pulls`);
|
||||||
|
|
||||||
|
if (options.state) url.searchParams.append('state', options.state);
|
||||||
|
if (options.head) url.searchParams.append('head', options.head);
|
||||||
|
if (options.base) url.searchParams.append('base', options.base);
|
||||||
|
if (options.sort) url.searchParams.append('sort', options.sort);
|
||||||
|
if (options.direction) url.searchParams.append('direction', options.direction);
|
||||||
|
if (options.per_page) url.searchParams.append('per_page', options.per_page.toString());
|
||||||
|
if (options.page) url.searchParams.append('page', options.page.toString());
|
||||||
|
|
||||||
|
const response = await githubRequest(url.toString());
|
||||||
|
return z.array(GitHubPullRequestSchema).parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createPullRequestReview(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number,
|
||||||
|
options: Omit<z.infer<typeof CreatePullRequestReviewSchema>, 'owner' | 'repo' | 'pull_number'>
|
||||||
|
): Promise<z.infer<typeof PullRequestReviewSchema>> {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/reviews`,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
body: options,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
return PullRequestReviewSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function mergePullRequest(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number,
|
||||||
|
options: Omit<z.infer<typeof MergePullRequestSchema>, 'owner' | 'repo' | 'pull_number'>
|
||||||
|
): Promise<any> {
|
||||||
|
return githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/merge`,
|
||||||
|
{
|
||||||
|
method: 'PUT',
|
||||||
|
body: options,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getPullRequestFiles(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number
|
||||||
|
): Promise<z.infer<typeof PullRequestFileSchema>[]> {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/files`
|
||||||
|
);
|
||||||
|
return z.array(PullRequestFileSchema).parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function updatePullRequestBranch(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number,
|
||||||
|
expectedHeadSha?: string
|
||||||
|
): Promise<void> {
|
||||||
|
await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/update-branch`,
|
||||||
|
{
|
||||||
|
method: "PUT",
|
||||||
|
body: expectedHeadSha ? { expected_head_sha: expectedHeadSha } : undefined,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getPullRequestComments(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number
|
||||||
|
): Promise<z.infer<typeof PullRequestCommentSchema>[]> {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/comments`
|
||||||
|
);
|
||||||
|
return z.array(PullRequestCommentSchema).parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getPullRequestReviews(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number
|
||||||
|
): Promise<z.infer<typeof PullRequestReviewSchema>[]> {
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/reviews`
|
||||||
|
);
|
||||||
|
return z.array(PullRequestReviewSchema).parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getPullRequestStatus(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
pullNumber: number
|
||||||
|
): Promise<z.infer<typeof CombinedStatusSchema>> {
|
||||||
|
// First get the PR to get the head SHA
|
||||||
|
const pr = await getPullRequest(owner, repo, pullNumber);
|
||||||
|
const sha = pr.head.sha;
|
||||||
|
|
||||||
|
// Then get the combined status for that SHA
|
||||||
|
const response = await githubRequest(
|
||||||
|
`https://api.github.com/repos/${owner}/${repo}/commits/${sha}/status`
|
||||||
|
);
|
||||||
|
return CombinedStatusSchema.parse(response);
|
||||||
|
}
|
||||||
65
src/github/operations/repository.ts
Normal file
65
src/github/operations/repository.ts
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { githubRequest } from "../common/utils.js";
|
||||||
|
import { GitHubRepositorySchema, GitHubSearchResponseSchema } from "../common/types.js";
|
||||||
|
|
||||||
|
// Schema definitions
|
||||||
|
export const CreateRepositoryOptionsSchema = z.object({
|
||||||
|
name: z.string().describe("Repository name"),
|
||||||
|
description: z.string().optional().describe("Repository description"),
|
||||||
|
private: z.boolean().optional().describe("Whether the repository should be private"),
|
||||||
|
autoInit: z.boolean().optional().describe("Initialize with README.md"),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const SearchRepositoriesSchema = z.object({
|
||||||
|
query: z.string().describe("Search query (see GitHub search syntax)"),
|
||||||
|
page: z.number().optional().describe("Page number for pagination (default: 1)"),
|
||||||
|
perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const ForkRepositorySchema = z.object({
|
||||||
|
owner: z.string().describe("Repository owner (username or organization)"),
|
||||||
|
repo: z.string().describe("Repository name"),
|
||||||
|
organization: z.string().optional().describe("Optional: organization to fork to (defaults to your personal account)"),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Type exports
|
||||||
|
export type CreateRepositoryOptions = z.infer<typeof CreateRepositoryOptionsSchema>;
|
||||||
|
|
||||||
|
// Function implementations
|
||||||
|
export async function createRepository(options: CreateRepositoryOptions) {
|
||||||
|
const response = await githubRequest("https://api.github.com/user/repos", {
|
||||||
|
method: "POST",
|
||||||
|
body: options,
|
||||||
|
});
|
||||||
|
return GitHubRepositorySchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function searchRepositories(
|
||||||
|
query: string,
|
||||||
|
page: number = 1,
|
||||||
|
perPage: number = 30
|
||||||
|
) {
|
||||||
|
const url = new URL("https://api.github.com/search/repositories");
|
||||||
|
url.searchParams.append("q", query);
|
||||||
|
url.searchParams.append("page", page.toString());
|
||||||
|
url.searchParams.append("per_page", perPage.toString());
|
||||||
|
|
||||||
|
const response = await githubRequest(url.toString());
|
||||||
|
return GitHubSearchResponseSchema.parse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function forkRepository(
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
organization?: string
|
||||||
|
) {
|
||||||
|
const url = organization
|
||||||
|
? `https://api.github.com/repos/${owner}/${repo}/forks?organization=${organization}`
|
||||||
|
: `https://api.github.com/repos/${owner}/${repo}/forks`;
|
||||||
|
|
||||||
|
const response = await githubRequest(url, { method: "POST" });
|
||||||
|
return GitHubRepositorySchema.extend({
|
||||||
|
parent: GitHubRepositorySchema,
|
||||||
|
source: GitHubRepositorySchema,
|
||||||
|
}).parse(response);
|
||||||
|
}
|
||||||
45
src/github/operations/search.ts
Normal file
45
src/github/operations/search.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { githubRequest, buildUrl } from "../common/utils.js";
|
||||||
|
|
||||||
|
export const SearchOptions = z.object({
|
||||||
|
q: z.string(),
|
||||||
|
order: z.enum(["asc", "desc"]).optional(),
|
||||||
|
page: z.number().min(1).optional(),
|
||||||
|
per_page: z.number().min(1).max(100).optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const SearchUsersOptions = SearchOptions.extend({
|
||||||
|
sort: z.enum(["followers", "repositories", "joined"]).optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const SearchIssuesOptions = SearchOptions.extend({
|
||||||
|
sort: z.enum([
|
||||||
|
"comments",
|
||||||
|
"reactions",
|
||||||
|
"reactions-+1",
|
||||||
|
"reactions--1",
|
||||||
|
"reactions-smile",
|
||||||
|
"reactions-thinking_face",
|
||||||
|
"reactions-heart",
|
||||||
|
"reactions-tada",
|
||||||
|
"interactions",
|
||||||
|
"created",
|
||||||
|
"updated",
|
||||||
|
]).optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const SearchCodeSchema = SearchOptions;
|
||||||
|
export const SearchUsersSchema = SearchUsersOptions;
|
||||||
|
export const SearchIssuesSchema = SearchIssuesOptions;
|
||||||
|
|
||||||
|
export async function searchCode(params: z.infer<typeof SearchCodeSchema>) {
|
||||||
|
return githubRequest(buildUrl("https://api.github.com/search/code", params));
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function searchIssues(params: z.infer<typeof SearchIssuesSchema>) {
|
||||||
|
return githubRequest(buildUrl("https://api.github.com/search/issues", params));
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function searchUsers(params: z.infer<typeof SearchUsersSchema>) {
|
||||||
|
return githubRequest(buildUrl("https://api.github.com/search/users", params));
|
||||||
|
}
|
||||||
@@ -23,6 +23,7 @@
|
|||||||
"@types/node": "^22",
|
"@types/node": "^22",
|
||||||
"@types/node-fetch": "^2.6.12",
|
"@types/node-fetch": "^2.6.12",
|
||||||
"node-fetch": "^3.3.2",
|
"node-fetch": "^3.3.2",
|
||||||
|
"universal-user-agent": "^7.0.2",
|
||||||
"zod": "^3.22.4",
|
"zod": "^3.22.4",
|
||||||
"zod-to-json-schema": "^3.23.5"
|
"zod-to-json-schema": "^3.23.5"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,719 +0,0 @@
|
|||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
// Base schemas for common types
|
|
||||||
export const GitHubAuthorSchema = z.object({
|
|
||||||
name: z.string(),
|
|
||||||
email: z.string(),
|
|
||||||
date: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Repository related schemas
|
|
||||||
export const GitHubOwnerSchema = z.object({
|
|
||||||
login: z.string(),
|
|
||||||
id: z.number(),
|
|
||||||
node_id: z.string(),
|
|
||||||
avatar_url: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
type: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubRepositorySchema = z.object({
|
|
||||||
id: z.number(),
|
|
||||||
node_id: z.string(),
|
|
||||||
name: z.string(),
|
|
||||||
full_name: z.string(),
|
|
||||||
private: z.boolean(),
|
|
||||||
owner: GitHubOwnerSchema,
|
|
||||||
html_url: z.string(),
|
|
||||||
description: z.string().nullable(),
|
|
||||||
fork: z.boolean(),
|
|
||||||
url: z.string(),
|
|
||||||
created_at: z.string(),
|
|
||||||
updated_at: z.string(),
|
|
||||||
pushed_at: z.string(),
|
|
||||||
git_url: z.string(),
|
|
||||||
ssh_url: z.string(),
|
|
||||||
clone_url: z.string(),
|
|
||||||
default_branch: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// File content schemas
|
|
||||||
export const GitHubFileContentSchema = z.object({
|
|
||||||
type: z.string(),
|
|
||||||
encoding: z.string(),
|
|
||||||
size: z.number(),
|
|
||||||
name: z.string(),
|
|
||||||
path: z.string(),
|
|
||||||
content: z.string(),
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
git_url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
download_url: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubDirectoryContentSchema = z.object({
|
|
||||||
type: z.string(),
|
|
||||||
size: z.number(),
|
|
||||||
name: z.string(),
|
|
||||||
path: z.string(),
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
git_url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
download_url: z.string().nullable(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubContentSchema = z.union([
|
|
||||||
GitHubFileContentSchema,
|
|
||||||
z.array(GitHubDirectoryContentSchema),
|
|
||||||
]);
|
|
||||||
|
|
||||||
// Operation schemas
|
|
||||||
export const FileOperationSchema = z.object({
|
|
||||||
path: z.string(),
|
|
||||||
content: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Tree and commit schemas
|
|
||||||
export const GitHubTreeEntrySchema = z.object({
|
|
||||||
path: z.string(),
|
|
||||||
mode: z.enum(["100644", "100755", "040000", "160000", "120000"]),
|
|
||||||
type: z.enum(["blob", "tree", "commit"]),
|
|
||||||
size: z.number().optional(),
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubTreeSchema = z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
tree: z.array(GitHubTreeEntrySchema),
|
|
||||||
truncated: z.boolean(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubListCommitsSchema = z.array(z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
node_id: z.string(),
|
|
||||||
commit: z.object({
|
|
||||||
author: GitHubAuthorSchema,
|
|
||||||
committer: GitHubAuthorSchema,
|
|
||||||
message: z.string(),
|
|
||||||
tree: z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string()
|
|
||||||
}),
|
|
||||||
url: z.string(),
|
|
||||||
comment_count: z.number(),
|
|
||||||
}),
|
|
||||||
url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
comments_url: z.string()
|
|
||||||
}));
|
|
||||||
|
|
||||||
export const GitHubCommitSchema = z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
node_id: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
author: GitHubAuthorSchema,
|
|
||||||
committer: GitHubAuthorSchema,
|
|
||||||
message: z.string(),
|
|
||||||
tree: z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
}),
|
|
||||||
parents: z.array(
|
|
||||||
z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
})
|
|
||||||
),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Reference schema
|
|
||||||
export const GitHubReferenceSchema = z.object({
|
|
||||||
ref: z.string(),
|
|
||||||
node_id: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
object: z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
type: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Input schemas for operations
|
|
||||||
export const CreateRepositoryOptionsSchema = z.object({
|
|
||||||
name: z.string(),
|
|
||||||
description: z.string().optional(),
|
|
||||||
private: z.boolean().optional(),
|
|
||||||
auto_init: z.boolean().optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreateIssueOptionsSchema = z.object({
|
|
||||||
title: z.string(),
|
|
||||||
body: z.string().optional(),
|
|
||||||
assignees: z.array(z.string()).optional(),
|
|
||||||
milestone: z.number().optional(),
|
|
||||||
labels: z.array(z.string()).optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreatePullRequestOptionsSchema = z.object({
|
|
||||||
title: z.string(),
|
|
||||||
body: z.string().optional(),
|
|
||||||
head: z.string(),
|
|
||||||
base: z.string(),
|
|
||||||
maintainer_can_modify: z.boolean().optional(),
|
|
||||||
draft: z.boolean().optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreateBranchOptionsSchema = z.object({
|
|
||||||
ref: z.string(),
|
|
||||||
sha: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Response schemas for operations
|
|
||||||
export const GitHubCreateUpdateFileResponseSchema = z.object({
|
|
||||||
content: GitHubFileContentSchema.nullable(),
|
|
||||||
commit: z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
node_id: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
author: GitHubAuthorSchema,
|
|
||||||
committer: GitHubAuthorSchema,
|
|
||||||
message: z.string(),
|
|
||||||
tree: z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
}),
|
|
||||||
parents: z.array(
|
|
||||||
z.object({
|
|
||||||
sha: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
})
|
|
||||||
),
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubSearchResponseSchema = z.object({
|
|
||||||
total_count: z.number(),
|
|
||||||
incomplete_results: z.boolean(),
|
|
||||||
items: z.array(GitHubRepositorySchema),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Fork related schemas
|
|
||||||
export const GitHubForkParentSchema = z.object({
|
|
||||||
name: z.string(),
|
|
||||||
full_name: z.string(),
|
|
||||||
owner: z.object({
|
|
||||||
login: z.string(),
|
|
||||||
id: z.number(),
|
|
||||||
avatar_url: z.string(),
|
|
||||||
}),
|
|
||||||
html_url: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubForkSchema = GitHubRepositorySchema.extend({
|
|
||||||
parent: GitHubForkParentSchema,
|
|
||||||
source: GitHubForkParentSchema,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Issue related schemas
|
|
||||||
export const GitHubLabelSchema = z.object({
|
|
||||||
id: z.number(),
|
|
||||||
node_id: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
name: z.string(),
|
|
||||||
color: z.string(),
|
|
||||||
default: z.boolean(),
|
|
||||||
description: z.string().optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubIssueAssigneeSchema = z.object({
|
|
||||||
login: z.string(),
|
|
||||||
id: z.number(),
|
|
||||||
avatar_url: z.string(),
|
|
||||||
url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubMilestoneSchema = z.object({
|
|
||||||
url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
labels_url: z.string(),
|
|
||||||
id: z.number(),
|
|
||||||
node_id: z.string(),
|
|
||||||
number: z.number(),
|
|
||||||
title: z.string(),
|
|
||||||
description: z.string(),
|
|
||||||
state: z.string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubIssueSchema = z.object({
|
|
||||||
url: z.string(),
|
|
||||||
repository_url: z.string(),
|
|
||||||
labels_url: z.string(),
|
|
||||||
comments_url: z.string(),
|
|
||||||
events_url: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
id: z.number(),
|
|
||||||
node_id: z.string(),
|
|
||||||
number: z.number(),
|
|
||||||
title: z.string(),
|
|
||||||
user: GitHubIssueAssigneeSchema,
|
|
||||||
labels: z.array(GitHubLabelSchema),
|
|
||||||
state: z.string(),
|
|
||||||
locked: z.boolean(),
|
|
||||||
assignee: GitHubIssueAssigneeSchema.nullable(),
|
|
||||||
assignees: z.array(GitHubIssueAssigneeSchema),
|
|
||||||
milestone: GitHubMilestoneSchema.nullable(),
|
|
||||||
comments: z.number(),
|
|
||||||
created_at: z.string(),
|
|
||||||
updated_at: z.string(),
|
|
||||||
closed_at: z.string().nullable(),
|
|
||||||
body: z.string().nullable(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Pull Request related schemas
|
|
||||||
export const GitHubPullRequestHeadSchema = z.object({
|
|
||||||
label: z.string(),
|
|
||||||
ref: z.string(),
|
|
||||||
sha: z.string(),
|
|
||||||
user: GitHubIssueAssigneeSchema,
|
|
||||||
repo: GitHubRepositorySchema,
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GitHubPullRequestSchema = z.object({
|
|
||||||
url: z.string(),
|
|
||||||
id: z.number(),
|
|
||||||
node_id: z.string(),
|
|
||||||
html_url: z.string(),
|
|
||||||
diff_url: z.string(),
|
|
||||||
patch_url: z.string(),
|
|
||||||
issue_url: z.string(),
|
|
||||||
number: z.number(),
|
|
||||||
state: z.string(),
|
|
||||||
locked: z.boolean(),
|
|
||||||
title: z.string(),
|
|
||||||
user: GitHubIssueAssigneeSchema,
|
|
||||||
body: z.string(),
|
|
||||||
created_at: z.string(),
|
|
||||||
updated_at: z.string(),
|
|
||||||
closed_at: z.string().nullable(),
|
|
||||||
merged_at: z.string().nullable(),
|
|
||||||
merge_commit_sha: z.string().nullable(),
|
|
||||||
assignee: GitHubIssueAssigneeSchema.nullable(),
|
|
||||||
assignees: z.array(GitHubIssueAssigneeSchema),
|
|
||||||
head: GitHubPullRequestHeadSchema,
|
|
||||||
base: GitHubPullRequestHeadSchema,
|
|
||||||
});
|
|
||||||
|
|
||||||
const RepoParamsSchema = z.object({
|
|
||||||
owner: z.string().describe("Repository owner (username or organization)"),
|
|
||||||
repo: z.string().describe("Repository name"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreateOrUpdateFileSchema = RepoParamsSchema.extend({
|
|
||||||
path: z.string().describe("Path where to create/update the file"),
|
|
||||||
content: z.string().describe("Content of the file"),
|
|
||||||
message: z.string().describe("Commit message"),
|
|
||||||
branch: z.string().describe("Branch to create/update the file in"),
|
|
||||||
sha: z
|
|
||||||
.string()
|
|
||||||
.optional()
|
|
||||||
.describe(
|
|
||||||
"SHA of the file being replaced (required when updating existing files)"
|
|
||||||
),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const SearchRepositoriesSchema = z.object({
|
|
||||||
query: z.string().describe("Search query (see GitHub search syntax)"),
|
|
||||||
page: z
|
|
||||||
.number()
|
|
||||||
.optional()
|
|
||||||
.describe("Page number for pagination (default: 1)"),
|
|
||||||
perPage: z
|
|
||||||
.number()
|
|
||||||
.optional()
|
|
||||||
.describe("Number of results per page (default: 30, max: 100)"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const ListCommitsSchema = z.object({
|
|
||||||
owner: z.string().describe("Repository owner (username or organization)"),
|
|
||||||
repo: z.string().describe("Repository name"),
|
|
||||||
page: z.number().optional().describe("Page number for pagination (default: 1)"),
|
|
||||||
perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"),
|
|
||||||
sha: z.string().optional()
|
|
||||||
.describe("SHA of the file being replaced (required when updating existing files)")
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreateRepositorySchema = z.object({
|
|
||||||
name: z.string().describe("Repository name"),
|
|
||||||
description: z.string().optional().describe("Repository description"),
|
|
||||||
private: z
|
|
||||||
.boolean()
|
|
||||||
.optional()
|
|
||||||
.describe("Whether the repository should be private"),
|
|
||||||
autoInit: z.boolean().optional().describe("Initialize with README.md"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GetFileContentsSchema = RepoParamsSchema.extend({
|
|
||||||
path: z.string().describe("Path to the file or directory"),
|
|
||||||
branch: z.string().optional().describe("Branch to get contents from"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const PushFilesSchema = RepoParamsSchema.extend({
|
|
||||||
branch: z.string().describe("Branch to push to (e.g., 'main' or 'master')"),
|
|
||||||
files: z
|
|
||||||
.array(
|
|
||||||
z.object({
|
|
||||||
path: z.string().describe("Path where to create the file"),
|
|
||||||
content: z.string().describe("Content of the file"),
|
|
||||||
})
|
|
||||||
)
|
|
||||||
.describe("Array of files to push"),
|
|
||||||
message: z.string().describe("Commit message"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreateIssueSchema = RepoParamsSchema.extend({
|
|
||||||
title: z.string().describe("Issue title"),
|
|
||||||
body: z.string().optional().describe("Issue body/description"),
|
|
||||||
assignees: z
|
|
||||||
.array(z.string())
|
|
||||||
.optional()
|
|
||||||
.describe("Array of usernames to assign"),
|
|
||||||
labels: z.array(z.string()).optional().describe("Array of label names"),
|
|
||||||
milestone: z.number().optional().describe("Milestone number to assign"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreatePullRequestSchema = RepoParamsSchema.extend({
|
|
||||||
title: z.string().describe("Pull request title"),
|
|
||||||
body: z.string().optional().describe("Pull request body/description"),
|
|
||||||
head: z
|
|
||||||
.string()
|
|
||||||
.describe("The name of the branch where your changes are implemented"),
|
|
||||||
base: z
|
|
||||||
.string()
|
|
||||||
.describe("The name of the branch you want the changes pulled into"),
|
|
||||||
draft: z
|
|
||||||
.boolean()
|
|
||||||
.optional()
|
|
||||||
.describe("Whether to create the pull request as a draft"),
|
|
||||||
maintainer_can_modify: z
|
|
||||||
.boolean()
|
|
||||||
.optional()
|
|
||||||
.describe("Whether maintainers can modify the pull request"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const ForkRepositorySchema = RepoParamsSchema.extend({
|
|
||||||
organization: z
|
|
||||||
.string()
|
|
||||||
.optional()
|
|
||||||
.describe(
|
|
||||||
"Optional: organization to fork to (defaults to your personal account)"
|
|
||||||
),
|
|
||||||
});
|
|
||||||
|
|
||||||
export const CreateBranchSchema = RepoParamsSchema.extend({
|
|
||||||
branch: z.string().describe("Name for the new branch"),
|
|
||||||
from_branch: z
|
|
||||||
.string()
|
|
||||||
.optional()
|
|
||||||
.describe(
|
|
||||||
"Optional: source branch to create from (defaults to the repository's default branch)"
|
|
||||||
),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response schema for a code search result item
|
|
||||||
* @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-code
|
|
||||||
*/
|
|
||||||
export const SearchCodeItemSchema = z.object({
|
|
||||||
name: z.string().describe("The name of the file"),
|
|
||||||
path: z.string().describe("The path to the file in the repository"),
|
|
||||||
sha: z.string().describe("The SHA hash of the file"),
|
|
||||||
url: z.string().describe("The API URL for this file"),
|
|
||||||
git_url: z.string().describe("The Git URL for this file"),
|
|
||||||
html_url: z.string().describe("The HTML URL to view this file on GitHub"),
|
|
||||||
repository: GitHubRepositorySchema.describe(
|
|
||||||
"The repository where this file was found"
|
|
||||||
),
|
|
||||||
score: z.number().describe("The search result score"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response schema for code search results
|
|
||||||
*/
|
|
||||||
export const SearchCodeResponseSchema = z.object({
|
|
||||||
total_count: z.number().describe("Total number of matching results"),
|
|
||||||
incomplete_results: z
|
|
||||||
.boolean()
|
|
||||||
.describe("Whether the results are incomplete"),
|
|
||||||
items: z.array(SearchCodeItemSchema).describe("The search results"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response schema for an issue search result item
|
|
||||||
* @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests
|
|
||||||
*/
|
|
||||||
export const SearchIssueItemSchema = z.object({
|
|
||||||
url: z.string().describe("The API URL for this issue"),
|
|
||||||
repository_url: z
|
|
||||||
.string()
|
|
||||||
.describe("The API URL for the repository where this issue was found"),
|
|
||||||
labels_url: z.string().describe("The API URL for the labels of this issue"),
|
|
||||||
comments_url: z.string().describe("The API URL for comments of this issue"),
|
|
||||||
events_url: z.string().describe("The API URL for events of this issue"),
|
|
||||||
html_url: z.string().describe("The HTML URL to view this issue on GitHub"),
|
|
||||||
id: z.number().describe("The ID of this issue"),
|
|
||||||
node_id: z.string().describe("The Node ID of this issue"),
|
|
||||||
number: z.number().describe("The number of this issue"),
|
|
||||||
title: z.string().describe("The title of this issue"),
|
|
||||||
user: GitHubIssueAssigneeSchema.describe("The user who created this issue"),
|
|
||||||
labels: z.array(GitHubLabelSchema).describe("The labels of this issue"),
|
|
||||||
state: z.string().describe("The state of this issue"),
|
|
||||||
locked: z.boolean().describe("Whether this issue is locked"),
|
|
||||||
assignee: GitHubIssueAssigneeSchema.nullable().describe(
|
|
||||||
"The assignee of this issue"
|
|
||||||
),
|
|
||||||
assignees: z
|
|
||||||
.array(GitHubIssueAssigneeSchema)
|
|
||||||
.describe("The assignees of this issue"),
|
|
||||||
comments: z.number().describe("The number of comments on this issue"),
|
|
||||||
created_at: z.string().describe("The creation time of this issue"),
|
|
||||||
updated_at: z.string().describe("The last update time of this issue"),
|
|
||||||
closed_at: z.string().nullable().describe("The closure time of this issue"),
|
|
||||||
body: z.string().describe("The body of this issue"),
|
|
||||||
score: z.number().describe("The search result score"),
|
|
||||||
pull_request: z
|
|
||||||
.object({
|
|
||||||
url: z.string().describe("The API URL for this pull request"),
|
|
||||||
html_url: z.string().describe("The HTML URL to view this pull request"),
|
|
||||||
diff_url: z.string().describe("The URL to view the diff"),
|
|
||||||
patch_url: z.string().describe("The URL to view the patch"),
|
|
||||||
})
|
|
||||||
.optional()
|
|
||||||
.describe("Pull request details if this is a PR"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response schema for issue search results
|
|
||||||
*/
|
|
||||||
export const SearchIssuesResponseSchema = z.object({
|
|
||||||
total_count: z.number().describe("Total number of matching results"),
|
|
||||||
incomplete_results: z
|
|
||||||
.boolean()
|
|
||||||
.describe("Whether the results are incomplete"),
|
|
||||||
items: z.array(SearchIssueItemSchema).describe("The search results"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response schema for a user search result item
|
|
||||||
* @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-users
|
|
||||||
*/
|
|
||||||
export const SearchUserItemSchema = z.object({
|
|
||||||
login: z.string().describe("The username of the user"),
|
|
||||||
id: z.number().describe("The ID of the user"),
|
|
||||||
node_id: z.string().describe("The Node ID of the user"),
|
|
||||||
avatar_url: z.string().describe("The avatar URL of the user"),
|
|
||||||
gravatar_id: z.string().describe("The Gravatar ID of the user"),
|
|
||||||
url: z.string().describe("The API URL for this user"),
|
|
||||||
html_url: z.string().describe("The HTML URL to view this user on GitHub"),
|
|
||||||
followers_url: z.string().describe("The API URL for followers of this user"),
|
|
||||||
following_url: z.string().describe("The API URL for following of this user"),
|
|
||||||
gists_url: z.string().describe("The API URL for gists of this user"),
|
|
||||||
starred_url: z
|
|
||||||
.string()
|
|
||||||
.describe("The API URL for starred repositories of this user"),
|
|
||||||
subscriptions_url: z
|
|
||||||
.string()
|
|
||||||
.describe("The API URL for subscriptions of this user"),
|
|
||||||
organizations_url: z
|
|
||||||
.string()
|
|
||||||
.describe("The API URL for organizations of this user"),
|
|
||||||
repos_url: z.string().describe("The API URL for repositories of this user"),
|
|
||||||
events_url: z.string().describe("The API URL for events of this user"),
|
|
||||||
received_events_url: z
|
|
||||||
.string()
|
|
||||||
.describe("The API URL for received events of this user"),
|
|
||||||
type: z.string().describe("The type of this user"),
|
|
||||||
site_admin: z.boolean().describe("Whether this user is a site administrator"),
|
|
||||||
score: z.number().describe("The search result score"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response schema for user search results
|
|
||||||
*/
|
|
||||||
export const SearchUsersResponseSchema = z.object({
|
|
||||||
total_count: z.number().describe("Total number of matching results"),
|
|
||||||
incomplete_results: z
|
|
||||||
.boolean()
|
|
||||||
.describe("Whether the results are incomplete"),
|
|
||||||
items: z.array(SearchUserItemSchema).describe("The search results"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Input schema for code search
|
|
||||||
* @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-code--parameters
|
|
||||||
*/
|
|
||||||
export const SearchCodeSchema = z.object({
|
|
||||||
q: z
|
|
||||||
.string()
|
|
||||||
.describe(
|
|
||||||
"Search query. See GitHub code search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-code"
|
|
||||||
),
|
|
||||||
order: z
|
|
||||||
.enum(["asc", "desc"])
|
|
||||||
.optional()
|
|
||||||
.describe("Sort order (asc or desc)"),
|
|
||||||
per_page: z
|
|
||||||
.number()
|
|
||||||
.min(1)
|
|
||||||
.max(100)
|
|
||||||
.optional()
|
|
||||||
.describe("Results per page (max 100)"),
|
|
||||||
page: z.number().min(1).optional().describe("Page number"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Input schema for issues search
|
|
||||||
* @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests--parameters
|
|
||||||
*/
|
|
||||||
export const SearchIssuesSchema = z.object({
|
|
||||||
q: z
|
|
||||||
.string()
|
|
||||||
.describe(
|
|
||||||
"Search query. See GitHub issues search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-issues-and-pull-requests"
|
|
||||||
),
|
|
||||||
sort: z
|
|
||||||
.enum([
|
|
||||||
"comments",
|
|
||||||
"reactions",
|
|
||||||
"reactions-+1",
|
|
||||||
"reactions--1",
|
|
||||||
"reactions-smile",
|
|
||||||
"reactions-thinking_face",
|
|
||||||
"reactions-heart",
|
|
||||||
"reactions-tada",
|
|
||||||
"interactions",
|
|
||||||
"created",
|
|
||||||
"updated",
|
|
||||||
])
|
|
||||||
.optional()
|
|
||||||
.describe("Sort field"),
|
|
||||||
order: z
|
|
||||||
.enum(["asc", "desc"])
|
|
||||||
.optional()
|
|
||||||
.describe("Sort order (asc or desc)"),
|
|
||||||
per_page: z
|
|
||||||
.number()
|
|
||||||
.min(1)
|
|
||||||
.max(100)
|
|
||||||
.optional()
|
|
||||||
.describe("Results per page (max 100)"),
|
|
||||||
page: z.number().min(1).optional().describe("Page number"),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Input schema for users search
|
|
||||||
* @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-users--parameters
|
|
||||||
*/
|
|
||||||
export const SearchUsersSchema = z.object({
|
|
||||||
q: z
|
|
||||||
.string()
|
|
||||||
.describe(
|
|
||||||
"Search query. See GitHub users search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-users"
|
|
||||||
),
|
|
||||||
sort: z
|
|
||||||
.enum(["followers", "repositories", "joined"])
|
|
||||||
.optional()
|
|
||||||
.describe("Sort field"),
|
|
||||||
order: z
|
|
||||||
.enum(["asc", "desc"])
|
|
||||||
.optional()
|
|
||||||
.describe("Sort order (asc or desc)"),
|
|
||||||
per_page: z
|
|
||||||
.number()
|
|
||||||
.min(1)
|
|
||||||
.max(100)
|
|
||||||
.optional()
|
|
||||||
.describe("Results per page (max 100)"),
|
|
||||||
page: z.number().min(1).optional().describe("Page number"),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Add these schema definitions for issue management
|
|
||||||
|
|
||||||
export const ListIssuesOptionsSchema = z.object({
|
|
||||||
owner: z.string(),
|
|
||||||
repo: z.string(),
|
|
||||||
state: z.enum(['open', 'closed', 'all']).optional(),
|
|
||||||
labels: z.array(z.string()).optional(),
|
|
||||||
sort: z.enum(['created', 'updated', 'comments']).optional(),
|
|
||||||
direction: z.enum(['asc', 'desc']).optional(),
|
|
||||||
since: z.string().optional(), // ISO 8601 timestamp
|
|
||||||
page: z.number().optional(),
|
|
||||||
per_page: z.number().optional()
|
|
||||||
});
|
|
||||||
|
|
||||||
export const UpdateIssueOptionsSchema = z.object({
|
|
||||||
owner: z.string(),
|
|
||||||
repo: z.string(),
|
|
||||||
issue_number: z.number(),
|
|
||||||
title: z.string().optional(),
|
|
||||||
body: z.string().optional(),
|
|
||||||
state: z.enum(['open', 'closed']).optional(),
|
|
||||||
labels: z.array(z.string()).optional(),
|
|
||||||
assignees: z.array(z.string()).optional(),
|
|
||||||
milestone: z.number().optional()
|
|
||||||
});
|
|
||||||
|
|
||||||
export const IssueCommentSchema = z.object({
|
|
||||||
owner: z.string(),
|
|
||||||
repo: z.string(),
|
|
||||||
issue_number: z.number(),
|
|
||||||
body: z.string()
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GetIssueSchema = z.object({
|
|
||||||
owner: z.string().describe("Repository owner (username or organization)"),
|
|
||||||
repo: z.string().describe("Repository name"),
|
|
||||||
issue_number: z.number().describe("Issue number")
|
|
||||||
});
|
|
||||||
|
|
||||||
// Export types
|
|
||||||
export type GitHubAuthor = z.infer<typeof GitHubAuthorSchema>;
|
|
||||||
export type GitHubFork = z.infer<typeof GitHubForkSchema>;
|
|
||||||
export type GitHubIssue = z.infer<typeof GitHubIssueSchema>;
|
|
||||||
export type GitHubPullRequest = z.infer<typeof GitHubPullRequestSchema>;
|
|
||||||
export type GitHubRepository = z.infer<typeof GitHubRepositorySchema>;
|
|
||||||
export type GitHubFileContent = z.infer<typeof GitHubFileContentSchema>;
|
|
||||||
export type GitHubDirectoryContent = z.infer<
|
|
||||||
typeof GitHubDirectoryContentSchema
|
|
||||||
>;
|
|
||||||
export type GitHubContent = z.infer<typeof GitHubContentSchema>;
|
|
||||||
export type FileOperation = z.infer<typeof FileOperationSchema>;
|
|
||||||
export type GitHubTree = z.infer<typeof GitHubTreeSchema>;
|
|
||||||
export type GitHubCommit = z.infer<typeof GitHubCommitSchema>;
|
|
||||||
export type GitHubListCommits = z.infer<typeof GitHubListCommitsSchema>;
|
|
||||||
export type GitHubReference = z.infer<typeof GitHubReferenceSchema>;
|
|
||||||
export type CreateRepositoryOptions = z.infer<
|
|
||||||
typeof CreateRepositoryOptionsSchema
|
|
||||||
>;
|
|
||||||
export type CreateIssueOptions = z.infer<typeof CreateIssueOptionsSchema>;
|
|
||||||
export type CreatePullRequestOptions = z.infer<
|
|
||||||
typeof CreatePullRequestOptionsSchema
|
|
||||||
>;
|
|
||||||
export type CreateBranchOptions = z.infer<typeof CreateBranchOptionsSchema>;
|
|
||||||
export type GitHubCreateUpdateFileResponse = z.infer<
|
|
||||||
typeof GitHubCreateUpdateFileResponseSchema
|
|
||||||
>;
|
|
||||||
export type GitHubSearchResponse = z.infer<typeof GitHubSearchResponseSchema>;
|
|
||||||
export type SearchCodeItem = z.infer<typeof SearchCodeItemSchema>;
|
|
||||||
export type SearchCodeResponse = z.infer<typeof SearchCodeResponseSchema>;
|
|
||||||
export type SearchIssueItem = z.infer<typeof SearchIssueItemSchema>;
|
|
||||||
export type SearchIssuesResponse = z.infer<typeof SearchIssuesResponseSchema>;
|
|
||||||
export type SearchUserItem = z.infer<typeof SearchUserItemSchema>;
|
|
||||||
export type SearchUsersResponse = z.infer<typeof SearchUsersResponseSchema>;
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/gitlab /app
|
COPY src/gitlab /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
@@ -117,6 +117,8 @@ Add the following to your `claude_desktop_config.json`:
|
|||||||
"command": "docker",
|
"command": "docker",
|
||||||
"args": [
|
"args": [
|
||||||
"run",
|
"run",
|
||||||
|
"--rm",
|
||||||
|
"-i",
|
||||||
"-e",
|
"-e",
|
||||||
"GITLAB_PERSONAL_ACCESS_TOKEN",
|
"GITLAB_PERSONAL_ACCESS_TOKEN",
|
||||||
"-e",
|
"-e",
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
||||||
COPY src/google-maps /app
|
COPY src/google-maps /app
|
||||||
|
|||||||
@@ -106,7 +106,7 @@ Add the following to your `claude_desktop_config.json`:
|
|||||||
Docker build:
|
Docker build:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker build -t vonwig/google-maps:mcp -f src/google-maps/Dockerfile .
|
docker build -t mcp/google-maps -f src/google-maps/Dockerfile .
|
||||||
```
|
```
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/memory /app
|
COPY src/memory /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
@@ -137,7 +137,7 @@ Add this to your claude_desktop_config.json:
|
|||||||
"mcpServers": {
|
"mcpServers": {
|
||||||
"memory": {
|
"memory": {
|
||||||
"command": "docker",
|
"command": "docker",
|
||||||
"args": ["run", "-i", "--rm", "mcp/memory"]
|
"args": ["run", "-i", "-v", "claude-memory:/app/dist", "--rm", "mcp/memory"]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -158,6 +158,29 @@ Add this to your claude_desktop_config.json:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### NPX with custom setting
|
||||||
|
|
||||||
|
The server can be configured using the following environment variables:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"memory": {
|
||||||
|
"command": "npx",
|
||||||
|
"args": [
|
||||||
|
"-y",
|
||||||
|
"@modelcontextprotocol/server-memory"
|
||||||
|
],
|
||||||
|
"env": {
|
||||||
|
"MEMORY_FILE_PATH": "/path/to/custom/memory.json"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- `MEMORY_FILE_PATH`: Path to the memory storage JSON file (default: `memory.json` in the server directory)
|
||||||
|
|
||||||
### System Prompt
|
### System Prompt
|
||||||
|
|
||||||
The prompt for utilizing memory depends on the use case. Changing the prompt will help the model determine the frequency and types of memories created.
|
The prompt for utilizing memory depends on the use case. Changing the prompt will help the model determine the frequency and types of memories created.
|
||||||
|
|||||||
@@ -10,10 +10,15 @@ import { promises as fs } from 'fs';
|
|||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { fileURLToPath } from 'url';
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
|
// Define memory file path using environment variable with fallback
|
||||||
|
const defaultMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.json');
|
||||||
|
|
||||||
// Define the path to the JSONL file, you can change this to your desired local path
|
// If MEMORY_FILE_PATH is just a filename, put it in the same directory as the script
|
||||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
const MEMORY_FILE_PATH = process.env.MEMORY_FILE_PATH
|
||||||
const MEMORY_FILE_PATH = path.join(__dirname, 'memory.json');
|
? path.isAbsolute(process.env.MEMORY_FILE_PATH)
|
||||||
|
? process.env.MEMORY_FILE_PATH
|
||||||
|
: path.join(path.dirname(fileURLToPath(import.meta.url)), process.env.MEMORY_FILE_PATH)
|
||||||
|
: defaultMemoryPath;
|
||||||
|
|
||||||
// We are storing our memory using entities, relations, and observations in a graph structure
|
// We are storing our memory using entities, relations, and observations in a graph structure
|
||||||
interface Entity {
|
interface Entity {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@modelcontextprotocol/server-memory",
|
"name": "@modelcontextprotocol/server-memory",
|
||||||
"version": "0.6.2",
|
"version": "0.6.3",
|
||||||
"description": "MCP server for enabling memory for Claude through a knowledge graph",
|
"description": "MCP server for enabling memory for Claude through a knowledge graph",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/postgres /app
|
COPY src/postgres /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
23
src/redis/Dockerfile
Normal file
23
src/redis/Dockerfile
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
FROM node:22.12-alpine as builder
|
||||||
|
|
||||||
|
COPY src/redis /app
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
RUN --mount=type=cache,target=/root/.npm npm install
|
||||||
|
|
||||||
|
RUN npm run build
|
||||||
|
|
||||||
|
FROM node:22-alpine AS release
|
||||||
|
|
||||||
|
COPY --from=builder /app/build /app/build
|
||||||
|
COPY --from=builder /app/package.json /app/package.json
|
||||||
|
COPY --from=builder /app/package-lock.json /app/package-lock.json
|
||||||
|
|
||||||
|
ENV NODE_ENV=production
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
RUN npm ci --ignore-scripts --omit-dev
|
||||||
|
|
||||||
|
ENTRYPOINT ["node", "build/index.js"]
|
||||||
80
src/redis/README.md
Normal file
80
src/redis/README.md
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
# Redis
|
||||||
|
|
||||||
|
A Model Context Protocol server that provides access to Redis databases. This server enables LLMs to interact with Redis key-value stores through a set of standardized tools.
|
||||||
|
|
||||||
|
## Components
|
||||||
|
|
||||||
|
### Tools
|
||||||
|
|
||||||
|
- **set**
|
||||||
|
- Set a Redis key-value pair with optional expiration
|
||||||
|
- Input:
|
||||||
|
- `key` (string): Redis key
|
||||||
|
- `value` (string): Value to store
|
||||||
|
- `expireSeconds` (number, optional): Expiration time in seconds
|
||||||
|
|
||||||
|
- **get**
|
||||||
|
- Get value by key from Redis
|
||||||
|
- Input: `key` (string): Redis key to retrieve
|
||||||
|
|
||||||
|
- **delete**
|
||||||
|
- Delete one or more keys from Redis
|
||||||
|
- Input: `key` (string | string[]): Key or array of keys to delete
|
||||||
|
|
||||||
|
- **list**
|
||||||
|
- List Redis keys matching a pattern
|
||||||
|
- Input: `pattern` (string, optional): Pattern to match keys (default: *)
|
||||||
|
|
||||||
|
## Usage with Claude Desktop
|
||||||
|
|
||||||
|
To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`:
|
||||||
|
|
||||||
|
### Docker
|
||||||
|
|
||||||
|
* when running docker on macos, use host.docker.internal if the server is running on the host network (eg localhost)
|
||||||
|
* Redis URL can be specified as an argument, defaults to "redis://localhost:6379"
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"redis": {
|
||||||
|
"command": "docker",
|
||||||
|
"args": [
|
||||||
|
"run",
|
||||||
|
"-i",
|
||||||
|
"--rm",
|
||||||
|
"mcp/redis",
|
||||||
|
"redis://host.docker.internal:6379"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### NPX
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"redis": {
|
||||||
|
"command": "npx",
|
||||||
|
"args": [
|
||||||
|
"-y",
|
||||||
|
"@modelcontextprotocol/server-redis",
|
||||||
|
"redis://localhost:6379"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Building
|
||||||
|
|
||||||
|
Docker:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
docker build -t mcp/redis -f src/redis/Dockerfile .
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
||||||
28
src/redis/package.json
Normal file
28
src/redis/package.json
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
{
|
||||||
|
"name": "redis",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"type": "module",
|
||||||
|
"bin": {
|
||||||
|
"redis": "./build/index.js"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsc && node -e \"require('fs').chmodSync('build/index.js', '755')\""
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"build"
|
||||||
|
],
|
||||||
|
"keywords": [],
|
||||||
|
"author": "",
|
||||||
|
"license": "ISC",
|
||||||
|
"description": "",
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.10.2",
|
||||||
|
"typescript": "^5.7.2"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@modelcontextprotocol/sdk": "^0.4.0",
|
||||||
|
"@types/redis": "^4.0.10",
|
||||||
|
"redis": "^4.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
236
src/redis/src/index.ts
Normal file
236
src/redis/src/index.ts
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||||
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||||
|
import {
|
||||||
|
CallToolRequestSchema,
|
||||||
|
ListToolsRequestSchema,
|
||||||
|
} from "@modelcontextprotocol/sdk/types.js";
|
||||||
|
import { z } from "zod";
|
||||||
|
import { createClient } from 'redis';
|
||||||
|
|
||||||
|
// Get Redis URL from command line args or use default
|
||||||
|
const REDIS_URL = process.argv[2] || "redis://localhost:6379";
|
||||||
|
const redisClient = createClient({
|
||||||
|
url: REDIS_URL
|
||||||
|
});
|
||||||
|
|
||||||
|
// Define Zod schemas for validation
|
||||||
|
const SetArgumentsSchema = z.object({
|
||||||
|
key: z.string(),
|
||||||
|
value: z.string(),
|
||||||
|
expireSeconds: z.number().optional(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const GetArgumentsSchema = z.object({
|
||||||
|
key: z.string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const DeleteArgumentsSchema = z.object({
|
||||||
|
key: z.string().or(z.array(z.string())),
|
||||||
|
});
|
||||||
|
|
||||||
|
const ListArgumentsSchema = z.object({
|
||||||
|
pattern: z.string().default("*"),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create server instance
|
||||||
|
const server = new Server(
|
||||||
|
{
|
||||||
|
name: "redis",
|
||||||
|
version: "1.0.0"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// List available tools
|
||||||
|
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||||
|
return {
|
||||||
|
tools: [
|
||||||
|
{
|
||||||
|
name: "set",
|
||||||
|
description: "Set a Redis key-value pair with optional expiration",
|
||||||
|
inputSchema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
key: {
|
||||||
|
type: "string",
|
||||||
|
description: "Redis key",
|
||||||
|
},
|
||||||
|
value: {
|
||||||
|
type: "string",
|
||||||
|
description: "Value to store",
|
||||||
|
},
|
||||||
|
expireSeconds: {
|
||||||
|
type: "number",
|
||||||
|
description: "Optional expiration time in seconds",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
required: ["key", "value"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "get",
|
||||||
|
description: "Get value by key from Redis",
|
||||||
|
inputSchema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
key: {
|
||||||
|
type: "string",
|
||||||
|
description: "Redis key to retrieve",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
required: ["key"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "delete",
|
||||||
|
description: "Delete one or more keys from Redis",
|
||||||
|
inputSchema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
key: {
|
||||||
|
oneOf: [
|
||||||
|
{ type: "string" },
|
||||||
|
{ type: "array", items: { type: "string" } }
|
||||||
|
],
|
||||||
|
description: "Key or array of keys to delete",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
required: ["key"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "list",
|
||||||
|
description: "List Redis keys matching a pattern",
|
||||||
|
inputSchema: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
pattern: {
|
||||||
|
type: "string",
|
||||||
|
description: "Pattern to match keys (default: *)",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle tool execution
|
||||||
|
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||||
|
const { name, arguments: args } = request.params;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (name === "set") {
|
||||||
|
const { key, value, expireSeconds } = SetArgumentsSchema.parse(args);
|
||||||
|
|
||||||
|
if (expireSeconds) {
|
||||||
|
await redisClient.setEx(key, expireSeconds, value);
|
||||||
|
} else {
|
||||||
|
await redisClient.set(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
text: `Successfully set key: ${key}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
} else if (name === "get") {
|
||||||
|
const { key } = GetArgumentsSchema.parse(args);
|
||||||
|
const value = await redisClient.get(key);
|
||||||
|
|
||||||
|
if (value === null) {
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
text: `Key not found: ${key}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
text: `${value}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
} else if (name === "delete") {
|
||||||
|
const { key } = DeleteArgumentsSchema.parse(args);
|
||||||
|
|
||||||
|
if (Array.isArray(key)) {
|
||||||
|
await redisClient.del(key);
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
text: `Successfully deleted ${key.length} keys`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
await redisClient.del(key);
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
text: `Successfully deleted key: ${key}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else if (name === "list") {
|
||||||
|
const { pattern } = ListArgumentsSchema.parse(args);
|
||||||
|
const keys = await redisClient.keys(pattern);
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
text: keys.length > 0
|
||||||
|
? `Found keys:\n${keys.join('\n')}`
|
||||||
|
: "No keys found matching pattern",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
throw new Error(`Unknown tool: ${name}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
throw new Error(
|
||||||
|
`Invalid arguments: ${error.errors
|
||||||
|
.map((e) => `${e.path.join(".")}: ${e.message}`)
|
||||||
|
.join(", ")}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start the server
|
||||||
|
async function main() {
|
||||||
|
try {
|
||||||
|
// Connect to Redis
|
||||||
|
redisClient.on('error', (err: Error) => console.error('Redis Client Error', err));
|
||||||
|
await redisClient.connect();
|
||||||
|
console.error(`Connected to Redis successfully at ${REDIS_URL}`);
|
||||||
|
|
||||||
|
const transport = new StdioServerTransport();
|
||||||
|
await server.connect(transport);
|
||||||
|
console.error("Redis MCP Server running on stdio");
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error during startup:", error);
|
||||||
|
await redisClient.quit();
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((error) => {
|
||||||
|
console.error("Fatal error in main():", error);
|
||||||
|
redisClient.quit().finally(() => process.exit(1));
|
||||||
|
});
|
||||||
16
src/redis/tsconfig.json
Normal file
16
src/redis/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2022",
|
||||||
|
"module": "Node16",
|
||||||
|
"moduleResolution": "Node16",
|
||||||
|
"outDir": "./build",
|
||||||
|
"rootDir": "./src",
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true
|
||||||
|
},
|
||||||
|
"include": ["src/**/*"],
|
||||||
|
"exclude": ["node_modules"]
|
||||||
|
}
|
||||||
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
COPY src/sequentialthinking /app
|
COPY src/sequentialthinking /app
|
||||||
COPY tsconfig.json /tsconfig.json
|
COPY tsconfig.json /tsconfig.json
|
||||||
|
|||||||
@@ -83,7 +83,7 @@ Add this to your `claude_desktop_config.json`:
|
|||||||
Docker:
|
Docker:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker build -t mcp/sequentialthinking -f sequentialthinking/Dockerfile .
|
docker build -t mcp/sequentialthinking -f src/sequentialthinking/Dockerfile .
|
||||||
```
|
```
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:22.12-alpine as builder
|
FROM node:22.12-alpine AS builder
|
||||||
|
|
||||||
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
# Must be entire project because `prepare` script is run during `npm install` and requires all files.
|
||||||
COPY src/slack /app
|
COPY src/slack /app
|
||||||
|
|||||||
@@ -102,7 +102,7 @@ const replyToThreadTool: Tool = {
|
|||||||
},
|
},
|
||||||
thread_ts: {
|
thread_ts: {
|
||||||
type: "string",
|
type: "string",
|
||||||
description: "The timestamp of the parent message",
|
description: "The timestamp of the parent message in the format '1234567890.123456'. Timestamps in the format without the period can be converted by adding the period such that 6 numbers come after it.",
|
||||||
},
|
},
|
||||||
text: {
|
text: {
|
||||||
type: "string",
|
type: "string",
|
||||||
@@ -168,7 +168,7 @@ const getThreadRepliesTool: Tool = {
|
|||||||
},
|
},
|
||||||
thread_ts: {
|
thread_ts: {
|
||||||
type: "string",
|
type: "string",
|
||||||
description: "The timestamp of the parent message",
|
description: "The timestamp of the parent message in the format '1234567890.123456'. Timestamps in the format without the period can be converted by adding the period such that 6 numbers come after it.",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
required: ["channel_id", "thread_ts"],
|
required: ["channel_id", "thread_ts"],
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import logging
|
import logging
|
||||||
from contextlib import closing
|
from contextlib import closing
|
||||||
@@ -9,6 +11,12 @@ import mcp.server.stdio
|
|||||||
from pydantic import AnyUrl
|
from pydantic import AnyUrl
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
# reconfigure UnicodeEncodeError prone default (i.e. windows-1252) to utf-8
|
||||||
|
if sys.platform == "win32" and os.environ.get('PYTHONIOENCODING') is None:
|
||||||
|
sys.stdin.reconfigure(encoding="utf-8")
|
||||||
|
sys.stdout.reconfigure(encoding="utf-8")
|
||||||
|
sys.stderr.reconfigure(encoding="utf-8")
|
||||||
|
|
||||||
logger = logging.getLogger('mcp_sqlite_server')
|
logger = logging.getLogger('mcp_sqlite_server')
|
||||||
logger.info("Starting MCP SQLite Server")
|
logger.info("Starting MCP SQLite Server")
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user