Commit 71da7b2

Merge branch 'main' into feat/add-start-scripts-everything-server
2 parents 49088d0 + 01b5cd5

21 files changed (+171, -130 lines)

.github/workflows/release.yml

Lines changed: 10 additions & 1 deletion
@@ -67,6 +67,7 @@ jobs:
     needs: [create-metadata]
     if: ${{ needs.create-metadata.outputs.npm_packages != '[]' || needs.create-metadata.outputs.pypi_packages != '[]' }}
     runs-on: ubuntu-latest
+    environment: release
     outputs:
       changes_made: ${{ steps.commit.outputs.changes_made }}
     steps:
@@ -170,7 +171,7 @@ jobs:
         working-directory: src/${{ matrix.package }}
         run: |
           VERSION=$(jq -r .version package.json)
-          if npm view --json | jq --arg version "$VERSION" '[.[]][0].versions | contains([$version])'; then
+          if npm view --json | jq -e --arg version "$VERSION" '[.[]][0].versions | contains([$version])'; then
             echo "Version $VERSION already exists on npm"
             exit 1
           fi
@@ -210,3 +211,11 @@ jobs:
           gh release create "$VERSION" \
             --title "Release $VERSION" \
             --notes-file RELEASE_NOTES.md
+
+      - name: Docker MCP images
+        uses: peter-evans/repository-dispatch@v3
+        with:
+          token: ${{ secrets.DOCKER_TOKEN }}
+          repository: docker/labs-ai-tools-for-devs
+          event-type: build-mcp-images
+          client-payload: '{"ref": "${{ needs.create-metadata.outputs.version }}"}'
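
Two functional changes in the workflow above are worth noting. The publish job now runs in the `release` GitHub environment, and the duplicate-version guard gains jq's `-e` flag: without it, jq exits 0 regardless of the boolean it prints, so the `if` branch fired on every run; with `-e`, the exit status is non-zero when the output is `false` or `null`, so the job only aborts when the version really is published. As a hypothetical illustration only (not part of the workflow), the same guard could be written against the public npm registry endpoint `https://registry.npmjs.org/<package>`, whose metadata exposes a `versions` mapping:

```python
# Hypothetical stand-alone equivalent of the workflow's version guard, for
# illustration only. Assumes the public npm registry metadata endpoint
# https://registry.npmjs.org/<package>, which returns a "versions" mapping
# keyed by published version strings.
import json
import sys
import urllib.parse
import urllib.request


def version_already_published(package: str, version: str) -> bool:
    """Return True if `version` of `package` already exists on the npm registry."""
    # Scoped names like "@modelcontextprotocol/server-everything" need the slash encoded.
    url = "https://registry.npmjs.org/" + urllib.parse.quote(package, safe="@")
    with urllib.request.urlopen(url) as resp:
        metadata = json.load(resp)
    return version in metadata.get("versions", {})


if __name__ == "__main__":
    name, version = sys.argv[1], sys.argv[2]
    if version_already_published(name, version):
        print(f"Version {version} already exists on npm")
        sys.exit(1)
```

Invoked as `python check_version.py <package> <version>`, it exits 1 when the version is already published, mirroring the shell guard's behaviour.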

README.md

Lines changed: 9 additions & 1 deletion
@@ -49,6 +49,7 @@ Official integrations are maintained by companies building production ready MCP
 - <img height="12" width="12" src="https://www.meilisearch.com/favicon.ico" alt="Meilisearch Logo" /> **[Meilisearch](https://github.com/meilisearch/meilisearch-mcp)** - Interact & query with Meilisearch (Full-text & semantic search API)
 - <img height="12" width="12" src="https://metoro.io/static/images/logos/Metoro.svg" /> **[Metoro](https://github.com/metoro-io/metoro-mcp-server)** - Query and interact with kubernetes environments monitored by Metoro
 - <img height="12" width="12" src="https://www.motherduck.com/favicon.ico" alt="MotherDuck Logo" /> **[MotherDuck](https://github.com/motherduckdb/mcp-server-motherduck)** - Query and analyze data with MotherDuck and local DuckDB
+- <img height="12" width="12" src="https://needle-ai.com/images/needle-logo-orange-2-rounded.png" alt="Needle AI Logo" /> **[Needle](https://github.com/needle-ai/needle-mcp)** - Production-ready RAG out of the box to search and retrieve data from your own documents.
 - <img height="12" width="12" src="https://neo4j.com/favicon.ico" alt="Neo4j Logo" /> **[Neo4j](https://github.com/neo4j-contrib/mcp-neo4j/)** - Neo4j graph database server (schema + read/write-cypher) and separate graph database backed memory
 - **[Neon](https://github.com/neondatabase/mcp-server-neon)** - Interact with the Neon serverless Postgres platform
 - <img height="12" width="12" src="https://qdrant.tech/img/brand-resources-logos/logomark.svg" /> **[Qdrant](https://github.com/qdrant/mcp-server-qdrant/)** - Implement semantic memory layer on top of the Qdrant vector search engine
@@ -72,15 +73,19 @@ A growing set of community-developed and maintained servers demonstrates various
 - **[Atlassian](https://github.com/sooperset/mcp-atlassian)** - Interact with Atlassian Cloud products (Confluence and Jira) including searching/reading Confluence spaces/pages, accessing Jira issues, and project metadata.
 - **[BigQuery](https://github.com/LucasHild/mcp-server-bigquery)** (by LucasHild) - This server enables LLMs to inspect database schemas and execute queries on BigQuery.
 - **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities
+- **[ChatMCP](https://github.com/AI-QL/chat-mcp)** – An Open Source Cross-platform GUI Desktop application compatible with Linux, macOS, and Windows, enabling seamless interaction with MCP servers across dynamically selectable LLMs, by **[AIQL](https://github.com/AI-QL)**
 - **[ChatSum](https://github.com/mcpso/mcp-server-chatsum)** - Query and Summarize chat messages with LLM. by [mcpso](https://mcp.so)
 - **[Chroma](https://github.com/privetin/chroma)** - Vector database server for semantic document search and metadata filtering, built on Chroma
 - **[Cloudinary](https://github.com/felores/cloudinary-mcp-server)** - Cloudinary Model Context Protocol Server to upload media to Cloudinary and get back the media link and details.
 - **[cognee-mcp](https://github.com/topoteretes/cognee-mcp-server)** - GraphRAG memory server with customizable ingestion, data processing and search
 - **[coin_api_mcp](https://github.com/longmans/coin_api_mcp)** - Provides access to [coinmarketcap](https://coinmarketcap.com/) cryptocurrency data.
 - **[Contentful-mcp](https://github.com/ivo-toby/contentful-mcp)** - Read, update, delete, publish content in your [Contentful](https://contentful.com) space(s) from this MCP Server.
 - **[Data Exploration](https://github.com/reading-plus-ai/mcp-server-data-exploration)** - MCP server for autonomous data exploration on .csv-based datasets, providing intelligent insights with minimal effort. NOTE: Will execute arbitrary Python code on your machine, please use with caution!
+- **[Dataset Viewer](https://github.com/privetin/dataset-viewer)** - Browse and analyze Hugging Face datasets with features like search, filtering, statistics, and data export
 - **[DevRev](https://github.com/kpsunil97/devrev-mcp-server)** - An MCP server to integrate with DevRev APIs to search through your DevRev Knowledge Graph where objects can be imported from diff. sources listed [here](https://devrev.ai/docs/import#available-sources).
+- **[Dify](https://github.com/YanxingLiu/dify-mcp-server)** - A simple implementation of an MCP server for dify workflows.
 - **[Docker](https://github.com/ckreiling/mcp-server-docker)** - Integrate with Docker to manage containers, images, volumes, and networks.
+- **[Drupal](https://github.com/Omedia/mcp-server-drupal)** - Server for interacting with [Drupal](https://www.drupal.org/project/mcp) using STDIO transport layer.
 - **[Elasticsearch](https://github.com/cr7258/elasticsearch-mcp-server)** - MCP server implementation that provides Elasticsearch interaction.
 - **[Fetch](https://github.com/zcaceres/fetch-mcp)** - A server that flexibly fetches HTML, JSON, Markdown, or plaintext.
 - **[FireCrawl](https://github.com/vrknetha/mcp-server-firecrawl)** - Advanced web scraping with JavaScript rendering, PDF support, and smart rate limiting
@@ -104,16 +109,18 @@ A growing set of community-developed and maintained servers demonstrates various
 - **[MySQL](https://github.com/benborla/mcp-server-mysql)** (by benborla) - MySQL database integration in NodeJS with configurable access controls and schema inspection
 - **[MySQL](https://github.com/designcomputer/mysql_mcp_server)** (by DesignComputer) - MySQL database integration in Python with configurable access controls and schema inspection
 - **[NS Travel Information](https://github.com/r-huijts/ns-mcp-server)** - Access Dutch Railways (NS) real-time train travel information and disruptions through the official NS API.
-- **[Needle](https://github.com/JANHMS/needle-mcp)** - Production-ready RAG out of the box to search and retrieve data from your own documents.
 - **[Notion](https://github.com/suekou/mcp-notion-server)** (by suekou) - Interact with Notion API.
 - **[Notion](https://github.com/v-3/notion-server)** (by v-3) - Notion MCP integration. Search, Read, Update, and Create pages through Claude chat.
 - **[oatpp-mcp](https://github.com/oatpp/oatpp-mcp)** - C++ MCP integration for Oat++. Use [Oat++](https://oatpp.io) to build MCP servers.
 - **[Obsidian Markdown Notes](https://github.com/calclavia/mcp-obsidian)** - Read and search through your Obsidian vault or any directory containing Markdown notes
 - **[OpenAPI](https://github.com/snaggle-ai/openapi-mcp-server)** - Interact with [OpenAPI](https://www.openapis.org/) APIs.
+- **[OpenCTI](https://github.com/Spathodea-Network/opencti-mcp)** - Interact with OpenCTI platform to retrieve threat intelligence data including reports, indicators, malware and threat actors.
 - **[OpenRPC](https://github.com/shanejonas/openrpc-mpc-server)** - Interact with and discover JSON-RPC APIs via [OpenRPC](https://open-rpc.org).
 - **[Pandoc](https://github.com/vivekVells/mcp-pandoc)** - MCP server for seamless document format conversion using Pandoc, supporting Markdown, HTML, and plain text, with other formats like PDF, csv and docx in development.
 - **[Pinecone](https://github.com/sirmews/mcp-pinecone)** - MCP server for searching and uploading records to Pinecone. Allows for simple RAG features, leveraging Pinecone's Inference API.
+- **[Placid.app](https://github.com/felores/placid-mcp-server)** - Generate image and video creatives using Placid.app templates
 - **[Playwright](https://github.com/executeautomation/mcp-playwright)** - This MCP Server will help you run browser automation and webscraping using Playwright
+- **[Postman](https://github.com/shannonlal/mcp-postman)** - MCP server for running Postman Collections locally via Newman. Allows for simple execution of Postman Server and returns the results of whether the collection passed all the tests.
 - **[RAG Web Browser](https://github.com/apify/mcp-server-rag-web-browser)** An MCP server for Apify's RAG Web Browser Actor to perform web searches, scrape URLs, and return content in Markdown.
 - **[Rememberizer AI](https://github.com/skydeckai/mcp-server-rememberizer)** - An MCP server designed for interacting with the Rememberizer data source, facilitating enhanced knowledge retrieval.
 - **[Salesforce MCP](https://github.com/smn2gnt/MCP-Salesforce)** - Interact with Salesforce Data and Metadata
@@ -152,6 +159,7 @@ Additional resources on MCP.
 - **[mcp-get](https://mcp-get.com)** - Command line tool for installing and managing MCP servers by **[Michael Latman](https://github.com/michaellatman)**
 - **[mcp-manager](https://github.com/zueai/mcp-manager)** - Simple Web UI to install and manage MCP servers for Claude Desktop by **[Zue](https://github.com/zueai)**
 - **[MCPHub](https://github.com/Jeamee/MCPHub-Desktop)** – An Open Source MacOS & Windows GUI Desktop app for discovering, installing and managing MCP servers by **[Jeamee](https://github.com/jeamee)**
+- **[mcp.run](https://mcp.run)** - A hosted registry and control plane to install & run secure + portable MCP Servers.
 - **[Open-Sourced MCP Servers Directory](https://github.com/chatmcp/mcp-directory)** - A curated list of MCP servers by **[mcpso](https://mcp.so)**
 - **[PulseMCP](https://www.pulsemcp.com)** ([API](https://www.pulsemcp.com/api)) - Community hub & weekly newsletter for discovering MCP servers, clients, articles, and news by **[Tadas Antanavicius](https://github.com/tadasant)**, **[Mike Coughlin](https://github.com/macoughl)**, and **[Ravina Patel](https://github.com/ravinahp)**
 - **[r/mcp](https://www.reddit.com/r/mcp)** – A Reddit community dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**

package-lock.json

Lines changed: 13 additions & 7 deletions
Generated file; diff not rendered by default.

src/aws-kb-retrieval-server/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/aws-kb-retrieval-server /app
 COPY tsconfig.json /tsconfig.json

src/brave-search/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 # Must be entire project because `prepare` script is run during `npm install` and requires all files.
 COPY src/brave-search /app

src/everart/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/everart /app
 COPY tsconfig.json /tsconfig.json

src/everything/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/everything /app
 COPY tsconfig.json /tsconfig.json

src/fetch/pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ classifiers = [
 ]
 dependencies = [
     "markdownify>=0.13.1",
-    "mcp>=1.0.0",
+    "mcp>=1.1.3",
     "protego>=0.3.1",
     "pydantic>=2.0.0",
     "readabilipy>=0.2.0",

src/fetch/src/mcp_server_fetch/server.py

Lines changed: 21 additions & 20 deletions
@@ -7,6 +7,7 @@
 from mcp.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
+    ErrorData,
     GetPromptResult,
     Prompt,
     PromptArgument,
@@ -79,15 +80,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
                 headers={"User-Agent": user_agent},
             )
         except HTTPError:
-            raise McpError(
-                INTERNAL_ERROR,
-                f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
-            )
+            raise McpError(ErrorData(
+                code=INTERNAL_ERROR,
+                message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
+            ))
         if response.status_code in (401, 403):
-            raise McpError(
-                INTERNAL_ERROR,
-                f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
-            )
+            raise McpError(ErrorData(
+                code=INTERNAL_ERROR,
+                message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
+            ))
         elif 400 <= response.status_code < 500:
             return
         robot_txt = response.text
@@ -96,15 +97,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
     )
     robot_parser = Protego.parse(processed_robot_txt)
     if not robot_parser.can_fetch(str(url), user_agent):
-        raise McpError(
-            INTERNAL_ERROR,
-            f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
+        raise McpError(ErrorData(
+            code=INTERNAL_ERROR,
+            message=f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
             f"<useragent>{user_agent}</useragent>\n"
             f"<url>{url}</url>"
             f"<robots>\n{robot_txt}\n</robots>\n"
            f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
            f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
-        )
+        ))
 
 
 async def fetch_url(
@@ -124,12 +125,12 @@ async def fetch_url(
             timeout=30,
         )
     except HTTPError as e:
-        raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
+        raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}"))
     if response.status_code >= 400:
-        raise McpError(
-            INTERNAL_ERROR,
-            f"Failed to fetch {url} - status code {response.status_code}",
-        )
+        raise McpError(ErrorData(
+            code=INTERNAL_ERROR,
+            message=f"Failed to fetch {url} - status code {response.status_code}",
+        ))
 
     page_raw = response.text
 
@@ -221,11 +222,11 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
         try:
             args = Fetch(**arguments)
         except ValueError as e:
-            raise McpError(INVALID_PARAMS, str(e))
+            raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e)))
 
         url = str(args.url)
         if not url:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
 
         if not ignore_robots_txt:
             await check_may_autonomously_fetch_url(url, user_agent_autonomous)
@@ -253,7 +254,7 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
     @server.get_prompt()
     async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
         if not arguments or "url" not in arguments:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
 
         url = arguments["url"]
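The `server.py` diff above, paired with the `mcp>=1.1.3` bump in `src/fetch/pyproject.toml`, adopts the SDK's newer `McpError` calling convention: the error code and message are wrapped in an `ErrorData` object instead of being passed positionally. Below is a minimal sketch of the pattern, assuming `mcp>=1.1.3`; the `ErrorData` import from `mcp.types` follows the diff, while the `McpError` and `INVALID_PARAMS` import paths and the `require_url` helper are illustrative assumptions, not part of this commit.

```python
# Minimal sketch of the McpError convention adopted in the diff (assumes mcp>=1.1.3).
# Import paths for McpError and INVALID_PARAMS are assumptions; only the
# ErrorData(code=..., message=...) wrapper is taken from the change itself.
from mcp.shared.exceptions import McpError
from mcp.types import INVALID_PARAMS, ErrorData


def require_url(arguments: dict | None) -> str:
    """Return the "url" argument, or raise an MCP invalid-params error."""
    if not arguments or not arguments.get("url"):
        # Pre-1.1.3 style was positional: raise McpError(INVALID_PARAMS, "URL is required")
        raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
    return str(arguments["url"])
```

The same `ErrorData(code=..., message=...)` wrapper is applied to every `INTERNAL_ERROR` site in `fetch_url` and `check_may_autonomously_fetch_url` in the diff above.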