Mirror of https://github.com/modelcontextprotocol/servers.git, synced 2026-04-20 12:55:21 +02:00
Merge pull request #507 from alezkv/alezkv/update-error-handling
Update error handling to use ErrorData consistently
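In short, the error code and message are no longer passed to McpError directly; they are wrapped in an ErrorData payload first. A minimal sketch of the pattern, assuming mcp>=1.1.3 (keyword arguments are used here for readability, while the hunks below pass them positionally; raise_fetch_error is a hypothetical helper for illustration):

from mcp.shared.exceptions import McpError
from mcp.types import INTERNAL_ERROR, ErrorData

def raise_fetch_error(url: str) -> None:
    # Before: raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}")
    # After: the code/message pair is carried inside an ErrorData object.
    raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}"))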
@@ -17,7 +17,7 @@ classifiers = [
 ]
 dependencies = [
     "markdownify>=0.13.1",
-    "mcp>=1.0.0",
+    "mcp>=1.1.3",
     "protego>=0.3.1",
     "pydantic>=2.0.0",
     "readabilipy>=0.2.0",
@@ -7,6 +7,7 @@ from mcp.shared.exceptions import McpError
 from mcp.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
+    ErrorData,
     GetPromptResult,
     Prompt,
     PromptArgument,
@@ -79,15 +80,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
                 headers={"User-Agent": user_agent},
             )
         except HTTPError:
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
-            )
+            ))
         if response.status_code in (401, 403):
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
-            )
+            ))
         elif 400 <= response.status_code < 500:
             return
     robot_txt = response.text
@@ -96,7 +97,7 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
     )
     robot_parser = Protego.parse(processed_robot_txt)
     if not robot_parser.can_fetch(str(url), user_agent):
-        raise McpError(
+        raise McpError(ErrorData(
             INTERNAL_ERROR,
             f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
             f"<useragent>{user_agent}</useragent>\n"
@@ -104,7 +105,7 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
             f"<robots>\n{robot_txt}\n</robots>\n"
             f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
             f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
-        )
+        ))


 async def fetch_url(
@@ -124,12 +125,12 @@ async def fetch_url(
                 timeout=30,
             )
         except HTTPError as e:
-            raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
+            raise McpError(ErrorData(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}"))
         if response.status_code >= 400:
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"Failed to fetch {url} - status code {response.status_code}",
-            )
+            ))

         page_raw = response.text

@@ -221,11 +222,11 @@ Although originally you did not have internet access, and were advised to refuse
         try:
             args = Fetch(**arguments)
         except ValueError as e:
-            raise McpError(INVALID_PARAMS, str(e))
+            raise McpError(ErrorData(INVALID_PARAMS, str(e)))

         url = str(args.url)
         if not url:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))

         if not ignore_robots_txt:
             await check_may_autonomously_fetch_url(url, user_agent_autonomous)
@@ -253,7 +254,7 @@ Although originally you did not have internet access, and were advised to refuse
     @server.get_prompt()
     async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
         if not arguments or "url" not in arguments:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))

         url = arguments["url"]
