
Commit 3bf9d2c

Authored Jan 15, 2025
Merge pull request #507 from alezkv/alezkv/update-error-handling
Update error handling to use ErrorData consistently
2 parents: 0fb5880 + ebecd59

2 files changed: +14 -13
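
The change is mechanical but worth a sketch: every `raise McpError(code, message)` call now wraps its code/message pair in an `ErrorData` payload, matching the `McpError` constructor in newer `mcp` releases. A minimal illustration, not code from this commit (`require_url` is a hypothetical helper; keyword arguments are used here because `ErrorData` is a Pydantic model):

```python
from mcp.shared.exceptions import McpError
from mcp.types import INVALID_PARAMS, ErrorData

def require_url(arguments: dict | None) -> str:
    # Hypothetical helper condensing the validation pattern in this diff.
    if not arguments or "url" not in arguments:
        # Before this commit: raise McpError(INVALID_PARAMS, "URL is required")
        # After: the code/message pair rides inside an ErrorData payload.
        raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
    return arguments["url"]
```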


src/fetch/pyproject.toml (+1 -1)

@@ -17,7 +17,7 @@ classifiers = [
 ]
 dependencies = [
     "markdownify>=0.13.1",
-    "mcp>=1.0.0",
+    "mcp>=1.1.3",
     "protego>=0.3.1",
     "pydantic>=2.0.0",
     "readabilipy>=0.2.0",

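The dependency floor moves from mcp>=1.0.0 to mcp>=1.1.3 because the code now assumes an SDK whose `McpError` takes a structured `ErrorData` argument. For orientation, `ErrorData` in `mcp.types` is approximately the following Pydantic model (a paraphrase, not the SDK source; the codes are the standard JSON-RPC values):

```python
from typing import Any
from pydantic import BaseModel

# Approximate shape of mcp.types.ErrorData (paraphrased, not verbatim).
class ErrorData(BaseModel):
    code: int                 # JSON-RPC error code, e.g. -32602 (invalid params)
    message: str              # short human-readable description for the client
    data: Any | None = None   # optional structured details
```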
src/fetch/src/mcp_server_fetch/server.py (+13 -12)

@@ -7,6 +7,7 @@
 from mcp.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
+    ErrorData,
     GetPromptResult,
     Prompt,
     PromptArgument,
@@ -79,15 +80,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
                 headers={"User-Agent": user_agent},
             )
         except HTTPError:
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
-            )
+            ))
         if response.status_code in (401, 403):
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
-            )
+            ))
         elif 400 <= response.status_code < 500:
             return
         robot_txt = response.text
@@ -96,15 +97,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
     )
     robot_parser = Protego.parse(processed_robot_txt)
     if not robot_parser.can_fetch(str(url), user_agent):
-        raise McpError(
+        raise McpError(ErrorData(
             INTERNAL_ERROR,
             f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
             f"<useragent>{user_agent}</useragent>\n"
             f"<url>{url}</url>"
             f"<robots>\n{robot_txt}\n</robots>\n"
             f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
             f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
-        )
+        ))


 async def fetch_url(
@@ -124,12 +125,12 @@ async def fetch_url(
                 timeout=30,
             )
         except HTTPError as e:
-            raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
+            raise McpError(ErrorData(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}"))
         if response.status_code >= 400:
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"Failed to fetch {url} - status code {response.status_code}",
-            )
+            ))

         page_raw = response.text

@@ -221,11 +222,11 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
     try:
         args = Fetch(**arguments)
     except ValueError as e:
-        raise McpError(INVALID_PARAMS, str(e))
+        raise McpError(ErrorData(INVALID_PARAMS, str(e)))

     url = str(args.url)
     if not url:
-        raise McpError(INVALID_PARAMS, "URL is required")
+        raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))

     if not ignore_robots_txt:
         await check_may_autonomously_fetch_url(url, user_agent_autonomous)
@@ -253,7 +254,7 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
 @server.get_prompt()
 async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
     if not arguments or "url" not in arguments:
-        raise McpError(INVALID_PARAMS, "URL is required")
+        raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))

     url = arguments["url"]
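At the boundary, the structured payload stays attached to the exception, so the transport can return a proper JSON-RPC error object rather than a bare string. A hedged sketch of how the new pattern surfaces to a caller, assuming the usual `McpError` shape where the payload is exposed as `.error` (`get_prompt_demo` is a stand-in for the check at the top of `get_prompt`, not code from this repository):

```python
import asyncio

from mcp.shared.exceptions import McpError
from mcp.types import INVALID_PARAMS, ErrorData

async def get_prompt_demo(arguments: dict | None) -> str:
    # Stand-in for the "URL is required" validation in get_prompt above.
    if not arguments or "url" not in arguments:
        raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
    return arguments["url"]

async def main() -> None:
    try:
        await get_prompt_demo(None)
    except McpError as exc:
        # The structured payload survives on the exception, ready to be
        # serialized back to the client as a JSON-RPC error object.
        print(exc.error.code, exc.error.message)

asyncio.run(main())
```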