From ebecd590e2d190c63918ac648943e791130b7127 Mon Sep 17 00:00:00 2001
From: Aleksey Zhukov <353748+alezkv@users.noreply.github.com>
Date: Fri, 10 Jan 2025 21:53:57 +0100
Subject: [PATCH] Update error handling to use ErrorData consistently

Also:
Bumped the minimum mcp dependency from 1.0.0 to 1.1.3.

This aligns with the MCP SDK's error handling pattern, in which
McpError wraps a structured ErrorData payload, and provides better
structured error information (code, message, optional data). Since
ErrorData is a pydantic model, its fields are passed as keyword
arguments.
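
For illustration, one raise site before and after (taken from
call_tool below):

    # before: positional arguments on McpError
    raise McpError(INVALID_PARAMS, "URL is required")

    # after: McpError wraps a structured ErrorData payload
    raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))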
---
 src/fetch/pyproject.toml                 |  2 +-
 src/fetch/src/mcp_server_fetch/server.py | 41 +++++++++++++++++++++--------------------
 2 files changed, 22 insertions(+), 21 deletions(-)

diff --git a/src/fetch/pyproject.toml b/src/fetch/pyproject.toml
index 1d43cae83..ed76fdcd8 100644
--- a/src/fetch/pyproject.toml
+++ b/src/fetch/pyproject.toml
@@ -17,7 +17,7 @@ classifiers = [
 ]
 dependencies = [
     "markdownify>=0.13.1",
-    "mcp>=1.0.0",
+    "mcp>=1.1.3",
     "protego>=0.3.1",
     "pydantic>=2.0.0",
     "readabilipy>=0.2.0",
diff --git a/src/fetch/src/mcp_server_fetch/server.py b/src/fetch/src/mcp_server_fetch/server.py
index ef029a491..b3f670ce9 100644
--- a/src/fetch/src/mcp_server_fetch/server.py
+++ b/src/fetch/src/mcp_server_fetch/server.py
@@ -7,6 +7,7 @@
 from mcp.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
+    ErrorData,
     GetPromptResult,
     Prompt,
     PromptArgument,
@@ -79,15 +80,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
                 headers={"User-Agent": user_agent},
             )
         except HTTPError:
-            raise McpError(
-                INTERNAL_ERROR,
-                f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
-            )
+            raise McpError(ErrorData(
+                code=INTERNAL_ERROR,
+                message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
+            ))
         if response.status_code in (401, 403):
-            raise McpError(
-                INTERNAL_ERROR,
-                f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
-            )
+            raise McpError(ErrorData(
+                code=INTERNAL_ERROR,
+                message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
+            ))
         elif 400 <= response.status_code < 500:
             return
         robot_txt = response.text
@@ -96,7 +97,7 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
     )
     robot_parser = Protego.parse(processed_robot_txt)
     if not robot_parser.can_fetch(str(url), user_agent):
-        raise McpError(
-            INTERNAL_ERROR,
-            f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
+        raise McpError(ErrorData(
+            code=INTERNAL_ERROR,
+            message=f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
             f"<useragent>{user_agent}</useragent>\n"
@@ -104,7 +105,7 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
             f"<robots>\n{robot_txt}\n</robots>\n"
             f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
             f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
-        )
+        ))
 
 
 async def fetch_url(
@@ -124,12 +125,12 @@ async def fetch_url(
                 timeout=30,
             )
         except HTTPError as e:
-            raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
+            raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}"))
         if response.status_code >= 400:
-            raise McpError(
-                INTERNAL_ERROR,
-                f"Failed to fetch {url} - status code {response.status_code}",
-            )
+            raise McpError(ErrorData(
+                code=INTERNAL_ERROR,
+                message=f"Failed to fetch {url} - status code {response.status_code}",
+            ))
 
         page_raw = response.text
 
@@ -221,11 +222,11 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
         try:
             args = Fetch(**arguments)
         except ValueError as e:
-            raise McpError(INVALID_PARAMS, str(e))
+            raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e)))
 
         url = str(args.url)
         if not url:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))
 
         if not ignore_robots_txt:
             await check_may_autonomously_fetch_url(url, user_agent_autonomous)
@@ -253,7 +254,7 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
     @server.get_prompt()
     async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
         if not arguments or "url" not in arguments:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))
 
         url = arguments["url"]