@@ -7,6 +7,7 @@
 from mcp.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
+    ErrorData,
     GetPromptResult,
     Prompt,
     PromptArgument,
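
The new ErrorData import supports a changed McpError constructor: the SDK now expects one structured payload rather than positional (code, message) arguments. A minimal sketch of the pattern the rest of this commit applies, assuming McpError is importable from mcp.shared.exceptions and that ErrorData exposes code and message fields:

# Minimal sketch of the migrated error shape (assumed SDK layout).
from mcp.shared.exceptions import McpError
from mcp.types import INTERNAL_ERROR, ErrorData

def fail(reason: str) -> None:
    # Before this commit: raise McpError(INTERNAL_ERROR, reason)
    # After: code and message travel inside a single ErrorData payload.
    raise McpError(ErrorData(code=INTERNAL_ERROR, message=reason))
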
@@ -79,15 +80,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
                 headers={"User-Agent": user_agent},
             )
         except HTTPError:
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
-            )
+            ))
         if response.status_code in (401, 403):
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
-            )
+            ))
         elif 400 <= response.status_code < 500:
             return
         robot_txt = response.text
@@ -96,15 +97,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
         )
         robot_parser = Protego.parse(processed_robot_txt)
         if not robot_parser.can_fetch(str(url), user_agent):
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
                 f"<useragent>{user_agent}</useragent>\n"
                 f"<url>{url}</url>"
                 f"<robots>\n{robot_txt}\n</robots>\n"
                 f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
                 f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
-            )
+            ))
 
 
 async def fetch_url(
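
For context on what the wrapping buys the caller: the structured payload rides on the exception, so a handler can read the code and message back off it. A hedged sketch, assuming McpError keeps the wrapped ErrorData on an error attribute and that this runs inside the server module where check_may_autonomously_fetch_url is defined:

# Hypothetical caller-side handling of the structured error.
import asyncio

from mcp.shared.exceptions import McpError

async def main() -> None:
    try:
        await check_may_autonomously_fetch_url("https://example.com/page", "test-agent")
    except McpError as exc:
        # `exc.error` is assumed to be the ErrorData passed at raise time.
        print(exc.error.code, exc.error.message)

asyncio.run(main())
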
@@ -124,12 +125,12 @@ async def fetch_url(
                 timeout=30,
             )
         except HTTPError as e:
-            raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
+            raise McpError(ErrorData(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}"))
         if response.status_code >= 400:
-            raise McpError(
+            raise McpError(ErrorData(
                 INTERNAL_ERROR,
                 f"Failed to fetch {url} - status code {response.status_code}",
-            )
+            ))
 
         page_raw = response.text
 
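
fetch_url applies the same wrapping to transport failures and HTTP error statuses. A self-contained sketch of the shape this hunk lands in, assuming httpx (which the server already uses):

# Standalone sketch of the fetch-and-wrap pattern.
import httpx

from mcp.shared.exceptions import McpError
from mcp.types import INTERNAL_ERROR, ErrorData

async def fetch_text(url: str, user_agent: str) -> str:
    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(
                url,
                follow_redirects=True,
                headers={"User-Agent": user_agent},
                timeout=30,
            )
    except httpx.HTTPError as e:
        # Transport-level failure becomes a structured MCP error.
        raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}"))
    if response.status_code >= 400:
        raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url} - status code {response.status_code}"))
    return response.text
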
@@ -221,11 +222,11 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
         try:
             args = Fetch(**arguments)
         except ValueError as e:
-            raise McpError(INVALID_PARAMS, str(e))
+            raise McpError(ErrorData(INVALID_PARAMS, str(e)))
 
         url = str(args.url)
         if not url:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))
 
         if not ignore_robots_txt:
             await check_may_autonomously_fetch_url(url, user_agent_autonomous)
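
The validation path gets the same treatment. Pydantic's ValidationError subclasses ValueError, so the except clause above turns malformed tool arguments into a JSON-RPC invalid-params error. A sketch with a hypothetical stand-in model:

# Sketch of the validation-to-McpError translation; `Args` is a
# hypothetical stand-in for the server's Fetch model.
from pydantic import BaseModel

from mcp.shared.exceptions import McpError
from mcp.types import INVALID_PARAMS, ErrorData

class Args(BaseModel):
    url: str

def parse_args(arguments: dict) -> Args:
    try:
        return Args(**arguments)
    except ValueError as e:  # pydantic ValidationError is a ValueError
        raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e)))
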
@@ -253,7 +254,7 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
     @server.get_prompt()
     async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
         if not arguments or "url" not in arguments:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))
 
         url = arguments["url"]
 