
Commit 53f4a23

Update server implementation and dependencies
1 parent 49489d4

2 files changed: +113 -96 lines changed

src/fetch/src/mcp_server_fetch/server.py (+12 -12)
@@ -81,13 +81,13 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
             )
         except HTTPError:
             raise McpError(ErrorData(
-                INTERNAL_ERROR,
-                f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
+                code=INTERNAL_ERROR,
+                message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
             ))
         if response.status_code in (401, 403):
             raise McpError(ErrorData(
-                INTERNAL_ERROR,
-                f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
+                code=INTERNAL_ERROR,
+                message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
             ))
         elif 400 <= response.status_code < 500:
             return
@@ -98,8 +98,8 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
     robot_parser = Protego.parse(processed_robot_txt)
     if not robot_parser.can_fetch(str(url), user_agent):
         raise McpError(ErrorData(
-            INTERNAL_ERROR,
-            f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
+            code=INTERNAL_ERROR,
+            message=f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
             f"<useragent>{user_agent}</useragent>\n"
             f"<url>{url}</url>"
             f"<robots>\n{robot_txt}\n</robots>\n"
@@ -125,11 +125,11 @@ async def fetch_url(
                 timeout=30,
             )
         except HTTPError as e:
-            raise McpError(ErrorData(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}"))
+            raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}"))
         if response.status_code >= 400:
             raise McpError(ErrorData(
-                INTERNAL_ERROR,
-                f"Failed to fetch {url} - status code {response.status_code}",
+                code=INTERNAL_ERROR,
+                message=f"Failed to fetch {url} - status code {response.status_code}",
             ))

         page_raw = response.text
@@ -222,11 +222,11 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
         try:
             args = Fetch(**arguments)
         except ValueError as e:
-            raise McpError(ErrorData(INVALID_PARAMS, str(e)))
+            raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e)))

         url = str(args.url)
         if not url:
-            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))
+            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))

         if not ignore_robots_txt:
             await check_may_autonomously_fetch_url(url, user_agent_autonomous)
@@ -254,7 +254,7 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
     @server.get_prompt()
     async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
         if not arguments or "url" not in arguments:
-            raise McpError(ErrorData(INVALID_PARAMS, "URL is required"))
+            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))

         url = arguments["url"]
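Every hunk makes the same change: the ErrorData fields are passed by keyword (code=, message=) instead of positionally. A minimal sketch of the updated call shape, assuming the mcp Python SDK's usual import locations (mcp.shared.exceptions.McpError, mcp.types.ErrorData); the fail_fetch helper is hypothetical, for illustration only:

    # Sketch of the keyword-argument error pattern used throughout this diff.
    # Assumes the mcp SDK's import paths; fail_fetch is illustrative only.
    from mcp.shared.exceptions import McpError
    from mcp.types import INTERNAL_ERROR, ErrorData

    def fail_fetch(url: str) -> None:
        # ErrorData is a pydantic model, so its fields must be supplied by
        # keyword; the old positional style raises a TypeError at call time.
        raise McpError(ErrorData(
            code=INTERNAL_ERROR,
            message=f"Failed to fetch {url} due to a connection issue",
        ))

Raising McpError with a populated ErrorData lets the server return a structured JSON-RPC error (code plus message) to the client rather than surfacing an unhandled exception.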
