Skip to content
This repository was archived by the owner on Nov 16, 2025. It is now read-only.

Commit 3a39c8a

Browse files
committed
Add endpoint-specific timeouts and flush events after changes
Introduces per-endpoint timeout configuration in Ghidra client for improved reliability of expensive operations. Ensures event processing and cache refresh in BatchSetComments, SetPlateComment, and RenameFunction handlers by flushing events and adding a short delay after successful changes, improving decompiler cache consistency.
1 parent 39f8c9a commit 3a39c8a

4 files changed

Lines changed: 128 additions & 60 deletions

File tree

bridge_mcp_ghidra/client.py

Lines changed: 94 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,21 @@
1616

1717
ENABLE_CACHING = True
1818

19+
# Per-endpoint timeout configuration for expensive operations.
# Keys are the final path segment of the endpoint URL; values are seconds.
# Anything not listed here uses the 'default' entry.
ENDPOINT_TIMEOUTS = {
    # Comprehensive / atomic documentation (decompile + many writes)
    'document_function_complete': 120,
    # Batch operations that can trigger re-analysis or many decompilations
    'batch_rename_variables': 60,
    'batch_set_comments': 45,
    'analyze_function_complete': 60,
    'batch_decompile_functions': 90,
    'batch_rename_function_components': 60,
    'batch_set_variable_types': 60,
    'analyze_data_region': 60,
    'batch_decompile_xref_sources': 90,
    # Struct creation + application
    'create_and_apply_data_type': 45,
    # Fallback for all other operations
    'default': 30,
}
33+
1934
# Make log level configurable via environment variable (DEBUG, INFO, WARNING, ERROR, CRITICAL)
2035
# Default to INFO for production use
2136
LOG_LEVEL = os.getenv("GHIDRA_MCP_LOG_LEVEL", "INFO")
@@ -122,10 +137,14 @@ def safe_get(self, endpoint: str, params: dict = None, retries: int = 3) -> list
122137

123138
url = urljoin(self.server_url, endpoint)
124139

140+
# Get endpoint-specific timeout
141+
self.timeout = get_timeout_for_endpoint(endpoint)
142+
self.logger.debug(f"Using timeout of {self.timeout}s for endpoint {endpoint}")
143+
125144
for attempt in range(retries):
126145
try:
127146
start_time = time.time()
128-
response = self.session.get(url, params=params, timeout=REQUEST_TIMEOUT)
147+
response = self.session.get(url, params=params, timeout=self.timeout)
129148
response.encoding = 'utf-8'
130149
duration = time.time() - start_time
131150

@@ -186,10 +205,14 @@ def safe_get_uncached(self, endpoint: str, params: dict = None, retries: int = 3
186205

187206
url = urljoin(self.server_url, endpoint)
188207

208+
# Get endpoint-specific timeout
209+
self.timeout = get_timeout_for_endpoint(endpoint)
210+
self.logger.debug(f"Using timeout of {self.timeout}s for endpoint {endpoint}")
211+
189212
for attempt in range(retries):
190213
try:
191214
start_time = time.time()
192-
response = self.session.get(url, params=params, timeout=REQUEST_TIMEOUT)
215+
response = self.session.get(url, params=params, timeout=self.timeout)
193216
response.encoding = 'utf-8'
194217
duration = time.time() - start_time
195218

@@ -228,62 +251,6 @@ def safe_get_uncached(self, endpoint: str, params: dict = None, retries: int = 3
228251

229252
return ["Unexpected error in safe_get_uncached"]
230253

231-
def safe_post_json(self, endpoint: str, data: dict, retries: int = 3) -> str:
232-
"""
233-
Perform a JSON POST request with enhanced error handling and retry logic.
234-
235-
Args:
236-
endpoint: The API endpoint to call
237-
data: Data to send as JSON
238-
retries: Number of retry attempts for server errors
239-
240-
Returns:
241-
String response from the server
242-
"""
243-
# Validate server URL for security
244-
if not validate_server_url(self.server_url):
245-
self.logger.error(f"Invalid or unsafe server URL: {self.server_url}")
246-
return "Error: Invalid server URL - only local addresses allowed"
247-
248-
url = urljoin(self.server_url, endpoint)
249-
250-
for attempt in range(retries):
251-
try:
252-
start_time = time.time()
253-
254-
self.logger.info(f"Sending JSON POST to {url} with data: {data}")
255-
response = self.session.post(url, json=data, timeout=REQUEST_TIMEOUT)
256-
257-
response.encoding = 'utf-8'
258-
duration = time.time() - start_time
259-
260-
self.logger.info(f"JSON POST to {endpoint} took {duration:.2f}s (attempt {attempt + 1}/{retries}), status: {response.status_code}")
261-
262-
if response.ok:
263-
return response.text.strip()
264-
elif response.status_code == 404:
265-
return f"Error: Endpoint {endpoint} not found"
266-
elif response.status_code >= 500:
267-
if attempt < retries - 1: # Only log retry attempts for server errors
268-
self.logger.warning(f"Server error {response.status_code} on attempt {attempt + 1}, retrying...")
269-
time.sleep(1) # Brief delay before retry
270-
continue
271-
else:
272-
return f"Error: Server error {response.status_code} after {retries} attempts"
273-
else:
274-
return f"Error: HTTP {response.status_code} - {response.text}"
275-
276-
except requests.RequestException as e:
277-
if attempt < retries - 1:
278-
self.logger.warning(f"Request failed on attempt {attempt + 1}, retrying: {e}")
279-
time.sleep(1)
280-
continue
281-
else:
282-
self.logger.error(f"Request failed after {retries} attempts: {e}")
283-
return f"Error: Request failed - {str(e)}"
284-
285-
return "Error: Maximum retries exceeded"
286-
287254
def safe_post(self, endpoint: str, data: dict | str, retries: int = 3) -> str:
288255
"""
289256
Perform a POST request with enhanced error handling and retry logic.
@@ -351,6 +318,66 @@ def safe_post(self, endpoint: str, data: dict | str, retries: int = 3) -> str:
351318
return f"Unexpected error: {str(e)}"
352319

353320
return "Unexpected error in safe_post"
321+
322+
def safe_post_json(self, endpoint: str, data: dict, retries: int = 3) -> str:
    """
    Perform a JSON POST request with enhanced error handling and retry logic.

    Args:
        endpoint: The API endpoint to call
        data: Data to send as JSON
        retries: Number of retry attempts for server errors

    Returns:
        String response from the server
    """
    # Refuse to send anything to a non-validated (non-local) server URL.
    if not validate_server_url(self.server_url):
        self.logger.error(f"Invalid or unsafe server URL: {self.server_url}")
        return "Error: Invalid server URL - only local addresses allowed"

    url = urljoin(self.server_url, endpoint)

    # Expensive endpoints get a longer per-endpoint timeout.
    self.timeout = get_timeout_for_endpoint(endpoint)
    self.logger.debug(f"Using timeout of {self.timeout}s for endpoint {endpoint}")

    for attempt in range(retries):
        final_attempt = attempt == retries - 1
        try:
            start_time = time.time()

            self.logger.info(f"Sending JSON POST to {url} with data: {data}")
            response = self.session.post(url, json=data, timeout=self.timeout)

            response.encoding = 'utf-8'
            duration = time.time() - start_time

            self.logger.info(f"JSON POST to {endpoint} took {duration:.2f}s (attempt {attempt + 1}/{retries}), status: {response.status_code}")

            if response.ok:
                return response.text.strip()
            if response.status_code == 404:
                return f"Error: Endpoint {endpoint} not found"
            if response.status_code < 500:
                # Non-retryable client-side error.
                return f"Error: HTTP {response.status_code} - {response.text}"

            # 5xx: retry with a brief delay unless we are out of attempts.
            if final_attempt:
                return f"Error: Server error {response.status_code} after {retries} attempts"
            self.logger.warning(f"Server error {response.status_code} on attempt {attempt + 1}, retrying...")
            time.sleep(1)

        except requests.RequestException as e:
            # Transport-level failure: retry unless this was the last attempt.
            if final_attempt:
                self.logger.error(f"Request failed after {retries} attempts: {e}")
                return f"Error: Request failed - {str(e)}"
            self.logger.warning(f"Request failed on attempt {attempt + 1}, retrying: {e}")
            time.sleep(1)

    return "Error: Maximum retries exceeded"
354381

355382
def cache_key(*args: Any, **kwargs: Any) -> str:
    """
    Build a stable cache key from arbitrary call arguments.

    Returns:
        MD5 hash of serialized arguments
    """
    serialized = json.dumps(
        {"args": args, "kwargs": kwargs},
        sort_keys=True,
        default=str,
    )
    return hashlib.md5(serialized.encode()).hexdigest()
364392

393+
def get_timeout_for_endpoint(endpoint: str) -> int:
    """Return the request timeout in seconds for a specific endpoint.

    The final path segment of *endpoint* is looked up in
    ENDPOINT_TIMEOUTS; unknown endpoints fall back to the 'default'
    entry so every request has a bounded timeout.
    """
    # Drop any query string first so 'foo?x=1' still resolves to 'foo'
    # (otherwise such endpoints silently got the default timeout).
    path = endpoint.split('?', 1)[0]
    endpoint_name = path.strip('/').split('/')[-1]
    return ENDPOINT_TIMEOUTS.get(endpoint_name, ENDPOINT_TIMEOUTS['default'])
399+
365400
def validate_server_url(url: str) -> bool:
366401
"""Validate that the server URL is safe to use"""
402+
367403
try:
368404
parsed = urlparse(url)
369405
# Only allow HTTP/HTTPS protocols

src/main/java/com/lauriewired/handlers/comments/BatchSetComments.java

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -120,6 +120,17 @@ private String batchSetComments(String functionAddress, List<Map<String, String>
120120
}
121121
});
122122

123+
// Force event processing to ensure changes propagate to decompiler cache
124+
if (success.get()) {
125+
program.flushEvents();
126+
// Small delay to ensure decompiler cache refresh
127+
try {
128+
Thread.sleep(50);
129+
} catch (InterruptedException e) {
130+
Thread.currentThread().interrupt();
131+
}
132+
}
133+
123134
if (success.get()) {
124135
result.append("\"success\": true, ");
125136
result.append("\"decompiler_comments_set\": ").append(decompilerCount.get()).append(", ");

src/main/java/com/lauriewired/handlers/comments/SetPlateComment.java

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,17 @@ private String setPlateComment(String functionAddress, String comment) {
8585
program.endTransaction(tx, success.get());
8686
}
8787
});
88+
89+
// Force event processing to ensure changes propagate to decompiler cache
90+
if (success.get()) {
91+
program.flushEvents();
92+
// Small delay to ensure decompiler cache refresh
93+
try {
94+
Thread.sleep(50);
95+
} catch (InterruptedException e) {
96+
Thread.currentThread().interrupt();
97+
}
98+
}
8899
} catch (Exception e) {
89100
resultMsg.append("Error: Failed to execute on Swing thread: ").append(e.getMessage());
90101
}

src/main/java/com/lauriewired/handlers/functions/RenameFunction.java

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,11 @@
88
import ghidra.program.model.symbol.SourceType;
99
import ghidra.util.Msg;
1010

11-
import javax.swing.*;
1211
import java.io.IOException;
1312
import java.lang.reflect.InvocationTargetException;
1413
import java.util.Map;
1514
import java.util.concurrent.atomic.AtomicBoolean;
15+
import javax.swing.SwingUtilities;
1616

1717
import static com.lauriewired.util.ParseUtils.parsePostParams;
1818
import static com.lauriewired.util.ParseUtils.sendResponse;
@@ -43,7 +43,7 @@ public RenameFunction(PluginTool tool) {
4343
public void handle(HttpExchange exchange) throws IOException {
4444
Map<String, String> params = parsePostParams(exchange);
4545
String result = renameFunction(params.get("oldName"), params.get("newName"));
46-
sendResponse(exchange, result);
46+
sendResponse(exchange, result);
4747
}
4848

4949
/**
@@ -76,6 +76,16 @@ private boolean rename(String oldName, String newName) {
7676
program.endTransaction(tx, successFlag.get());
7777
}
7878
});
79+
80+
// Force event processing to ensure changes propagate
81+
if (successFlag.get()) {
82+
program.flushEvents();
83+
try {
84+
Thread.sleep(50);
85+
} catch (InterruptedException e) {
86+
Thread.currentThread().interrupt();
87+
}
88+
}
7989
} catch (InterruptedException | InvocationTargetException e) {
8090
Msg.error(this, "Failed to execute rename on Swing thread", e);
8191
}

0 commit comments

Comments
 (0)