Files
speedtest/mcp_server.py

162 lines
6.3 KiB
Python

import json
import logging
import re
import subprocess
import xml.etree.ElementTree as ET
from typing import Optional

import httpx

from mcp.server.fastmcp import FastMCP
# Initialize FastMCP server
mcp = FastMCP("Speedtest")
@mcp.tool()
async def list_servers(search: str = None) -> str:
"""
List available speedtest servers globally.
Args:
search: Optional search term to filter servers (e.g. "Jakarta", "Singapore", "US").
If omitted, returns a small sample of servers.
Returns:
JSON string of available servers.
"""
url = "https://www.speedtest.net/speedtest-servers-static.php"
try:
# Fetch XML list
async with httpx.AsyncClient() as client:
response = await client.get(url)
response.raise_for_status()
root = ET.fromstring(response.content)
results = []
# XML structure: <settings><servers><server .../><server .../></servers></settings>
# or sometimes just <settings><server .../></settings> depending on version,
# but usually it's nested in servers.
servers_list = root.find("servers")
if servers_list is None:
# Fallback if structure is flat
iterator = root.findall("server")
else:
iterator = servers_list.findall("server")
for server in iterator:
# Attributes: url, lat, lon, name, country, cc, sponsor, id, host
data = server.attrib
if search:
search_lower = search.lower()
# Search in country, name, Sponsor
if (search_lower in data.get('country', '').lower() or
search_lower in data.get('name', '').lower() or
search_lower in data.get('sponsor', '').lower()):
results.append(data)
else:
# If no search, we don't want to return 8000 servers. Return top 20.
if len(results) < 20:
results.append(data)
if len(results) > 50:
results = results[:50]
# Hardcoded International Servers (Curated for Bandwidth Testing)
# These will always be appended to search results or default list
international_servers = [
{"id": "13623", "name": "Singtel", "country": "Singapore", "sponsor": "Singtel", "host": "Singapore"},
{"id": "4871", "name": "M1 Limited", "country": "Singapore", "sponsor": "M1", "host": "Singapore"},
{"id": "60667", "name": "DigitalOcean", "country": "Singapore", "sponsor": "DigitalOcean", "host": "Singapore"},
{"id": "21569", "name": "Google Cloud", "country": "Japan", "sponsor": "Google", "host": "Tokyo"},
{"id": "15047", "name": "AT&T", "country": "United States", "sponsor": "AT&T", "host": "New York, NY"},
{"id": "18335", "name": "Cloudflare", "country": "United States", "sponsor": "Cloudflare", "host": "San Francisco, CA"},
]
# Merge international servers (avoid duplicates)
existing_ids = {s.get('id') for s in results}
for s in international_servers:
if str(s['id']) not in existing_ids:
# Simple filter matching
if not search:
results.append(s)
elif search:
search_lower = search.lower()
if (search_lower in s['country'].lower() or
search_lower in s['name'].lower() or
search_lower in s['sponsor'].lower()):
results.append(s)
return json.dumps(results, indent=2)
except Exception as e:
return f"Error listing servers: {str(e)}"
@mcp.tool()
def run_speedtest(server_id: int = None) -> str:
"""
Run a speedtest.
Args:
server_id: Optional ID of the server to test against. If omitted, uses auto-selection.
Returns:
JSON string containing the speedtest results (download, upload, ping, etc).
"""
cmd = ["/usr/bin/speedtest", "--accept-license", "--accept-gdpr"]
if server_id:
cmd.extend(["-s", str(server_id)])
try:
# Run in standard text mode (JSON mode is flaky for remote servers)
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
output = result.stdout
# Parse text output with Regex
import re
# Example output:
# Download: 123.45 Mbps
# Upload: 12.34 Mbps
# Idle Latency: 10.50 ms
# Result URL: https://...
data = {
"download": {"bandwidth": 0, "unit": "Mbps"},
"upload": {"bandwidth": 0, "unit": "Mbps"},
"ping": {"latency": 0, "unit": "ms"},
"result": {"url": ""}
}
# Extract Download
dl_match = re.search(r"Download:\s+([\d\.]+)\s+Mbps", output)
if dl_match:
# MCP expects structured data. Note: Standard JSON output uses bytes/sec.
# Here we keep Mbps but structure it similar to official JSON for consistency if client parses it.
# But simpler is better: Just return clear values.
data["download"]["bandwidth"] = float(dl_match.group(1))
# Extract Upload
ul_match = re.search(r"Upload:\s+([\d\.]+)\s+Mbps", output)
if ul_match:
data["upload"]["bandwidth"] = float(ul_match.group(1))
# Extract Latency (Idle Latency or just Latency)
ping_match = re.search(r"(?:Idle )?Latency:\s+([\d\.]+)\s+ms", output)
if ping_match:
data["ping"]["latency"] = float(ping_match.group(1))
# Extract URL
url_match = re.search(r"Result URL:\s+(https?://\S+)", output)
if url_match:
data["result"]["url"] = url_match.group(1)
# Raw parsed text for debugging if needed
data["raw_text"] = output
return json.dumps(data, indent=2)
except subprocess.CalledProcessError as e:
return f"Error running speedtest: {e.stderr or e.stdout}"
except Exception as e:
return f"Error: {str(e)}"
if __name__ == "__main__":
# fastmcp runs on stdio by default when called this way
mcp.run()