mcp_utils.py to maintain a JSON of all known MCP servers

Unit tests for mcp_utils.py
Vinod Jayachandran 2025-03-26 21:03:55 +05:30 committed by vinodjayachandran
parent 2adb2fc6a5
commit 93b96ed1fb
3 changed files with 635 additions and 15 deletions

litellm/experimental_mcp_client/mcp_utils.py
@@ -0,0 +1,103 @@
"""
Utility functions for Model Context Protocol (MCP) server management.
"""
import os
import json
import requests
import re
from typing import Any, Dict, List, Optional
def extract_json_from_markdown(markdown_content: str) -> List[Dict[str, Any]]:
"""
Extracts JSON configurations from markdown content by looking for fenced ```json code blocks.
Args:
markdown_content (str): The markdown content to parse
Returns:
List[Dict[str, Any]]: Parsed JSON objects, or an empty list if none are found
"""
# Use a regex to find all JSON code blocks
json_blocks = re.findall(r"```json\s*([\s\S]*?)```", markdown_content, re.DOTALL)
extracted_jsons = []
for block in json_blocks:
try:
# Attempt to parse each JSON block
extracted_jsons.append(json.loads(block.strip()))
except json.JSONDecodeError:
continue # Skip invalid JSON blocks
return extracted_jsons
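# Illustrative example (editorial note, not executed code): given markdown that
# contains a block like ```json {"mcpServers": {"demo": {"command": "npx"}}} ```,
# this function returns [{"mcpServers": {"demo": {"command": "npx"}}}];
# blocks that fail json.loads are skipped silently.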
def fetch_mcp_servers() -> List[Dict[str, Any]]:
"""
Fetches MCP server configurations from the modelcontextprotocol/servers repository
and returns them in a standardized format.
Scans each src/<server>/README.md for fenced JSON blocks and collects the value of the "mcpServers" key.
Returns:
List[Dict[str, Any]]: List of server configurations
"""
base_url = "https://api.github.com/repos/modelcontextprotocol/servers/contents/src"
headers = {"Accept": "application/vnd.github.v3+json"}
try:
# Get list of files in the src directory
response = requests.get(base_url, headers=headers)
response.raise_for_status()
server_configs = []
for item in response.json():
if item["type"] != "dir": # Skip non-directory items
continue
# Get the README.md content
readme_url = f"https://raw.githubusercontent.com/modelcontextprotocol/servers/main/src/{item['name']}/README.md"
readme_response = requests.get(readme_url)
if readme_response.status_code != 200:
continue
# Extract JSON configuration from the README
config = extract_json_from_markdown(readme_response.text)
# Iterate over each JSON object in the list
for json_obj in config:
if isinstance(json_obj, dict): # Ensure it's a dictionary
for key, value in json_obj.items():
if key == "mcpServers" and isinstance(value, dict):
server_configs.append(value)
break
return server_configs
except requests.RequestException as e:
print(f"Error fetching MCP servers: {e}")
return []
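# Shape of the returned list (illustrative): each element is the value of a
# README's "mcpServers" key, i.e. a mapping of server name to launch config:
#   [{"brave-search": {"command": "docker", "args": ["run", ...], "env": {...}}}, ...]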
def update_mcp_servers_file(output_file: Optional[str] = None) -> None:
"""
Updates the MCP servers JSON file with the latest configurations.
Args:
output_file (Optional[str]): Path to the output JSON file. Defaults to mcp_servers.json in the repository root.
"""
# Determine the root directory of the repository
root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../"))
output_file = output_file or os.path.join(root_dir, "mcp_servers.json")
servers = fetch_mcp_servers()
if servers:
with open(output_file, 'w') as f:
json.dump(servers, f, indent=2)
print(f"Successfully updated {output_file} with {len(servers)} server configurations")
else:
print("No server configurations were fetched. File not updated.")
if __name__ == "__main__":
# Update the MCP servers file in the root directory of the repository
update_mcp_servers_file()
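As an illustration (not part of the commit), a minimal sketch of how the module above might be driven directly; the import path mirrors the one used by the unit tests further down, and ./mcp_servers.json is an arbitrary output location chosen for the example:

```python
import json
from pathlib import Path

from litellm.experimental_mcp_client.mcp_utils import update_mcp_servers_file

# Regenerate the known-servers file at an explicit location.
path = Path("./mcp_servers.json")
update_mcp_servers_file(str(path))

# Each entry maps a server name to its launch configuration.
if path.exists():
    for entry in json.loads(path.read_text()):
        for name, config in entry.items():
            print(name, config.get("command"))
```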

mcp_servers.json
@@ -1,16 +1,382 @@
[
{
"aws-kb-retrieval": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"AWS_ACCESS_KEY_ID",
"-e",
"AWS_SECRET_ACCESS_KEY",
"-e",
"AWS_REGION",
"mcp/aws-kb-retrieval-server"
],
"env": {
"AWS_ACCESS_KEY_ID": "YOUR_ACCESS_KEY_HERE",
"AWS_SECRET_ACCESS_KEY": "YOUR_SECRET_ACCESS_KEY_HERE",
"AWS_REGION": "YOUR_AWS_REGION_HERE"
}
}
},
{
"aws-kb-retrieval": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-aws-kb-retrieval"
],
"env": {
"AWS_ACCESS_KEY_ID": "YOUR_ACCESS_KEY_HERE",
"AWS_SECRET_ACCESS_KEY": "YOUR_SECRET_ACCESS_KEY_HERE",
"AWS_REGION": "YOUR_AWS_REGION_HERE"
}
}
},
{
"brave-search": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"BRAVE_API_KEY",
"mcp/brave-search"
],
"env": {
"BRAVE_API_KEY": "YOUR_API_KEY_HERE"
}
}
},
{
"brave-search": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-brave-search"
],
"env": {
"BRAVE_API_KEY": "YOUR_API_KEY_HERE"
}
}
},
{
"everart": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"EVERART_API_KEY",
"mcp/everart"
],
"env": {
"EVERART_API_KEY": "your_key_here"
}
}
},
{
"everart": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-everart"
],
"env": {
"EVERART_API_KEY": "your_key_here"
}
}
},
{
"everything": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-everything"
]
}
},
{
"filesystem": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"--mount",
"type=bind,src=/Users/username/Desktop,dst=/projects/Desktop",
"--mount",
"type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro",
"--mount",
"type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt",
"mcp/filesystem",
"/projects"
]
}
},
{
"filesystem": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"/Users/username/Desktop",
"/path/to/other/allowed/dir"
]
}
},
{
"gdrive": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-v",
"mcp-gdrive:/gdrive-server",
"-e",
"GDRIVE_CREDENTIALS_PATH=/gdrive-server/credentials.json",
"mcp/gdrive"
]
}
},
{
"gdrive": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-gdrive"
]
}
},
{
"git": {
"command": "docker",
"args": [
"run",
"--rm",
"-i",
"--mount",
"type=bind,src=/Users/username/Desktop,dst=/projects/Desktop",
"--mount",
"type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro",
"--mount",
"type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt",
"mcp/git"
]
}
},
{
"github": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"GITHUB_PERSONAL_ACCESS_TOKEN",
"mcp/github"
],
"env": {
"GITHUB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>"
}
}
},
{
"github": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-github"
],
"env": {
"GITHUB_PERSONAL_ACCESS_TOKEN": "<YOUR_TOKEN>"
}
}
},
{
"google-maps": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"GOOGLE_MAPS_API_KEY",
"mcp/google-maps"
],
"env": {
"GOOGLE_MAPS_API_KEY": "<YOUR_API_KEY>"
}
}
},
{
"google-maps": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-google-maps"
],
"env": {
"GOOGLE_MAPS_API_KEY": "<YOUR_API_KEY>"
}
}
},
{
"memory": {
"command": "docker",
"args": [
"run",
"-i",
"-v",
"claude-memory:/app/dist",
"--rm",
"mcp/memory"
]
}
},
{
"memory": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-memory"
]
}
},
{
"memory": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-memory"
],
"env": {
"MEMORY_FILE_PATH": "/path/to/custom/memory.json"
}
}
},
{
"postgres": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"mcp/postgres",
"postgresql://host.docker.internal:5432/mydb"
]
}
},
{
"postgres": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-postgres",
"postgresql://localhost/mydb"
]
}
},
{
"puppeteer": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"--init",
"-e",
"DOCKER_CONTAINER=true",
"mcp/puppeteer"
]
}
},
{
"puppeteer": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-puppeteer"
]
}
},
{
"redis": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"mcp/redis",
"redis://host.docker.internal:6379"
]
}
},
{
"redis": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-redis",
"redis://localhost:6379"
]
}
},
{
"sequential-thinking": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-sequential-thinking"
]
}
},
{
"sequentialthinking": {
"command": "docker",
"args": [
"run",
"--rm",
"-i",
"mcp/sequentialthinking"
]
}
},
{
"slack": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-slack"
],
"env": {
"SLACK_BOT_TOKEN": "xoxb-your-bot-token",
"SLACK_TEAM_ID": "T01234567"
}
}
},
{
"slack": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"SLACK_BOT_TOKEN",
"-e",
"SLACK_TEAM_ID",
"mcp/slack"
],
"env": {
"SLACK_BOT_TOKEN": "xoxb-your-bot-token",
"SLACK_TEAM_ID": "T01234567"
}
}
}
]
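A minimal sketch (not part of the commit) of how a client could consume this file; find_server is a hypothetical helper, and the file is assumed to be readable from the current working directory:

```python
import json
from typing import Any, Dict, Optional


def find_server(name: str, path: str = "mcp_servers.json") -> Optional[Dict[str, Any]]:
    """Return the first launch configuration registered under `name`, if any."""
    with open(path) as f:
        entries = json.load(f)  # list of {server_name: config} objects
    for entry in entries:
        if name in entry:
            return entry[name]
    return None


# Example: assemble the launch command for the brave-search server.
config = find_server("brave-search")
if config is not None:
    print([config["command"], *config.get("args", [])])
```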

@@ -0,0 +1,151 @@
import pytest
import requests
from unittest.mock import patch, mock_open
from litellm.experimental_mcp_client.mcp_utils import extract_json_from_markdown, fetch_mcp_servers, update_mcp_servers_file
def test_extract_json_from_markdown_valid():
markdown_content = '''
# Test Markdown
Here's some JSON:
```json
{
"name": "test-server",
"url": "http://test.com"
}
```
And another one:
```json
{
"name": "test-server-2",
"url": "http://test2.com"
}
```
'''
result = extract_json_from_markdown(markdown_content)
assert len(result) == 2
assert result[0]["name"] == "test-server"
assert result[1]["name"] == "test-server-2"
def test_extract_json_from_markdown_invalid():
markdown_content = '''
# Test Markdown
Here's some invalid JSON:
```json
{
"name": "test-server",
invalid json here
}
```
And valid JSON:
```json
{
"name": "test-server-2",
"url": "http://test2.com"
}
```
'''
result = extract_json_from_markdown(markdown_content)
assert len(result) == 1
assert result[0]["name"] == "test-server-2"
def test_extract_json_from_markdown_empty():
markdown_content = "# No JSON here"
result = extract_json_from_markdown(markdown_content)
assert len(result) == 0
@pytest.fixture
def mock_github_response():
return [
{"type": "dir", "name": "server1"},
{"type": "file", "name": "something.txt"},
{"type": "dir", "name": "server2"}
]
@pytest.fixture
def mock_readme_content():
return '''
# Server Config
```json
{
"name": "test-server",
"url": "http://test.com"
}
```
'''
@pytest.fixture
def mock_mcp_readme_content():
return '''
# Server Config
```json
{
"mcpServers": {
"brave-search": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"BRAVE_API_KEY",
"mcp/brave-search"
],
"env": {
"BRAVE_API_KEY": "YOUR_API_KEY_HERE"
}
}
}
}
```
'''
@patch('requests.get')
def test_fetch_mcp_servers_success(mock_get, mock_github_response, mock_mcp_readme_content, mock_readme_content):
# Mock the responses
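# Each fake response below is a bare class whose attributes stand in for a
# requests.Response; the zero-argument lambdas work because they are looked
# up on the class itself, so no `self` argument is passed.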
mock_get.side_effect = [
type('Response', (), {
'json': lambda: mock_github_response,
'raise_for_status': lambda: None,
'status_code': 200
}),
type('Response', (), {
'text': mock_mcp_readme_content,
'status_code': 200
}),
type('Response', (), {
'text': mock_readme_content,
'status_code': 200
})
]
result = fetch_mcp_servers()
assert len(result) == 1
assert all(isinstance(server, dict) for server in result)
assert mock_get.call_count == 3 # One for base URL, two for READMEs
@patch('requests.get')
def test_fetch_mcp_servers_request_error(mock_get):
mock_get.side_effect = requests.RequestException("Connection error")
result = fetch_mcp_servers()
assert result == []
@patch('builtins.open', new_callable=mock_open)
@patch('json.dump')
@patch('litellm.experimental_mcp_client.mcp_utils.fetch_mcp_servers')
def test_update_mcp_servers_file_success(mock_fetch, mock_json_dump, mock_file):
mock_servers = [
{"name": "server1", "url": "http://test1.com"},
{"name": "server2", "url": "http://test2.com"}
]
mock_fetch.return_value = mock_servers
update_mcp_servers_file("test_output.json")
mock_file.assert_called_once_with("test_output.json", 'w')
mock_json_dump.assert_called_once_with(mock_servers, mock_file(), indent=2)
@patch('builtins.open', new_callable=mock_open)
@patch('litellm.experimental_mcp_client.mcp_utils.fetch_mcp_servers')
def test_update_mcp_servers_file_no_servers(mock_fetch, mock_file):
mock_fetch.return_value = []
update_mcp_servers_file("test_output.json")
# No file should be written when no servers are fetched
mock_file.assert_not_called()
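One branch the suite leaves unexercised is the isinstance(json_obj, dict) guard in fetch_mcp_servers. A possible additional case, sketched in the same mocking style and relying on the imports already at the top of the file, would feed a README whose only JSON block parses to a list:

```python
@patch('requests.get')
def test_fetch_mcp_servers_ignores_non_dict_json(mock_get):
    # README whose only JSON block parses to a list rather than a dict.
    list_readme = '```json\n["not", "a", "dict"]\n```'
    mock_get.side_effect = [
        type('Response', (), {
            'json': lambda: [{"type": "dir", "name": "server1"}],
            'raise_for_status': lambda: None,
            'status_code': 200
        }),
        type('Response', (), {'text': list_readme, 'status_code': 200})
    ]
    # The list is parsed but ignored, so no configurations are collected.
    assert fetch_mcp_servers() == []
```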