Complete support for all Model Context Protocol features
mcp-agent provides first-class support for all MCP primitives: tools, resources, prompts, and roots. This enables seamless integration with any MCP server and client.
Agents automatically get access to all MCP capabilities from connected servers:
Copy
Ask AI
from mcp_agent.app import MCPApp
from mcp_agent.agents.agent import Agent

app = MCPApp(name="mcp_demo")


async def demonstrate_mcp():
    """Enumerate every MCP primitive (tools, resources, prompts, roots)
    exposed by the servers this agent is connected to."""
    async with app.run():
        # The agent aggregates capabilities from all listed servers.
        agent = Agent(
            name="mcp_agent",
            instruction="Use all available MCP capabilities.",
            server_names=["filesystem", "database", "api"],
        )

        async with agent:
            # Tools
            tool_result = await agent.list_tools()
            print(f"Available tools: {[t.name for t in tool_result.tools]}")

            # Resources
            resource_result = await agent.list_resources()
            print(f"Available resources: {[r.uri for r in resource_result.resources]}")

            # Prompts
            prompt_result = await agent.list_prompts()
            print(f"Available prompts: {[p.name for p in prompt_result.prompts]}")

            # Roots
            root_result = await agent.list_roots()
            print(f"Available roots: {[r.uri for r in root_result.roots]}")
# Call a tool from an MCP serverresult = await agent.call_tool( "read_file", arguments={"path": "/data/config.json"})print(f"File content: {result.content}")# Tools are automatically available to LLMsllm = await agent.attach_llm(OpenAIAugmentedLLM)response = await llm.generate_str( "Read the config file and summarize its settings")
Roots provide file system access with proper permissions:
Copy
Ask AI
# List files in a rootfiles = await agent.list_root_contents("workspace://project")# Read file from rootcontent = await agent.read_root_file( "workspace://project/src/main.py")
Example: Document Analysis with Multiple MCP Servers
Copy
Ask AI
from mcp_agent.app import MCPApp
from mcp_agent.agents.agent import Agent
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM

app = MCPApp(name="document_analyzer")


async def analyze_documents():
    """Run a multi-step document analysis that spans three MCP servers:
    filesystem (read), postgres (store), and web_search (enrich)."""
    async with app.run():
        # One agent, three servers — the LLM decides which tool to use per step.
        agent = Agent(
            name="analyst",
            instruction="""You are a document analyst.
            Use filesystem to read documents,
            database to store findings,
            and web APIs to enrich data.""",
            server_names=["filesystem", "postgres", "web_search"],
        )

        async with agent:
            llm = await agent.attach_llm(OpenAIAugmentedLLM)

            # A single prompt drives the whole pipeline; each numbered step
            # maps onto capabilities from a different MCP server.
            result = await llm.generate_str("""
            1. Read all PDF files in /documents folder
            2. Extract key metrics from each document
            3. Store findings in the analysis_results table
            4. Search web for industry benchmarks
            5. Generate comparative analysis report
            """)

            print(result)
async def combined_mcp_features():
    """Render a server-side prompt template with an embedded resource,
    then feed the combined text to an LLM."""
    async with app.run():
        agent = Agent(
            name="researcher",
            server_names=["research_server"],
        )

        async with agent:
            # create_prompt expands the template AND inlines the resource
            # at the given URI, so the LLM sees both in one message.
            analysis = await agent.create_prompt(
                prompt_name="analyze_with_context",
                arguments={"style": "detailed"},
                resource_uri="research://papers/latest.pdf",
            )

            llm = await agent.attach_llm(OpenAIAugmentedLLM)
            return await llm.generate_str(analysis)
from mcp_agent.mcp.gen_client import gen_client


async def connect_dynamically():
    """Spin up and talk to an MCP server that is NOT declared in the
    app configuration, supplying its launch command inline."""
    server_params = {
        "command": "npx",
        "args": ["@modelcontextprotocol/server-everything"],
    }

    # gen_client launches the process and tears it down on exit.
    async with gen_client("dynamic_server", server_params) as server:
        available = await server.list_tools()
        outcome = await server.call_tool(
            "example_tool",
            arguments={"param": "value"},
        )
# Fix: this snippet used gen_client without importing it (the import only
# appeared in a separate, unrelated example).
from mcp_agent.mcp.gen_client import gen_client


async def discover_servers():
    """Scan a local port range for running MCP servers, then connect to
    each one found and list its tools.

    NOTE(review): `scan_for_mcp_servers` is not imported or defined in this
    example — confirm its import path against the mcp-agent API before use.
    """
    # Probe ports 5000-6000, giving each candidate 5 seconds to respond.
    servers = await scan_for_mcp_servers(
        port_range=(5000, 6000),
        timeout=5,
    )

    for server_info in servers:
        print(f"Found: {server_info.name} at {server_info.url}")

        # Connect and explore each discovered server.
        async with gen_client(server_info.name, server_info.params) as server:
            tools = await server.list_tools()
            print(f"  Tools: {[t.name for t in tools.tools]}")