MCP (Model Context Protocol) primitives are standardized building blocks that enable agents to access structured data and functionality from MCP servers. This guide shows you how to use the three core MCP primitives: resources, prompts, and tools.
First, create an MCP server that exposes resources and prompts:
Copy
Ask AI
from mcp.server.fastmcp import FastMCP

# Demo MCP server exposing one resource and one prompt primitive.
# (Removed unused `import datetime` / `import json` — nothing in this
# script references them.)
mcp = FastMCP("Resource Demo MCP Server")


@mcp.resource("demo://docs/readme")
def get_readme():
    """Provide the README file content."""
    # Returned string is served as the resource body for demo://docs/readme.
    return "# Demo Resource Server\n\nThis is a sample README resource."


@mcp.prompt()
def echo(message: str) -> str:
    """Echo the provided message.

    This is a simple prompt that echoes back the input message.
    """
    return f"Prompt: {message}"


if __name__ == "__main__":
    # Start the server (stdio transport by default).
    mcp.run()
Here’s how to use MCP primitives in your agent application:
Copy
Ask AI
import asyncio

from mcp_agent.app import MCPApp
from mcp_agent.agents.agent import Agent
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM

app = MCPApp(name="mcp_basic_agent")


async def example_usage():
    """Demonstrate listing and combining MCP resources and prompts via an agent."""
    async with app.run() as agent_app:
        logger = agent_app.logger

        # Create an agent connected to the demo server
        agent = Agent(
            name="agent",
            instruction="Demo agent for MCP resource and prompt primitives",
            server_names=["demo_server"],
        )

        async with agent:
            # List all available resources
            resources = await agent.list_resources("demo_server")
            logger.info("Available resources:", data=resources.model_dump())

            # List all available prompts
            prompts = await agent.list_prompts("demo_server")
            logger.info("Available prompts:", data=prompts.model_dump())

            # Get both resource and prompt in a single call
            combined_messages = await agent.create_prompt(
                prompt_name="echo",
                arguments={"message": "My name is John Doe."},
                resource_uris="demo://docs/readme",
                server_names=["demo_server"],
            )

            # Use LLM to process the content
            llm = await agent.attach_llm(OpenAIAugmentedLLM)
            res = await llm.generate_str([
                "Summarise what are my prompts and resources?",
                *combined_messages,
            ])
            logger.info(f"Summary: {res}")


if __name__ == "__main__":
    asyncio.run(example_usage())
# List all resources from a serverresources = await agent.list_resources("demo_server")# List all prompts from a server prompts = await agent.list_prompts("demo_server")# List all tools from a servertools = await agent.list_tools("demo_server")
# Create prompt message with prompt onlyprompt_messages = await agent.create_prompt( prompt_name="echo", arguments={"message": "Hello, world!"}, server_names=["demo_server"])# Create prompt messages with prompts and resourcescombined_messages = await agent.create_prompt( prompt_name="echo", arguments={"message": "My name is John Doe."}, resource_uris="demo://docs/readme", server_names=["demo_server"])