The fast, Pythonic way to build MCP servers and clients with minimal boilerplate code.
—
Full-featured MCP client supporting multiple transports, authentication, and advanced features like LLM sampling. The FastMCP client allows you to connect to and interact with any MCP server programmatically.
Main client implementation for connecting to MCP servers with support for multiple transports and authentication methods.
class Client:
    """Full-featured MCP client for connecting to any MCP server.

    Supports multiple transports (stdio, SSE, HTTP — auto-detected from the
    target), authentication (Bearer / OAuth), and advanced features such as
    server-initiated LLM sampling and elicitation.

    NOTE(review): this is an API skeleton reconstructed from documentation;
    method bodies are stubs (`...`).
    """

    def __init__(
        self,
        server_or_config: str | FastMCP | dict,
        transport: ClientTransport | None = None,
        auth: BearerAuth | OAuth | None = None,
        sampling_handler: SamplingHandler | None = None,
        elicitation_handler: ElicitationHandler | None = None,
        timeout: float = 30.0,
    ):
        """Create a client to connect to an MCP server.

        Parameters:
        - server_or_config: Server script path, FastMCP instance, or MCP config
        - transport: Specific transport to use (auto-detected if None)
        - auth: Authentication for secured servers
        - sampling_handler: Handler for server LLM sampling requests
        - elicitation_handler: Handler for server elicitation requests
        - timeout: Request timeout in seconds
        """
        ...

    # --- Tools: list and call tools available on the connected MCP server ---

    async def list_tools(self) -> list[dict]:
        """List all available tools on the server.

        Returns:
            List of tool metadata dictionaries with name, description, and schema
        """
        ...

    async def call_tool(
        self,
        name: str,
        arguments: dict | None = None,
    ) -> ToolResult:
        """Call a tool on the server.

        Parameters:
        - name: Tool name to call
        - arguments: Tool arguments as dictionary

        Returns:
            Tool execution result with text content and optional metadata
        """
        ...

    # --- Resources: list and read resources available on the connected MCP server ---

    async def list_resources(self) -> list[dict]:
        """List all available resources on the server.

        Returns:
            List of resource metadata dictionaries with uri, name, description,
            and mime_type
        """
        ...

    async def read_resource(self, uri: str) -> ResourceResult:
        """Read a resource from the server.

        Parameters:
        - uri: Resource URI to read

        Returns:
            Resource content with text/binary data and metadata
        """
        ...

    async def list_resource_templates(self) -> list[dict]:
        """List all available resource templates on the server.

        Returns:
            List of resource template metadata with uri_template, name,
            and description
        """
        ...

    # --- Prompts: list and get prompts available on the connected MCP server ---

    async def list_prompts(self) -> list[dict]:
        """List all available prompts on the server.

        Returns:
            List of prompt metadata dictionaries with name, description, and schema
        """
        ...

    async def get_prompt(
        self,
        name: str,
        arguments: dict | None = None,
    ) -> PromptResult:
        """Get a prompt from the server.

        Parameters:
        - name: Prompt name to retrieve
        - arguments: Prompt arguments as dictionary

        Returns:
            Prompt result with formatted messages
        """
        ...

    # --- Context manager support for automatic connection lifecycle management ---

    async def __aenter__(self) -> "Client":
        """Enter async context manager."""
        ...

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        """Exit async context manager."""
        ...

    async def close(self) -> None:
        """Close the client connection."""
        ...
from fastmcp import Client

import asyncio


async def main():
    """Demonstrate listing and using tools, resources, and prompts."""
    # Connect to a server via stdio (transport auto-detected from the .py path)
    async with Client("my_server.py") as client:
        # List available capabilities
        tools = await client.list_tools()
        resources = await client.list_resources()
        prompts = await client.list_prompts()
        print(f"Available tools: {[t['name'] for t in tools]}")
        print(f"Available resources: {[r['uri'] for r in resources]}")
        print(f"Available prompts: {[p['name'] for p in prompts]}")

        # Call a tool
        result = await client.call_tool("add", {"a": 5, "b": 3})
        print(f"Tool result: {result.text}")

        # Read a resource
        resource = await client.read_resource("config://version")
        print(f"Resource content: {resource.content}")

        # Get a prompt
        prompt = await client.get_prompt("summarize", {"text": "Hello world"})
        print(f"Prompt messages: {prompt.messages}")


if __name__ == "__main__":
    asyncio.run(main())
from fastmcp import Client


async def stdio_client():
    """Connect via stdio transport (spawns the server as a subprocess)."""
    async with Client("python server.py") as client:
        result = await client.call_tool("hello", {"name": "World"})
        return result.text


async def sse_client():
    """Connect via SSE transport."""
    async with Client("http://localhost:8000/sse") as client:
        tools = await client.list_tools()
        return tools


async def http_client():
    """Connect via HTTP transport."""
    async with Client("http://localhost:8000/mcp") as client:
        resources = await client.list_resources()
        return resources


# Auto-detection also works
async def auto_detect():
    """Client auto-detects transport type from the target string."""
    clients = [
        Client("./server.py"),                # stdio
        Client("http://localhost:8000/sse"),  # SSE
        Client("http://localhost:8000/mcp"),  # HTTP
    ]
    for client in clients:
        async with client:
            tools = await client.list_tools()
            print(f"Connected with {len(tools)} tools")
from fastmcp import Client
from fastmcp.client.auth import BearerAuth, OAuth


async def authenticated_client():
    """Connect to a secured server with a Bearer token."""
    auth = BearerAuth("your-bearer-token")
    async with Client(
        "https://secure-server.com/mcp",
        auth=auth,
    ) as client:
        result = await client.call_tool("secure_operation", {})
        return result.text


async def oauth_client():
    """Connect with OAuth authentication."""
    oauth = OAuth(
        client_id="your-client-id",
        client_secret="your-client-secret",
        token_url="https://auth.example.com/token",
    )
    async with Client(
        "https://api.example.com/mcp",
        auth=oauth,
    ) as client:
        result = await client.call_tool("api_operation", {})
        return result.text
from fastmcp import Client


async def sampling_handler(messages):
    """Handle server LLM sampling requests.

    This would typically forward the messages to your LLM of choice
    (e.g. OpenAI GPT, Claude); here a canned response is returned.
    """
    response = "This is a sample response"
    return {"text": response}


async def client_with_sampling():
    """Client that can handle server sampling requests."""
    async with Client(
        "intelligent_server.py",
        sampling_handler=sampling_handler,
    ) as client:
        # Server can now request LLM completions via ctx.sample()
        result = await client.call_tool("analyze_data", {
            "data": "Some complex data to analyze"
        })
        return result.text
from fastmcp import Client


async def multi_server_client():
    """Connect to multiple servers with one unified client.

    The client is given an MCP configuration dict; tool names are then
    prefixed with the server name they come from.
    """
    config = {
        "mcpServers": {
            "weather": {
                "url": "https://weather-api.example.com/mcp"
            },
            "assistant": {
                "command": "python",
                "args": ["./assistant_server.py"]
            },
            "database": {
                "url": "http://localhost:8080/mcp",
                "auth": {"type": "bearer", "token": "db-token"}
            }
        }
    }
    async with Client(config) as client:
        # Tools are prefixed with server names
        forecast = await client.call_tool(
            "weather_get_forecast",
            {"city": "London"},
        )
        answer = await client.call_tool(
            "assistant_answer_question",
            {"query": "What is MCP?"},
        )
        data = await client.call_tool(
            "database_query",
            {"sql": "SELECT * FROM users LIMIT 10"},
        )
        return {
            "forecast": forecast.text,
            "answer": answer.text,
            "data": data.text,
        }
from fastmcp import FastMCP, Client


async def test_server_with_client():
    """Test a FastMCP server using the in-memory client transport."""
    # Create server
    mcp = FastMCP("Test Server")

    @mcp.tool
    def multiply(a: int, b: int) -> int:
        """Multiply two numbers."""
        return a * b

    # Connect via in-memory transport (no subprocess needed)
    async with Client(mcp) as client:
        # Test the server directly
        result = await client.call_tool("multiply", {"a": 6, "b": 7})
        assert result.text == "42"

        tools = await client.list_tools()
        assert len(tools) == 1
        assert tools[0]["name"] == "multiply"
"""Result from calling a tool."""
text: str
content: list[dict] | None
is_error: bool
class ResourceResult:
"""Result from reading a resource."""
content: str | bytes
mime_type: str | None
uri: str
class PromptResult:
"""Result from getting a prompt."""
messages: list[dict]
description: str | NoneInstall with Tessl CLI
Install with the Tessl CLI: `npx tessl i tessl/pypi-fastmcp`