CLI utility and Python library for interacting with Large Language Models from multiple providers including OpenAI, Anthropic, Google, and Meta plus locally installed models.
—
Function calling system with automatic schema generation, tool chaining, and error handling. This module enables Large Language Models to execute Python functions, access external systems, and perform complex multi-step operations through a structured tool interface.
Tools wrap Python functions to make them callable by LLMs with automatic schema generation and parameter validation.
class Tool:
    """Function wrapper for LLM tool calls with schema generation."""

    def __init__(
        self,
        function: Callable,
        name: Optional[str] = None,
        description: Optional[str] = None,
        parameters: Optional[dict] = None
    ):
        """
        Initialize tool.

        Args:
            function: Python function to wrap
            name: Tool name (defaults to function name)
            description: Tool description (from docstring if not provided)
            parameters: JSON schema for parameters (auto-generated if not provided)
        """

    @classmethod
    def function(
        cls,
        function: Callable,
        name: Optional[str] = None,
        description: Optional[str] = None
    ) -> "Tool":
        """
        Create tool from function with automatic schema generation.

        Args:
            function: Python function to wrap
            name: Optional tool name override
            description: Optional description override

        Returns:
            Tool instance
        """

    def hash(self) -> str:
        """Generate stable hash for tool caching."""

    # Instance attributes:
    name: str                     # tool name exposed to the model
    description: str              # human-readable description sent to the model
    parameters: dict              # JSON schema describing the accepted arguments
    function: Callable            # the wrapped Python callable
    plugin: Optional[str] = None  # registering plugin's name, if registered via a plugin
def get_tools() -> Dict[str, Union[Tool, Type[Toolbox]]]:
    """Get all registered tools and toolboxes from plugins."""

Toolboxes organize related tools into cohesive collections with shared setup and state management.
class Toolbox:
    """Base class for collections of related tools."""

    def tools(self) -> Iterable[Tool]:
        """Return all tools in this toolbox."""

    def add_tool(
        self,
        tool_or_function: Union[Tool, Callable],
        pass_self: bool = False
    ):
        """
        Add a tool to this toolbox.

        Args:
            tool_or_function: Tool instance or plain function to wrap
            pass_self: Whether to pass the toolbox instance to the function
        """

    def prepare(self):
        """Setup method called before toolbox use."""

    async def prepare_async(self):
        """Async setup method called before toolbox use."""

    # Optional display name for this toolbox
    name: Optional[str] = None
    plugin: Optional[str] = None

The tool execution system handles function calls, parameter validation, and result processing.
class ToolCall:
    """Represents a tool function call request from an LLM."""

    def __init__(self, function: str, arguments: dict, id: Optional[str] = None):
        """
        Initialize tool call.

        Args:
            function: Tool function name
            arguments: Function arguments as dict
            id: Optional call ID for tracking
        """

    # Instance attributes mirroring the constructor arguments:
    function: str      # name of the tool the model wants to invoke
    arguments: dict    # arguments supplied by the model
    id: Optional[str]  # optional call ID used for tracking
class ToolResult:
    """Result of executing a tool call."""

    def __init__(
        self,
        call: ToolCall,
        output: Any,
        error: Optional[str] = None,
        attachments: Optional[List[Attachment]] = None
    ):
        """
        Initialize tool result.

        Args:
            call: Original tool call
            output: Function return value
            error: Error message if execution failed
            attachments: Optional result attachments
        """

    # Instance attributes mirroring the constructor arguments:
    call: ToolCall                                  # the call this result answers
    output: Any                                     # function return value
    error: Optional[str] = None                     # error message if execution failed
    attachments: Optional[List[Attachment]] = None  # optional result attachments
class ToolOutput:
    """Tool return value with optional attachments."""

    def __init__(self, content: Any, attachments: Optional[List[Attachment]] = None):
        """
        Initialize tool output.

        Args:
            content: Return value content
            attachments: Optional file/media attachments
        """

    # Return value content passed back to the model
    content: Any
    attachments: Optional[List[Attachment]] = None

Special exception for controlling tool execution flow.
class CancelToolCall(Exception):
    """Exception to cancel tool execution and return message to LLM."""

    def __init__(self, message: str):
        """
        Initialize cancellation.

        Args:
            message: Message to return to LLM explaining cancellation
        """
        self.message = message

The package includes several built-in utility tools.
def llm_version() -> str:
    """Get installed LLM package version."""
def llm_time() -> dict:
    """
    Get current time information.

    Returns:
        dict: Contains 'utc' and 'local' time strings
    """

import llm
def calculate_area(length: float, width: float) -> float:
    """Calculate the area of a rectangle."""
    # Rectangle area is simply the product of the two side lengths.
    area = width * length
    return area
# Create tool from function
area_tool = llm.Tool.function(calculate_area)

# Use with model
model = llm.get_model("gpt-4")
response = model.prompt(
    "What's the area of a rectangle that is 5.5 meters long and 3.2 meters wide?",
    tools=[area_tool]
)
print(response.text())

import llm
from typing import Any, Dict, List, Optional
def search_database(
    query: str,
    table: str,
    limit: Optional[int] = 10,
    filters: Optional[Dict[str, str]] = None
) -> List[Dict[str, Any]]:
    """
    Search database table with optional filters.

    Args:
        query: Search query string
        table: Database table name
        limit: Maximum number of results
        filters: Optional column filters

    Returns:
        List of matching records
    """
    # Stub implementation: a real version would run the query against an
    # actual database. Here we echo the inputs back in a single fake record.
    record = {"id": 1, "name": "Example", "query": query, "table": table}
    return [record]
# Tool automatically generates schema from type hints
search_tool = llm.Tool.function(search_database)

model = llm.get_model()
response = model.prompt(
    "Find all users named John in the customers table",
    tools=[search_tool]
)
print(response.text())

import llm
import matplotlib.pyplot as plt
import io
def create_chart(data: List[float], title: str = "Chart") -> llm.ToolOutput:
    """Create a chart from data and return as image attachment."""
    # Render the data series as a simple line plot.
    plt.figure(figsize=(10, 6))
    plt.plot(data)
    plt.title(title)
    plt.grid(True)

    # Serialize the figure to an in-memory PNG, then free the figure.
    png_buffer = io.BytesIO()
    plt.savefig(png_buffer, format='png')
    png_buffer.seek(0)
    plt.close()

    # Ship the rendered image back alongside a short text summary.
    image = llm.Attachment(content=png_buffer.getvalue(), type="image/png")
    summary = f"Created chart '{title}' with {len(data)} data points"
    return llm.ToolOutput(content=summary, attachments=[image])
# The returned ToolOutput carries the PNG attachment back to the model
chart_tool = llm.Tool.function(create_chart)

model = llm.get_model("gpt-4-vision")
response = model.prompt(
    "Create a chart showing the values [1, 4, 2, 8, 5, 7] with title 'Sample Data'",
    tools=[chart_tool]
)
print(response.text())

import llm
def dangerous_operation(action: str) -> str:
    """Perform a potentially dangerous system operation."""
    # Deny-list of substrings that indicate destructive intent.
    blocked = ("delete", "format", "shutdown", "rm -rf")
    lowered = action.lower()
    for keyword in blocked:
        if keyword in lowered:
            # Abort the tool call; the message is returned to the model.
            raise llm.CancelToolCall(
                f"Cannot perform dangerous action: {action}. "
                "Please choose a safer alternative."
            )
    return f"Performed safe action: {action}"
# The guard inside dangerous_operation cancels unsafe requests
safety_tool = llm.Tool.function(dangerous_operation)

model = llm.get_model()
response = model.prompt(
    "Please delete all files in /important/data",
    tools=[safety_tool]
)
print(response.text())  # Will explain why the action was cancelled

import llm
import os
import json
class FileToolbox(llm.Toolbox):
    """Collection of file system tools confined to a base directory."""

    def __init__(self, base_path: str = "."):
        """
        Args:
            base_path: Directory that all file operations are relative to.
        """
        self.base_path = base_path

    def prepare(self):
        """Ensure base path exists before any tool runs."""
        os.makedirs(self.base_path, exist_ok=True)

    def tools(self):
        """Expose the read/write/list operations as individual tools."""
        return [
            llm.Tool.function(self.read_file),
            llm.Tool.function(self.write_file),
            llm.Tool.function(self.list_files),
        ]

    def read_file(self, filename: str) -> str:
        """Read contents of a file relative to the base path."""
        path = os.path.join(self.base_path, filename)
        try:
            with open(path, 'r') as f:
                return f.read()
        except FileNotFoundError:
            # Cancel the call so the model gets a clear explanation
            # instead of an unhandled traceback.
            raise llm.CancelToolCall(f"File not found: {filename}")

    def write_file(self, filename: str, content: str) -> str:
        """Write content to a file relative to the base path."""
        path = os.path.join(self.base_path, filename)
        with open(path, 'w') as f:
            f.write(content)
        return f"Wrote {len(content)} characters to {filename}"

    def list_files(self) -> List[str]:
        """List files in the base directory."""
        return os.listdir(self.base_path)
# Use toolbox
file_tools = FileToolbox("/safe/directory")
tools = list(file_tools.tools())

model = llm.get_model()
response = model.prompt(
    "List the files, then read config.json if it exists",
    tools=tools
)
print(response.text())

import llm
import requests
def fetch_url(url: str) -> str:
    """Fetch content from a URL."""
    resp = requests.get(url)
    # Surface HTTP errors (4xx/5xx) as exceptions rather than bad text.
    resp.raise_for_status()
    return resp.text[:1000]  # Truncate for brevity
def analyze_text(text: str) -> dict:
    """Analyze text and return word count, sentence count, etc."""
    # A sentence is approximated by a terminator: '.', '!' or '?'.
    sentence_total = sum(text.count(mark) for mark in ".!?")
    return {
        "word_count": len(text.split()),
        "sentence_count": sentence_total,
        "character_count": len(text),
    }
# Two independent tools; the model may chain them in one conversation
fetch_tool = llm.Tool.function(fetch_url)
analyze_tool = llm.Tool.function(analyze_text)

model = llm.get_model("gpt-4")
response = model.prompt(
    "Fetch the content from https://example.com and analyze its text statistics",
    tools=[fetch_tool, analyze_tool]
)
print(response.text())

import llm
import asyncio
import aiohttp
async def async_fetch(url: str) -> str:
    """Asynchronously fetch content from URL."""
    # A throwaway session per call keeps the example self-contained.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return await resp.text()
# Tools work with both sync and async functions
async_tool = llm.Tool.function(async_fetch)

async def main():
    # Async models are obtained via get_async_model and awaited
    model = llm.get_async_model("gpt-4")
    response = await model.prompt(
        "Fetch the content from https://httpbin.org/json",
        tools=[async_tool]
    )
    text = await response.text()
    print(text)
asyncio.run(main())

import llm
@llm.hookimpl
def register_tools(register):
    """Register tools via plugin system."""

    def weather_tool(location: str) -> str:
        """Get weather for a location."""
        return f"Weather in {location}: Sunny, 72°F"

    def time_tool() -> str:
        """Get current time."""
        from datetime import datetime
        return datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    # Each plain function becomes an individually named tool.
    for fn, tool_name in ((weather_tool, "get_weather"), (time_tool, "current_time")):
        register(fn, name=tool_name)

    # A Toolbox subclass bundles related tools under one registration.
    class UtilityToolbox(llm.Toolbox):
        def tools(self):
            return [
                llm.Tool.function(weather_tool),
                llm.Tool.function(time_tool),
            ]

    register(UtilityToolbox, name="utilities")
# Tools are automatically available after plugin loading
all_tools = llm.get_tools()
print(f"Available tools: {list(all_tools.keys())}")

This comprehensive tool system enables LLMs to perform complex operations while maintaining safety through controlled execution environments and proper error handling. The combination of individual tools and organized toolboxes provides flexibility for both simple function calling and complex multi-step workflows.
Install with Tessl CLI
npx tessl i tessl/pypi-llm