CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-semantic-kernel

Semantic Kernel Python SDK - comprehensive AI development framework for building AI agents and multi-agent systems

Pending
Overview
Eval results
Files

docs/agents.md

Agents and Multi-Agent Systems

Autonomous AI agents capable of conversation, collaboration, and orchestrated workflows. Supports various agent types, group chats, orchestration patterns, and complex multi-agent scenarios for building sophisticated AI systems.

Capabilities

Base Agent Framework

Core agent classes and interfaces for building autonomous AI agents.

class Agent:
    """
    Base class for AI agents in Semantic Kernel.

    Interface stub: method bodies are supplied by the SDK implementation.
    The ``name``/``description``/``instructions`` properties mirror the
    values passed to ``__init__``.
    """
    
    def __init__(
        self,
        service_id: str,
        kernel: Kernel,
        name: str | None = None,
        description: str | None = None,
        instructions: str | None = None
    ):
        """
        Initialize a base agent.
        
        Parameters:
        - service_id: ID of the AI service to use
        - kernel: Kernel instance for execution
        - name: Name of the agent
        - description: Description of the agent's purpose
        - instructions: System instructions for the agent
        """
    
    async def invoke(
        self,
        input: str | ChatHistory,
        **kwargs
    ) -> AgentResponseItem:
        """
        Invoke the agent with input.
        
        Parameters:
        - input: Input message or chat history
        - **kwargs: Additional arguments
        
        Returns:
        AgentResponseItem containing the agent's response
        """
    
    @property
    def name(self) -> str:
        """Get the agent name."""
    
    @property
    def description(self) -> str:
        """Get the agent description."""
    
    @property
    def instructions(self) -> str:
        """Get the agent instructions."""

class AgentThread:
    """
    Represents a conversation thread with an agent.

    Interface stub: method bodies are supplied by the SDK implementation.
    A thread accumulates messages across calls to ``invoke``.
    """
    
    def __init__(self, agent: Agent):
        """
        Initialize an agent thread.
        
        Parameters:
        - agent: The agent for this thread
        """
    
    async def add_chat_message(self, message: ChatMessageContent) -> None:
        """
        Add a message to the thread.
        
        Parameters:
        - message: Message to add to the thread
        """
    
    async def invoke(self, message: str) -> list[ChatMessageContent]:
        """
        Send a message to the agent and get responses.
        
        Parameters:
        - message: Message to send to the agent
        
        Returns:
        List of response messages from the agent
        """
    
    @property
    def messages(self) -> list[ChatMessageContent]:
        """Get all messages in the thread."""

class AgentResponseItem:
    """
    Container for agent response data.

    Simple value object: the content and metadata supplied at construction
    are exposed through read-only properties.
    """

    def __init__(
        self,
        content: ChatMessageContent,
        metadata: dict[str, Any] | None = None
    ):
        """
        Initialize agent response item.

        Parameters:
        - content: The response message content
        - metadata: Additional response metadata; when omitted an empty
          dict is used so callers can always iterate ``metadata`` safely
        """
        self._content = content
        # Copy the caller's dict so later external mutation cannot change
        # this response item (and avoid sharing state between instances).
        self._metadata: dict[str, Any] = dict(metadata) if metadata is not None else {}

    @property
    def content(self) -> ChatMessageContent:
        """Get the response content."""
        return self._content

    @property
    def metadata(self) -> dict[str, Any]:
        """Get the response metadata (empty dict when none was provided)."""
        return self._metadata

class AgentSpec:
    """
    Specification for configuring an agent.

    Holds the declarative configuration (identity, instructions, model
    connection, and tools) from which an agent can be built. All values
    are captured at construction and exposed as read-only properties.
    """

    def __init__(
        self,
        name: str,
        description: str,
        instructions: str,
        model_connection: ModelConnection,
        tools: list[ToolSpec] | None = None
    ):
        """
        Initialize agent specification.

        Parameters:
        - name: Agent name
        - description: Agent description
        - instructions: System instructions
        - model_connection: AI model connection details
        - tools: Available tools for the agent; an empty list is used
          when omitted
        """
        self._name = name
        self._description = description
        self._instructions = instructions
        self._model_connection = model_connection
        # Copy so callers mutating their list do not alter the spec.
        self._tools: list[ToolSpec] = list(tools) if tools is not None else []

    @property
    def name(self) -> str:
        """Get the agent name."""
        return self._name

    @property
    def description(self) -> str:
        """Get the agent description."""
        return self._description

    @property
    def instructions(self) -> str:
        """Get the agent instructions."""
        return self._instructions

    @property
    def model_connection(self) -> ModelConnection:
        """Get the model connection details."""
        return self._model_connection

    @property
    def tools(self) -> list[ToolSpec]:
        """Get the tools available to the agent (empty list when none)."""
        return self._tools

class AgentRegistry:
    """
    Registry for managing multiple agents, keyed by agent name.

    Registration order is preserved (dicts keep insertion order), and
    registering a second agent under an existing name replaces the first.
    """

    def __init__(self):
        """Initialize an empty agent registry."""
        self._agents: dict[str, Agent] = {}

    def register_agent(self, agent: Agent) -> None:
        """
        Register an agent with the registry.

        The agent is stored under its ``name`` attribute; a later agent
        with the same name replaces an earlier one.

        Parameters:
        - agent: Agent to register
        """
        self._agents[agent.name] = agent

    def get_agent(self, name: str) -> Agent:
        """
        Get an agent by name.

        Parameters:
        - name: Name of the agent to retrieve

        Returns:
        The requested agent

        Raises:
        KeyError: If no agent is registered under ``name``.
        """
        return self._agents[name]

    def list_agents(self) -> list[str]:
        """
        List all registered agent names.

        Returns:
        List of agent names, in registration order
        """
        return list(self._agents)

Chat Completion Agents

Agents built on top of chat completion models for conversational AI.

class ChatCompletionAgent(Agent):
    """
    Agent powered by chat completion models.

    Interface stub: method bodies are supplied by the SDK implementation.
    Extends ``Agent`` with per-request chat-completion execution settings.
    """
    
    def __init__(
        self,
        service_id: str,
        kernel: Kernel,
        name: str | None = None,
        description: str | None = None,
        instructions: str | None = None,
        execution_settings: PromptExecutionSettings | None = None
    ):
        """
        Initialize a chat completion agent.
        
        Parameters:
        - service_id: ID of the chat completion service
        - kernel: Kernel instance for execution
        - name: Name of the agent
        - description: Description of the agent's purpose  
        - instructions: System instructions for the agent
        - execution_settings: Settings for chat completion
        """
    
    async def invoke(
        self,
        input: str | ChatHistory,
        **kwargs
    ) -> AgentResponseItem:
        """
        Invoke the chat completion agent.
        
        Parameters:
        - input: Input message or chat history
        - **kwargs: Additional arguments
        
        Returns:
        AgentResponseItem with the agent's response
        """

class ChatHistoryAgentThread(AgentThread):
    """
    Agent thread that maintains chat history.

    Interface stub: method bodies are supplied by the SDK implementation.
    Each ``invoke`` call appends to the thread's ``chat_history``.
    """
    
    def __init__(
        self,
        agent: ChatCompletionAgent,
        chat_history: ChatHistory | None = None
    ):
        """
        Initialize chat history agent thread.
        
        Parameters:
        - agent: The chat completion agent
        - chat_history: Initial chat history (optional; presumably a fresh
          history is created when omitted — confirm against the SDK)
        """
    
    async def invoke(self, message: str) -> list[ChatMessageContent]:
        """
        Send message and get response while maintaining history.
        
        Parameters:
        - message: Message to send to the agent
        
        Returns:
        List of response messages from the agent
        """
    
    @property
    def chat_history(self) -> ChatHistory:
        """Get the chat history for this thread."""

OpenAI Assistant Agents

Agents that integrate with OpenAI's Assistant API.

class OpenAIAssistantAgent(Agent):
    """
    Agent that uses OpenAI's Assistant API.

    Interface stub: method bodies are supplied by the SDK implementation.
    Can either create a new assistant or attach to an existing one via
    ``assistant_id``.
    """
    
    def __init__(
        self,
        kernel: Kernel,
        service_id: str,
        name: str,
        instructions: str | None = None,
        description: str | None = None,
        assistant_id: str | None = None,
        enable_code_interpreter: bool = False,
        enable_file_search: bool = False,
        enable_json_response: bool = False,
        file_ids: list[str] | None = None,
        metadata: dict[str, str] | None = None,
        max_completion_tokens: int | None = None,
        max_prompt_tokens: int | None = None,
        parallel_tool_calls_enabled: bool = True,
        truncation_message_count: int | None = None,
        temperature: float | None = None,
        top_p: float | None = None,
        vector_store_id: str | None = None
    ):
        """
        Initialize OpenAI Assistant agent.
        
        Parameters:
        - kernel: Kernel instance for execution
        - service_id: ID of the OpenAI service
        - name: Assistant name
        - instructions: System instructions for the assistant
        - description: Assistant description
        - assistant_id: Existing assistant ID (if using existing assistant)
        - enable_code_interpreter: Enable code interpretation tool
        - enable_file_search: Enable file search tool
        - enable_json_response: Enable JSON response format
        - file_ids: List of file IDs for the assistant
        - metadata: Additional metadata
        - max_completion_tokens: Maximum completion tokens
        - max_prompt_tokens: Maximum prompt tokens
        - parallel_tool_calls_enabled: Enable parallel tool calls
        - truncation_message_count: Message truncation count
        - temperature: Sampling temperature
        - top_p: Nucleus sampling parameter
        - vector_store_id: Vector store ID for file search
        """
    
    async def create_thread(self) -> AssistantAgentThread:
        """
        Create a new conversation thread.
        
        Returns:
        AssistantAgentThread for conversation
        """
    
    async def delete(self) -> None:
        """Delete the assistant from OpenAI."""

class AzureAssistantAgent(Agent):
    """
    Agent that uses Azure OpenAI's Assistant API.

    Interface stub: method bodies are supplied by the SDK implementation.
    The constructor signature is identical to ``OpenAIAssistantAgent``.
    """
    
    def __init__(
        self,
        kernel: Kernel,
        service_id: str,
        name: str,
        instructions: str | None = None,
        description: str | None = None,
        assistant_id: str | None = None,
        enable_code_interpreter: bool = False,
        enable_file_search: bool = False,
        enable_json_response: bool = False,
        file_ids: list[str] | None = None,
        metadata: dict[str, str] | None = None,
        max_completion_tokens: int | None = None,
        max_prompt_tokens: int | None = None,
        parallel_tool_calls_enabled: bool = True,
        truncation_message_count: int | None = None,
        temperature: float | None = None,
        top_p: float | None = None,
        vector_store_id: str | None = None
    ):
        """
        Initialize Azure OpenAI Assistant agent.

        Parameters:
        - kernel: Kernel instance for execution
        - service_id: ID of the Azure OpenAI service
        - name: Assistant name
        - instructions: System instructions for the assistant
        - description: Assistant description
        - assistant_id: Existing assistant ID (if using existing assistant)
        - enable_code_interpreter: Enable code interpretation tool
        - enable_file_search: Enable file search tool
        - enable_json_response: Enable JSON response format
        - file_ids: List of file IDs for the assistant
        - metadata: Additional metadata
        - max_completion_tokens: Maximum completion tokens
        - max_prompt_tokens: Maximum prompt tokens
        - parallel_tool_calls_enabled: Enable parallel tool calls
        - truncation_message_count: Message truncation count
        - temperature: Sampling temperature
        - top_p: Nucleus sampling parameter
        - vector_store_id: Vector store ID for file search
        """
    
    async def create_thread(self) -> AssistantAgentThread:
        """
        Create a new conversation thread.
        
        Returns:
        AssistantAgentThread for conversation
        """

class AssistantAgentThread(AgentThread):
    """
    Thread for OpenAI/Azure Assistant agents.

    Interface stub: method bodies are supplied by the SDK implementation.
    Runs are polled according to ``RunPollingOptions``.
    """
    
    def __init__(self, agent: OpenAIAssistantAgent | AzureAssistantAgent):
        """
        Initialize assistant agent thread.
        
        Parameters:
        - agent: The assistant agent
        """
    
    async def invoke(
        self,
        message: str,
        polling_options: RunPollingOptions | None = None
    ) -> list[ChatMessageContent]:
        """
        Send message and get response from assistant.
        
        Parameters:
        - message: Message to send
        - polling_options: Options for polling the run status; presumably
          defaults are used when omitted — confirm against the SDK
        
        Returns:
        List of response messages from the assistant
        """
    
    async def add_chat_message(self, message: ChatMessageContent) -> None:
        """
        Add a message to the assistant thread.
        
        Parameters:
        - message: Message to add
        """

class RunPollingOptions:
    """
    Configuration for polling assistant run status.

    Immutable-by-convention value object; values are validated at
    construction and exposed through read-only properties.
    """

    def __init__(
        self,
        sleep_interval: int = 1,
        max_polling_iterations: int = 50
    ):
        """
        Initialize polling options.

        Parameters:
        - sleep_interval: Seconds to sleep between polls (must be >= 0)
        - max_polling_iterations: Maximum number of polling iterations
          (must be >= 1)

        Raises:
        ValueError: If either argument is out of range.
        """
        # Validate eagerly so a bad configuration fails at construction,
        # not mid-way through a polling loop.
        if sleep_interval < 0:
            raise ValueError("sleep_interval must be non-negative")
        if max_polling_iterations < 1:
            raise ValueError("max_polling_iterations must be at least 1")
        self._sleep_interval = sleep_interval
        self._max_polling_iterations = max_polling_iterations

    @property
    def sleep_interval(self) -> int:
        """Get the sleep interval in seconds."""
        return self._sleep_interval

    @property
    def max_polling_iterations(self) -> int:
        """Get the maximum polling iterations."""
        return self._max_polling_iterations

Multi-Agent Group Chat

Systems for managing conversations between multiple agents.

class AgentGroupChat:
    """
    Manages conversations between multiple agents.

    Interface stub: method bodies are supplied by the SDK implementation.
    Turn-taking is driven by the selection strategy and ending by the
    termination strategy (presumably with SDK defaults when omitted —
    confirm against the SDK).
    """
    
    def __init__(
        self,
        agents: list[Agent] | None = None,
        selection_strategy: SelectionStrategy | None = None,
        termination_strategy: TerminationStrategy | None = None
    ):
        """
        Initialize agent group chat.
        
        Parameters:
        - agents: List of agents participating in the group chat
        - selection_strategy: Strategy for selecting next agent to speak
        - termination_strategy: Strategy for determining when to end conversation
        """
    
    def add_agent(self, agent: Agent) -> None:
        """
        Add an agent to the group chat.
        
        Parameters:
        - agent: Agent to add to the group
        """
    
    async def invoke(
        self,
        message: str,
        max_turns: int | None = None
    ) -> list[ChatMessageContent]:
        """
        Start or continue a group conversation.
        
        Parameters:
        - message: Initial message to start the conversation
        - max_turns: Maximum number of conversation turns
        
        Returns:
        List of all messages in the group conversation
        """
    
    @property
    def agents(self) -> list[Agent]:
        """Get the agents in the group chat."""
    
    @property
    def chat_history(self) -> ChatHistory:
        """Get the complete chat history."""

class AgentChat:
    """
    Simplified interface for agent conversations.

    Interface stub: method bodies are supplied by the SDK implementation.
    Unlike ``AgentGroupChat``, strategies are passed per-invocation.
    """
    
    def __init__(self, agents: list[Agent]):
        """
        Initialize agent chat.
        
        Parameters:
        - agents: List of agents for the chat
        """
    
    async def invoke(
        self,
        message: str,
        termination_strategy: TerminationStrategy | None = None
    ) -> ChatHistory:
        """
        Invoke agent chat with a message.
        
        Parameters:
        - message: Message to start the conversation
        - termination_strategy: Strategy for ending the conversation
        
        Returns:
        ChatHistory containing the full conversation
        """

Specialized Agent Types

Additional agent implementations for specific platforms and use cases.

class BedrockAgent(Agent):
    """
    Agent that integrates with Amazon Bedrock.

    Interface stub: method bodies are supplied by the SDK implementation.
    ``agent_id``/``agent_alias_id`` identify an existing Bedrock agent.
    """
    
    def __init__(
        self,
        service_id: str,
        kernel: Kernel,
        name: str | None = None,
        description: str | None = None,
        instructions: str | None = None,
        agent_id: str | None = None,
        agent_alias_id: str | None = None
    ):
        """
        Initialize Bedrock agent.
        
        Parameters:
        - service_id: ID of the Bedrock service
        - kernel: Kernel instance for execution
        - name: Agent name
        - description: Agent description
        - instructions: System instructions
        - agent_id: Bedrock agent ID
        - agent_alias_id: Bedrock agent alias ID
        """

class BedrockAgentThread(AgentThread):
    """
    Thread for Bedrock agents.

    Interface stub: inherits conversation behavior from ``AgentThread``.
    """
    
    def __init__(self, agent: BedrockAgent):
        """
        Initialize Bedrock agent thread.
        
        Parameters:
        - agent: The Bedrock agent
        """

class AzureAIAgent(Agent):
    """
    Agent that integrates with Azure AI services.

    Interface stub: method bodies are supplied by the SDK implementation.
    """
    
    def __init__(
        self,
        service_id: str,
        kernel: Kernel,
        name: str | None = None,
        description: str | None = None,
        instructions: str | None = None,
        settings: AzureAIAgentSettings | None = None
    ):
        """
        Initialize Azure AI agent.
        
        Parameters:
        - service_id: ID of the Azure AI service
        - kernel: Kernel instance for execution
        - name: Agent name
        - description: Agent description
        - instructions: System instructions
        - settings: Azure AI agent settings
        """

class AzureAIAgentThread(AgentThread):
    """
    Thread for Azure AI agents.

    Interface stub: inherits conversation behavior from ``AgentThread``.
    """
    
    def __init__(self, agent: AzureAIAgent):
        """
        Initialize Azure AI agent thread.
        
        Parameters:
        - agent: The Azure AI agent
        """

class CopilotStudioAgent(Agent):
    """
    Agent that integrates with Microsoft Copilot Studio.

    Interface stub: method bodies are supplied by the SDK implementation.
    """
    
    def __init__(
        self,
        service_id: str,
        kernel: Kernel,
        name: str | None = None,
        description: str | None = None,
        instructions: str | None = None,
        settings: CopilotStudioAgentSettings | None = None
    ):
        """
        Initialize Copilot Studio agent.
        
        Parameters:
        - service_id: ID of the Copilot Studio service
        - kernel: Kernel instance for execution
        - name: Agent name
        - description: Agent description
        - instructions: System instructions
        - settings: Copilot Studio agent settings
        """

class CopilotStudioAgentThread(AgentThread):
    """
    Thread for Copilot Studio agents.

    Interface stub: inherits conversation behavior from ``AgentThread``.
    """
    
    def __init__(self, agent: CopilotStudioAgent):
        """
        Initialize Copilot Studio agent thread.
        
        Parameters:
        - agent: The Copilot Studio agent
        """

class AutoGenConversableAgent(Agent):
    """
    Agent that integrates with the AutoGen framework.

    Interface stub: method bodies are supplied by the SDK implementation.
    NOTE(review): the relationship between ``instructions`` and AutoGen's
    ``system_message`` is not visible here — confirm against the SDK.
    """
    
    def __init__(
        self,
        service_id: str,
        kernel: Kernel,
        name: str | None = None,
        description: str | None = None,
        instructions: str | None = None,
        system_message: str | None = None
    ):
        """
        Initialize AutoGen conversable agent.
        
        Parameters:
        - service_id: ID of the AI service
        - kernel: Kernel instance for execution
        - name: Agent name
        - description: Agent description
        - instructions: System instructions
        - system_message: AutoGen system message
        """

class AutoGenConversableAgentThread(AgentThread):
    """
    Thread for AutoGen conversable agents.

    Interface stub: inherits conversation behavior from ``AgentThread``.
    """
    
    def __init__(self, agent: AutoGenConversableAgent):
        """
        Initialize AutoGen conversable agent thread.
        
        Parameters:
        - agent: The AutoGen conversable agent
        """

Agent Orchestration

Orchestration patterns for coordinating multiple agents.

class ConcurrentOrchestration:
    """
    Orchestration that runs agents concurrently.

    Interface stub: method bodies are supplied by the SDK implementation.
    All agents receive the same message; responses are collected together.
    """
    
    def __init__(self, agents: list[Agent]):
        """
        Initialize concurrent orchestration.
        
        Parameters:
        - agents: List of agents to orchestrate
        """
    
    async def invoke(
        self,
        message: str,
        **kwargs
    ) -> list[AgentResponseItem]:
        """
        Invoke all agents concurrently.
        
        Parameters:
        - message: Message to send to all agents
        - **kwargs: Additional arguments
        
        Returns:
        List of responses from all agents
        """

class SequentialOrchestration:
    """
    Orchestration that runs agents in sequence.

    Interface stub: method bodies are supplied by the SDK implementation.
    Agents execute in list order; presumably each agent sees the output
    of the previous one — confirm against the SDK.
    """
    
    def __init__(self, agents: list[Agent]):
        """
        Initialize sequential orchestration.
        
        Parameters:
        - agents: List of agents in execution order
        """
    
    async def invoke(
        self,
        message: str,
        **kwargs
    ) -> list[AgentResponseItem]:
        """
        Invoke agents sequentially.
        
        Parameters:
        - message: Initial message for the sequence
        - **kwargs: Additional arguments
        
        Returns:
        List of responses from each agent in sequence
        """

class HandoffOrchestration:
    """
    Orchestration with agent handoff capabilities.

    Interface stub: method bodies are supplied by the SDK implementation.
    The ``handoffs`` configuration defines which agents may transfer
    control to which others.
    """
    
    def __init__(
        self,
        agents: list[Agent],
        handoffs: OrchestrationHandoffs
    ):
        """
        Initialize handoff orchestration.
        
        Parameters:
        - agents: List of available agents
        - handoffs: Handoff configuration between agents
        """
    
    async def invoke(
        self,
        message: str,
        initial_agent: str | None = None,
        **kwargs
    ) -> list[AgentResponseItem]:
        """
        Invoke with agent handoff capabilities.
        
        Parameters:
        - message: Initial message
        - initial_agent: Name of the initial agent to start with
        - **kwargs: Additional arguments
        
        Returns:
        List of responses including handoffs
        """

class GroupChatOrchestration:
    """
    Orchestration for group chat scenarios.

    Interface stub: method bodies are supplied by the SDK implementation.
    The ``chat_manager`` controls turn-taking among the agents.
    """
    
    def __init__(
        self,
        agents: list[Agent],
        chat_manager: GroupChatManager
    ):
        """
        Initialize group chat orchestration.
        
        Parameters:
        - agents: List of agents in the group
        - chat_manager: Manager for the group chat
        """
    
    async def invoke(
        self,
        message: str,
        max_turns: int | None = None,
        **kwargs
    ) -> ChatHistory:
        """
        Invoke group chat orchestration.
        
        Parameters:
        - message: Initial message for the group
        - max_turns: Maximum number of conversation turns
        - **kwargs: Additional arguments
        
        Returns:
        ChatHistory containing the group conversation
        """

Usage Examples

Basic Agent Setup

# Example: create a single chat-completion agent and invoke it once.
from semantic_kernel import Kernel
from semantic_kernel.agents import ChatCompletionAgent
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion

# Setup kernel with chat service
kernel = Kernel()
chat_service = OpenAIChatCompletion(
    service_id="gpt-4-agent",
    ai_model_id="gpt-4",
    api_key="your-api-key"  # placeholder — supply a real key (e.g. from env) before running
)
kernel.add_service(chat_service)

# Create an agent
agent = ChatCompletionAgent(
    service_id="gpt-4-agent",
    kernel=kernel,
    name="Assistant",
    instructions="You are a helpful AI assistant.",
    description="General purpose AI assistant"
)

# Use the agent
# NOTE(review): top-level `await` only works inside an async context
# (e.g. an `async def main()` run via `asyncio.run`, or a notebook/REPL
# supporting top-level await) — this snippet is not runnable as a plain script.
response = await agent.invoke("Hello, can you help me with Python?")
print(response.content.content)

Multi-Agent Group Chat

# Example: three specialized agents collaborating in a group chat.
# NOTE(review): assumes `kernel` is already configured with a chat service
# (as in the basic setup snippet above) — this snippet is not self-contained.
from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent

# Create multiple specialized agents
researcher = ChatCompletionAgent(
    service_id="gpt-4-agent",
    kernel=kernel,
    name="Researcher",
    instructions="You are a research specialist. Focus on gathering facts and data."
)

writer = ChatCompletionAgent(
    service_id="gpt-4-agent", 
    kernel=kernel,
    name="Writer",
    instructions="You are a professional writer. Focus on clear, engaging content."
)

critic = ChatCompletionAgent(
    service_id="gpt-4-agent",
    kernel=kernel,
    name="Critic", 
    instructions="You are a constructive critic. Provide feedback and suggestions."
)

# Create group chat
group_chat = AgentGroupChat(agents=[researcher, writer, critic])

# Start conversation
# NOTE(review): top-level `await` requires an async context — see note above
# about wrapping in `async def main()` + `asyncio.run`.
messages = await group_chat.invoke(
    "I need help writing an article about renewable energy",
    max_turns=10
)

# Review conversation
for message in messages:
    print(f"{message.name}: {message.content}")

OpenAI Assistant Integration

# Example: OpenAI Assistant agent with code-interpreter and file-search tools.
# NOTE(review): assumes `kernel` is already configured (see basic setup snippet)
# and requires an async context for the top-level `await` calls.
from semantic_kernel.agents import OpenAIAssistantAgent

# Create assistant agent with tools
assistant = OpenAIAssistantAgent(
    kernel=kernel,
    service_id="openai-assistant",
    name="Code Helper",
    instructions="You are a coding assistant with access to code interpretation.",
    description="Helps with code analysis and execution",
    enable_code_interpreter=True,
    enable_file_search=True
)

# Create thread and have conversation
thread = await assistant.create_thread()
responses = await thread.invoke("Can you help me analyze this Python script?")

for response in responses:
    print(response.content)

Install with Tessl CLI

npx tessl i tessl/pypi-semantic-kernel

docs

agents.md

ai-connectors.md

content-types.md

core-kernel.md

core-plugins.md

filters.md

index.md

memory-stores.md

processes.md

prompt-templates.md

tile.json