Haystack is an LLM framework for building customizable, production-ready LLM applications.
—
Create and format prompts for language models with dynamic content injection, template rendering, and structured prompt construction. Haystack provides flexible prompt building components for text generation and chat completion models.
Build dynamic text prompts with template variables and conditional logic.
class PromptBuilder:
    """Render text prompts from a Jinja2 template (interface stub).

    Method bodies are intentionally omitted; this skeleton documents the
    public interface of ``haystack.components.builders.PromptBuilder``.
    """

    def __init__(
        self,
        template: str,
        required_variables: Optional[List[str]] = None,
        variables: Optional[List[str]] = None,
    ) -> None:
        """
        Initialize the prompt builder with a Jinja2 template.

        Args:
            template: Jinja2 template string with placeholder variables.
            required_variables: Variables that must be provided at run time.
            variables: All variables the template may use.
        """

    def run(self, **kwargs) -> Dict[str, str]:
        """
        Build the prompt by rendering the template with the provided variables.

        Args:
            **kwargs: Template variables to inject into the prompt.

        Returns:
            Dictionary with a 'prompt' key containing the rendered prompt string.
        """

    @property
    def template(self) -> str:
        """Get the current template string."""

    def set_template(self, template: str) -> None:
        """
        Update the template string.

        Args:
            template: New Jinja2 template string.
        """

# Build structured chat prompts with message formatting and role management.
class ChatPromptBuilder:
    """Render chat prompts from a list of ChatMessage templates (interface stub).

    Method bodies are intentionally omitted; this skeleton documents the
    public interface of ``haystack.components.builders.ChatPromptBuilder``.
    Each message's ``content`` is itself a Jinja2 template string.
    """

    def __init__(
        self,
        template: List[ChatMessage],
        required_variables: Optional[List[str]] = None,
        variables: Optional[List[str]] = None,
    ) -> None:
        """
        Initialize the chat prompt builder with message templates.

        Args:
            template: List of ChatMessage templates with placeholder variables.
            required_variables: Variables that must be provided at run time.
            variables: All variables the templates may use.
        """

    def run(self, **kwargs) -> Dict[str, List[ChatMessage]]:
        """
        Build the chat prompt by rendering every message template.

        Args:
            **kwargs: Template variables to inject into the messages.

        Returns:
            Dictionary with a 'prompt' key containing the list of rendered
            ChatMessage objects.
        """

    @property
    def template(self) -> List[ChatMessage]:
        """Get the current message templates."""

    def set_template(self, template: List[ChatMessage]) -> None:
        """
        Update the message templates.

        Args:
            template: New list of ChatMessage templates.
        """

# Format and structure answers with source citations and metadata.
class AnswerBuilder:
    """Turn raw LLM replies into structured GeneratedAnswer objects (interface stub).

    Method bodies are intentionally omitted; this skeleton documents the
    public interface of ``haystack.components.builders.AnswerBuilder``.
    """

    def __init__(
        self,
        pattern: Optional[str] = None,
        reference_pattern: Optional[str] = None,
    ) -> None:
        """
        Initialize the answer builder.

        Args:
            pattern: Pattern for formatting the main answer.
            reference_pattern: Pattern for formatting document references.
        """

    def run(
        self,
        query: str,
        replies: List[str],
        documents: Optional[List[Document]] = None,
        pattern: Optional[str] = None,
        reference_pattern: Optional[str] = None,
    ) -> Dict[str, List[GeneratedAnswer]]:
        """
        Build structured answers with citations and references.

        Args:
            query: Original question or query.
            replies: Generated answer texts from the LLM.
            documents: Source documents used for answer generation.
            pattern: Custom formatting pattern for answers (overrides the
                pattern given at construction time).
            reference_pattern: Custom pattern for document references.

        Returns:
            Dictionary with an 'answers' key containing a list of
            GeneratedAnswer objects.
        """

from haystack.components.builders import PromptBuilder
# --- Basic prompt building with a Jinja2 template ---
qa_template = """
You are a helpful assistant. Answer the following question based on the provided context.
Context: {{context}}
Question: {{question}}
Answer:
"""

# Render the template by supplying concrete values for its variables.
builder = PromptBuilder(template=qa_template)
rendered = builder.run(
    context="Python is a high-level programming language known for its simplicity.",
    question="What is Python?",
)
print(rendered["prompt"])
# Output:
# You are a helpful assistant. Answer the following question based on the provided context.
#
# Context: Python is a high-level programming language known for its simplicity.
#
# Question: What is Python?
#
# Answer:
# Template with conditional logic and loops
# A template demonstrating Jinja2 conditionals and loops inside a prompt.
advanced_template = """
System: You are an expert {{domain}} assistant.
{% if context_documents %}
Available Context:
{% for doc in context_documents %}
- {{doc.content}}
{% endfor %}
{% endif %}
{% if examples %}
Examples:
{% for example in examples %}
Q: {{example.question}}
A: {{example.answer}}
{% endfor %}
{% endif %}
User Query: {{user_query}}
Please provide a {{response_type}} response.
"""

prompt_builder = PromptBuilder(template=advanced_template)
# Sections guarded by {% if %} render only when the matching variable is supplied.
result = prompt_builder.run(
    domain="machine learning",
    context_documents=[
        {"content": "Neural networks are computational models inspired by biological neural networks."}
    ],
    examples=[
        {"question": "What is supervised learning?", "answer": "Learning with labeled training data."}
    ],
    user_query="Explain deep learning",
    response_type="detailed",
)
print(result["prompt"])

from haystack.components.builders import ChatPromptBuilder
from haystack.dataclasses import ChatMessage, ChatRole

# Define role-tagged message templates; each content string is a Jinja2 template.
system_message = ChatMessage(
    content=(
        "You are a helpful assistant specialized in {{domain}}. "
        "Always provide accurate and helpful information."
    ),
    role=ChatRole.SYSTEM,
)
user_message = ChatMessage(
    content="Context: {{context}}\n\nBased on this context, {{instruction}}",
    role=ChatRole.USER,
)

chat_prompt_builder = ChatPromptBuilder(template=[system_message, user_message])

# Every message template is rendered with the supplied variables.
result = chat_prompt_builder.run(
    domain="software engineering",
    context="Python is an interpreted, object-oriented programming language.",
    instruction="explain the key features of Python",
)

chat_messages = result["prompt"]
for message in chat_messages:
    print(f"{message.role.value}: {message.content}")
# Build conversation with dynamic message history
# Conversation template: persona system message + prior turns + current user slot.
def _build_conversation_template(history):
    """Return ChatMessage templates for a persona, the prior turns, and the
    current user message placeholder."""
    messages = [
        ChatMessage(
            content="You are {{assistant_persona}}. "
                    "{% if conversation_style %}Use a {{conversation_style}} tone.{% endif %}",
            role=ChatRole.SYSTEM,
        )
    ]
    # BUG FIX: the original iterated over the literal string "{{message_history}}",
    # which yields single characters (and ChatRole(char) would fail). The history
    # must be a real list of {"content": ..., "role": ...} dicts.
    messages.extend(
        ChatMessage(content=msg["content"], role=ChatRole(msg["role"]))
        for msg in history
    )
    messages.append(ChatMessage(content="{{current_message}}", role=ChatRole.USER))
    return messages

message_history = [
    {"content": "I'm learning Python", "role": "user"},
    {"content": "That's great! Python is an excellent language to start with.", "role": "assistant"},
]

chat_builder = ChatPromptBuilder(template=_build_conversation_template(message_history))

# The history is baked into the template above; only the persona/style/current
# message variables remain to be rendered.
result = chat_builder.run(
    assistant_persona="a friendly coding tutor",
    conversation_style="encouraging",
    current_message="Can you explain functions?",
)

from haystack.components.builders import AnswerBuilder
from haystack import Document

# AnswerBuilder wraps raw LLM replies into GeneratedAnswer objects,
# attaching the query and the supporting source documents.
answer_builder = AnswerBuilder(
    pattern="Answer: {answer}\n\nSources: {references}",
    reference_pattern="[{idx}] {source}: {content}",
)

# Source documents and the generated reply to be structured.
documents = [
    Document(
        content="Python was created by Guido van Rossum in 1991.",
        meta={"source": "Python History Wiki"},
    ),
    Document(
        content="Python is known for its readable syntax and extensive libraries.",
        meta={"source": "Python Documentation"},
    ),
]
replies = ["Python is a programming language created by Guido van Rossum in 1991. It's known for its readable syntax."]

result = answer_builder.run(
    query="Who created Python and when?",
    replies=replies,
    documents=documents,
)

answer = result["answers"][0]
print(f"Query: {answer.query}")
print(f"Answer: {answer.data}")
print(f"Documents used: {len(answer.documents)}")

from haystack import Pipeline
from haystack.components.builders import PromptBuilder
from haystack.components.generators import OpenAIGenerator
from haystack.components.retrievers import InMemoryEmbeddingRetriever

# RAG pipeline: retrieve documents -> build prompt -> generate an answer.
rag_pipeline = Pipeline()

rag_template = """
Answer the question based ONLY on the provided context. If the context doesn't contain
enough information to answer the question, say "I don't have enough information to answer this question."
Context:
{% for doc in documents %}
- {{doc.content}}
{% endfor %}
Question: {{question}}
Answer:
"""

# NOTE(review): `document_store` and `query_embedding` must be created beforehand
# (e.g. an InMemoryDocumentStore with embedded documents) -- not shown here.
rag_pipeline.add_component("retriever", InMemoryEmbeddingRetriever(document_store=document_store, top_k=3))
rag_pipeline.add_component("prompt_builder", PromptBuilder(template=rag_template))
rag_pipeline.add_component("generator", OpenAIGenerator(model="gpt-3.5-turbo-instruct"))

# Wire retrieved documents into the template, then the rendered prompt into the LLM.
rag_pipeline.connect("retriever.documents", "prompt_builder.documents")
rag_pipeline.connect("prompt_builder.prompt", "generator.prompt")

result = rag_pipeline.run({
    "retriever": {"query_embedding": query_embedding},
    "prompt_builder": {"question": "What is Python used for?"}
})
print(result["generator"]["replies"][0])

from haystack.components.builders import ChatPromptBuilder
from haystack.components.generators.chat import OpenAIChatGenerator

# Multi-turn chat: the message templates are rebuilt each turn so they include
# the conversation so far.
conversation_history = []

def _chat_template_with_history(history):
    """Return message templates: system instruction, prior turns, and the
    current-input placeholder."""
    messages = [
        ChatMessage(
            content="You are a helpful assistant. Keep track of the conversation context.",
            role=ChatRole.SYSTEM,
        )
    ]
    # BUG FIX: the original iterated the literal string "{{conversation_history}}"
    # (yielding single characters); the history must be a real list of
    # {"content": ..., "role": ...} dicts.
    messages.extend(
        ChatMessage(content=msg["content"], role=ChatRole(msg["role"]))
        for msg in history
    )
    messages.append(ChatMessage(content="{{current_input}}", role=ChatRole.USER))
    return messages

def chat_turn(user_input: str) -> str:
    """Run one conversation turn and append both sides to the shared history."""
    # The pipeline is rebuilt per turn because the template changes with history.
    chat_pipeline = Pipeline()
    chat_pipeline.add_component(
        "chat_prompt_builder",
        ChatPromptBuilder(template=_chat_template_with_history(conversation_history)),
    )
    chat_pipeline.add_component("chat_generator", OpenAIChatGenerator(model="gpt-3.5-turbo"))
    chat_pipeline.connect("chat_prompt_builder.prompt", "chat_generator.messages")

    result = chat_pipeline.run({
        "chat_prompt_builder": {"current_input": user_input}
    })
    response = result["chat_generator"]["replies"][0]
    # Record the exchange so the next turn sees it.
    conversation_history.append({"content": user_input, "role": "user"})
    conversation_history.append({"content": response.content, "role": "assistant"})
    return response.content

# Use the chat system
response1 = chat_turn("My name is Alice")
response2 = chat_turn("What's my name?")  # Should remember "Alice"

from jinja2 import Environment
# Custom Jinja2 filters for prompt construction.
def truncate_text(text: str, max_length: int = 100) -> str:
    """Custom filter to truncate text."""
    # Short enough: return unchanged; otherwise cut and append an ellipsis so
    # the result is exactly max_length characters.
    return text if len(text) <= max_length else f"{text[:max_length - 3]}..."
def format_documents(documents: List[Document], max_docs: int = 3) -> str:
    """Custom function to format documents."""
    # Number the first max_docs documents, one per line.
    return "\n".join(
        f"Document {idx + 1}: {doc.content}"
        for idx, doc in enumerate(documents[:max_docs])
    )
# Template using the custom filters defined above.
template_with_functions = """
Query: {{query}}
Context (showing top {{max_docs}} documents):
{{documents|format_documents(max_docs)}}
Each document truncated to {{max_length}} characters:
{% for doc in documents %}
- {{doc.content|truncate_text(max_length)}}
{% endfor %}
Answer:
"""

# FIX: PromptBuilder renders with its own internal Jinja2 environment, so filters
# registered on a standalone Environment would never be applied. Render the
# template directly through the environment that has the filters installed.
env = Environment()
env.filters['truncate_text'] = truncate_text
env.filters['format_documents'] = format_documents

rendered_prompt = env.from_string(template_with_functions).render(
    query="What is machine learning?",
    documents=documents,
    max_docs=2,
    max_length=50,
)
# Keep the same result shape ({"prompt": ...}) as PromptBuilder.run().
result = {"prompt": rendered_prompt}

from typing import List, Dict, Any, Optional
from haystack.dataclasses import ChatMessage, Document, GeneratedAnswer
class TemplateVariable:
    """Schema declaration for a single template placeholder."""
    name: str               # variable name as it appears in the template
    type: str               # declared type of the variable
    required: bool          # whether the variable must be supplied at render time
    default: Optional[Any]  # fallback value when the variable is omitted

class PromptTemplate:
    """Schema declaration: a text template plus its declared variables."""
    content: str
    variables: List[TemplateVariable]

class ChatTemplate:
    """Schema declaration: a chat (message-list) template plus its variables."""
    messages: List[ChatMessage]
    variables: List[TemplateVariable]

# Install with the Tessl CLI:
#   npx tessl i tessl/pypi-haystack-ai