A TypeScript client for the Phoenix API providing AI observability, prompt management, datasets, experiments, and tracing capabilities.
Comprehensive prompt management system with version control, tagging, and multi-provider support. Enables creation, retrieval, and management of prompt templates with format conversion for popular LLM SDKs.
Retrieve prompts using various selection methods including ID, version ID, name, or name with tag.
/**
 * Get a prompt from the Phoenix API using various selection methods.
 *
 * The lookup strategy is determined by the shape of `params.prompt`
 * (see PromptSelector): by prompt ID, by version ID, by name, or by
 * name plus tag.
 *
 * @param params - Optional client plus the prompt selector
 * @returns Promise resolving to the matching prompt version, or null if not found
 */
function getPrompt(params: {
client?: PhoenixClient; // optional; presumably a default client is created when omitted — TODO confirm
prompt: PromptSelector;
}): Promise<PromptVersion | null>;
/**
 * Discriminated selector describing how to locate a prompt:
 * by prompt ID, by a specific version ID, by name, or by name + tag.
 */
type PromptSelector =
| { promptId: string }
| { versionId: string }
| { name: string }
| { name: string; tag: string };
/**
 * A stored version of a prompt as returned by the Phoenix API.
 */
interface PromptVersion {
id: string; // identifier of this version
prompt_id: string; // identifier of the parent prompt
version: number; // version number within the parent prompt
description: string;
model_name: string; // e.g. "gpt-4o", "claude-3-5-sonnet-20241022"
model_provider: PromptModelProvider;
template_type: "CHAT"; // only chat templates are represented here
template_format: "MUSTACHE" | "F_STRING" | "JINJA2";
template: PromptTemplate;
invocation_parameters: InvocationParameters; // provider-tagged model settings
created_at: string; // creation timestamp — format not shown here; presumably ISO 8601 — TODO confirm
}
/** Chat-style prompt template: an ordered list of role-tagged messages. */
interface PromptTemplate {
type: "chat";
messages: PromptChatMessage[];
}
/** A single message within a chat prompt template. */
interface PromptChatMessage {
role: PromptChatMessageRole;
content: string; // may contain template variables, e.g. {{user_message}} in MUSTACHE format
}
/** Roles accepted in chat prompt messages (note "ai" rather than "assistant"). */
type PromptChatMessageRole = "user" | "system" | "ai" | "tool";
/** Model providers supported by Phoenix prompt management. */
type PromptModelProvider =
| "OPENAI"
| "AZURE_OPENAI"
| "ANTHROPIC"
| "GOOGLE"
| "DEEPSEEK"
| "XAI"
| "OLLAMA"
| "AWS";

Usage Examples:
import { getPrompt } from "@arizeai/phoenix-client/prompts";

// Get by prompt ID
const promptById = await getPrompt({
prompt: { promptId: "prompt_123" }
});

// Get by name (distinct variable names so the examples can coexist in one scope)
const promptByName = await getPrompt({
prompt: { name: "customer-support-chat" }
});

// Get by name and tag
const promptByTag = await getPrompt({
prompt: { name: "customer-support-chat", tag: "production" }
});

// Get by version ID
const promptByVersion = await getPrompt({
prompt: { versionId: "version_456" }
});

Create new prompts or add versions to existing prompts with support for multiple model providers.
/**
 * Create a prompt and store it in Phoenix.
 *
 * Creates a new prompt, or adds a version to an existing prompt that has
 * the same name.
 *
 * @param params - Prompt creation parameters
 * @returns Promise resolving to the created prompt version
 */
function createPrompt(params: {
client?: PhoenixClient; // optional; presumably a default client is used when omitted — TODO confirm
name: string; // reusing an existing name adds a new version to that prompt
description?: string;
version: PromptVersionData; // typically built with the promptVersion() helper
}): Promise<PromptVersion>;
/**
 * Payload describing one version of a prompt. The promptVersion() helper
 * builds this structure declaratively from provider-specific input.
 */
interface PromptVersionData {
description: string;
model_provider: PromptModelProvider;
model_name: string;
template_type: "CHAT";
template_format: "MUSTACHE" | "F_STRING" | "JINJA2";
template: PromptTemplate;
invocation_parameters: InvocationParameters;
}
/**
 * Provider-tagged invocation parameters, discriminated on `type`, so each
 * provider's settings stay type-safe.
 */
type InvocationParameters =
| { type: "openai"; openai: OpenAIInvocationParameters }
| { type: "azure_openai"; azure_openai: AzureOpenAIInvocationParameters }
| { type: "anthropic"; anthropic: AnthropicInvocationParameters }
| { type: "google"; google: GoogleInvocationParameters }
| { type: "deepseek"; deepseek: DeepSeekInvocationParameters }
| { type: "xai"; xai: XAIInvocationParameters }
| { type: "ollama"; ollama: OllamaInvocationParameters }
| { type: "aws"; aws: AwsInvocationParameters };
/** Invocation parameters accepted for OpenAI models (all optional). */
interface OpenAIInvocationParameters {
temperature?: number;
max_tokens?: number;
top_p?: number;
frequency_penalty?: number;
presence_penalty?: number;
response_format?: { type: "text" | "json_object" };
seed?: number;
stop?: string | string[]; // a single stop sequence or a list of them
tools?: any[]; // NOTE(review): untyped tool definitions; unknown[] would be safer upstream
tool_choice?: string | object;
}
/** Invocation parameters accepted for Anthropic models; max_tokens is mandatory. */
interface AnthropicInvocationParameters {
max_tokens: number; // Required for Anthropic
temperature?: number;
top_p?: number;
top_k?: number;
stop_sequences?: string[];
system?: string; // system prompt text
tools?: any[]; // NOTE(review): untyped tool definitions; unknown[] would be safer upstream
tool_choice?: object;
}

Usage Example:
import { createPrompt, promptVersion } from "@arizeai/phoenix-client/prompts";

// Create "customer-support-chat", or add a version if it already exists.
// (Named createdPrompt rather than prompt to avoid redeclaring the
// identifier used in the getPrompt examples.)
const createdPrompt = await createPrompt({
name: "customer-support-chat",
description: "Customer support chatbot prompt",
version: promptVersion({
modelProvider: "OPENAI",
modelName: "gpt-4o",
template: [
{
role: "system",
content: "You are a helpful customer support agent. Be friendly and professional."
},
{
role: "user",
content: "{{user_message}}"
}
],
invocationParameters: {
temperature: 0.7,
max_tokens: 500
}
})
});

Utility function to construct prompt version data declaratively for different model providers.
/**
 * Helper to construct prompt version data declaratively.
 *
 * @param params - Provider-specific input, discriminated on modelProvider
 * @returns Structured PromptVersionData ready to pass to createPrompt()
 */
function promptVersion(params: PromptVersionInput): PromptVersionData;
/** Union of the provider-specific inputs accepted by promptVersion(). */
type PromptVersionInput =
| OpenAIPromptVersionInput
| AzureOpenAIPromptVersionInput
| AnthropicPromptVersionInput
| GooglePromptVersionInput
| DeepSeekPromptVersionInput
| XAIPromptVersionInput
| OllamaPromptVersionInput
| AwsPromptVersionInput;
/** promptVersion() input for OpenAI models. */
interface OpenAIPromptVersionInput {
modelProvider: "OPENAI";
modelName: string;
template: PromptChatMessage[];
description?: string;
templateFormat?: "MUSTACHE" | "F_STRING" | "JINJA2";
invocationParameters?: OpenAIInvocationParameters; // optional for OpenAI
}
/** promptVersion() input for Anthropic models. */
interface AnthropicPromptVersionInput {
modelProvider: "ANTHROPIC";
modelName: string;
template: PromptChatMessage[];
description?: string;
templateFormat?: "MUSTACHE" | "F_STRING" | "JINJA2";
invocationParameters: AnthropicInvocationParameters; // Required — max_tokens is mandatory for Anthropic
}

Usage Examples:
import { promptVersion } from "@arizeai/phoenix-client/prompts";

// Build a version backed by an OpenAI model; invocation parameters are optional here.
const openAIPromptVersion = promptVersion({
modelProvider: "OPENAI",
modelName: "gpt-4o",
description: "Production version",
template: [
{ role: "system", content: "You are an AI assistant." },
{ role: "user", content: "{{question}}" }
],
invocationParameters: { temperature: 0.3, max_tokens: 1000 }
});

// Build a version backed by an Anthropic model; invocationParameters
// (with max_tokens) must be supplied.
const anthropicPromptVersion = promptVersion({
modelProvider: "ANTHROPIC",
modelName: "claude-3-5-sonnet-20241022",
template: [
{ role: "system", content: "You are Claude, an AI assistant." },
{ role: "user", content: "{{user_input}}" }
],
invocationParameters: {
max_tokens: 1000, // Required for Anthropic
temperature: 0.5
}
});

Internal utility functions for prompt processing and retrieval.
/**
 * Internal utility to get a prompt by various selector types.
 *
 * Unlike getPrompt(), the client is required here — internal callers
 * supply it explicitly.
 *
 * @param params - Client and prompt selector
 * @returns Promise resolving to the prompt version, or null if not found
 */
function getPromptBySelector(params: {
client: PhoenixClient; // required (internal API)
prompt: PromptSelector;
}): Promise<PromptVersion | null>;
/**
 * Format prompt messages with variable substitution.
 *
 * @param params - Template messages, variable values, and the template format
 * @returns Formatted messages with variables substituted
 */
function formatPromptMessages(params: {
messages: PromptChatMessage[];
variables?: Record<string, string | number | boolean>; // non-string values are presumably stringified — TODO confirm
templateFormat?: "MUSTACHE" | "F_STRING" | "JINJA2";
}): PromptChatMessage[];

Complete support for major AI model providers with provider-specific configuration options.
Supported Providers: OPENAI, AZURE_OPENAI, ANTHROPIC, GOOGLE, DEEPSEEK, XAI, OLLAMA, and AWS (see PromptModelProvider).
Provider-Specific Features: each provider is configured through its corresponding InvocationParameters variant (e.g. OpenAIInvocationParameters, AnthropicInvocationParameters).
Install with Tessl CLI
npx tessl i tessl/npm-arizeai--phoenix-client