A TypeScript client for the Phoenix API providing AI observability, prompt management, datasets, experiments, and tracing capabilities.
Format conversion utilities for seamless integration with popular LLM provider SDKs including OpenAI, Anthropic, and Vercel AI SDK. Enables easy conversion of Phoenix prompts to SDK-specific formats.
Convert Phoenix prompts to various SDK formats with variable substitution support.
/**
* Convert a Phoenix prompt to a specific SDK format
* @param params - SDK conversion parameters including prompt, variables, and target SDK
* @returns SDK-specific format ready for use
*/
function toSDK<T extends SupportedSDK, V extends Variables = Variables>(
params: ToSDKParams<T, V> & SDKParams<T>
): ReturnType<SDKConverter<T>>;
type ToSDKParams<T extends SupportedSDK, V extends Variables = Variables> = {
sdk: T;
variables?: V;
};
type SDKParams<T extends SupportedSDK> = {
prompt: PromptVersion;
};
type SupportedSDK = "openai" | "anthropic" | "ai";
type Variables = Record<string, string | { toString: () => string }>;
Usage Example:
import { toSDK } from "@arizeai/phoenix-client/prompts";
import { getPrompt } from "@arizeai/phoenix-client/prompts";
// Get a prompt from Phoenix
const prompt = await getPrompt({
prompt: { name: "customer-support", tag: "production" }
});
if (prompt) {
// Convert to OpenAI format
const openaiFormat = toSDK({
prompt,
variables: {
user_name: "Alice",
issue_type: "billing"
},
sdk: "openai"
});
// Use with OpenAI SDK
// (note: this import must appear at the top level of your module,
// not inside a block — shown here inline for readability only)
import OpenAI from "openai";
const openai = new OpenAI();
const response = await openai.chat.completions.create({
model: prompt.model_name,
...openaiFormat,
...prompt.invocation_parameters.openai
});
}
Convert Phoenix prompts to OpenAI SDK format with full parameter support.
/**
* Convert Phoenix prompt to OpenAI SDK format
* @param params - OpenAI conversion parameters
* @returns OpenAI-compatible chat completion format
*/
function toOpenAI(params: {
prompt: PromptVersion;
variables?: Variables;
}): OpenAIFormat;
interface OpenAIFormat {
messages: OpenAIMessage[];
}
interface OpenAIMessage {
role: "system" | "user" | "assistant" | "tool";
content: string;
name?: string;
tool_call_id?: string;
tool_calls?: any[];
}
Usage Examples:
import { toOpenAI } from "@arizeai/phoenix-client/prompts";
import OpenAI from "openai";
const prompt = await getPrompt({ prompt: { name: "qa-assistant" } });
const openai = new OpenAI();
// Convert and use
const { messages } = toOpenAI({
prompt,
variables: { question: "What is machine learning?" }
});
const response = await openai.chat.completions.create({
model: prompt.model_name,
messages,
temperature: prompt.invocation_parameters.openai?.temperature ?? 0.7
});
console.log(response.choices[0].message.content);
Convert Phoenix prompts to Anthropic SDK format with Claude-specific formatting.
/**
* Convert Phoenix prompt to Anthropic SDK format
* @param params - Anthropic conversion parameters
* @returns Anthropic-compatible message format
*/
function toAnthropic(params: {
prompt: PromptVersion;
variables?: Variables;
}): AnthropicFormat;
interface AnthropicFormat {
messages: AnthropicMessage[];
system?: string;
}
interface AnthropicMessage {
role: "user" | "assistant";
content: string | AnthropicContentBlock[];
}
interface AnthropicContentBlock {
type: "text" | "image";
text?: string;
source?: {
type: "base64";
media_type: string;
data: string;
};
}
Usage Examples:
import { toAnthropic } from "@arizeai/phoenix-client/prompts";
import Anthropic from "@anthropic-ai/sdk";
const prompt = await getPrompt({ prompt: { name: "writing-assistant" } });
const anthropic = new Anthropic();
// Convert and use
const { messages, system } = toAnthropic({
prompt,
variables: {
topic: "sustainable energy",
tone: "professional"
}
});
const response = await anthropic.messages.create({
model: prompt.model_name,
max_tokens: prompt.invocation_parameters.anthropic.max_tokens,
system,
messages,
temperature: prompt.invocation_parameters.anthropic.temperature ?? 0.7
});
console.log(response.content[0].text);
Convert Phoenix prompts to Vercel AI SDK format for streamlined AI application development.
/**
* Convert Phoenix prompt to Vercel AI SDK format
* @param params - Vercel AI conversion parameters
* @returns Vercel AI-compatible message format
*/
function toAI(params: {
prompt: PromptVersion;
variables?: Variables;
}): VercelAIFormat;
interface VercelAIFormat {
messages: VercelAIMessage[];
}
interface VercelAIMessage {
role: "system" | "user" | "assistant" | "tool";
content: string;
name?: string;
toolInvocations?: any[];
}
Usage Examples:
import { toAI } from "@arizeai/phoenix-client/prompts";
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";
const prompt = await getPrompt({ prompt: { name: "code-assistant" } });
// Convert and use with Vercel AI SDK
const { messages } = toAI({
prompt,
variables: {
programming_language: "TypeScript",
task_description: "Create a REST API endpoint"
}
});
const { text } = await generateText({
model: openai(prompt.model_name),
messages,
temperature: prompt.invocation_parameters.openai?.temperature ?? 0.7
});
console.log(text);
All SDK conversion functions support Mustache-style variable substitution in prompt templates.
Variable Syntax:
{{variable_name}} - Simple variable substitution
{{#if variable}}...{{/if}} - Conditional blocks (where supported)
{{#each items}}...{{/each}} - Iteration blocks (where supported)
Variable Types:
interface Variables {
[key: string]: string | number | boolean | string[] | Record<string, any>;
}
Usage Examples:
// Simple variable substitution
const result = toSDK({
prompt,
variables: {
user_name: "Alice",
temperature_value: 0.7,
is_premium_user: true
},
sdk: "openai"
});
// Complex variable substitution
const result = toSDK({
prompt,
variables: {
user_profile: {
name: "Alice",
preferences: ["tech", "science"],
subscription: "premium"
},
conversation_context: [
"Previous question about AI",
"User interested in machine learning"
]
},
sdk: "anthropic"
});
Each SDK conversion maintains compatibility with provider-specific parameters and features.
OpenAI Parameters:
// Phoenix prompt with OpenAI parameters
const prompt = await createPrompt({
name: "openai-optimized",
version: promptVersion({
modelProvider: "OPENAI",
modelName: "gpt-4o",
template: [...],
invocationParameters: {
temperature: 0.7,
max_tokens: 2000,
top_p: 0.9,
frequency_penalty: 0.1,
presence_penalty: 0.1,
response_format: { type: "json_object" },
tools: [...] // Function calling tools
}
})
});
const openaiFormat = toOpenAI({ prompt });
// Use all parameters when calling OpenAI
Anthropic Parameters:
// Phoenix prompt with Anthropic parameters
const prompt = await createPrompt({
name: "anthropic-optimized",
version: promptVersion({
modelProvider: "ANTHROPIC",
modelName: "claude-3-5-sonnet-20241022",
template: [...],
invocationParameters: {
max_tokens: 4000, // Required
temperature: 0.5,
top_p: 0.9,
top_k: 40,
stop_sequences: ["</thinking>"]
}
})
});
const anthropicFormat = toAnthropic({ prompt });
// Parameters preserved for Anthropic API calls
Common patterns for integrating Phoenix prompts with different SDKs.
Dynamic Model Selection:
const prompt = await getPrompt({ prompt: { name: "multi-provider-prompt" } });
// Route to appropriate SDK based on model provider
switch (prompt.model_provider) {
case "OPENAI":
const openaiResult = toOpenAI({ prompt, variables });
return await callOpenAI(openaiResult);
case "ANTHROPIC":
const anthropicResult = toAnthropic({ prompt, variables });
return await callAnthropic(anthropicResult);
case "GOOGLE":
// Handle Google models
break;
}
A/B Testing with Prompts:
const promptA = await getPrompt({ prompt: { name: "variant-a" } });
const promptB = await getPrompt({ prompt: { name: "variant-b" } });
const variant = Math.random() < 0.5 ? promptA : promptB;
const format = toSDK({ prompt: variant, variables, sdk: "openai" });
// Track which variant was used for analysis
await trackExperiment({
variant: variant.id,
format: format
});
Prompt Caching:
// Cache converted formats for performance
const cache = new Map<string, any>();
async function getCachedPrompt(promptId: string, variables: Variables, sdk: SupportedSDK) {
const key = `${promptId}-${JSON.stringify(variables)}-${sdk}`;
if (!cache.has(key)) {
const prompt = await getPrompt({ prompt: { promptId } });
const format = toSDK({ prompt, variables, sdk });
cache.set(key, format);
}
return cache.get(key);
}
Install with Tessl CLI
npx tessl i tessl/npm-arizeai--phoenix-client