Langfuse API client for universal JavaScript environments providing observability, prompt management, datasets, experiments, and scoring capabilities
The MediaManager provides functionality to resolve media references in Langfuse trace data. Media content (images, audio, video, PDFs, etc.) is stored separately and referenced using special string identifiers. The MediaManager resolves these references back to usable content like base64 data URIs.
Replace media reference strings in an object with base64 data URIs by fetching the actual media content from Langfuse.
/**
* Resolves media reference strings in an object to base64 data URIs
*
* Recursively traverses the input object looking for media reference strings
* in the format "@@@langfuseMedia:...@@@". When found, fetches the actual
* media content and replaces the reference with a base64 data URI.
*
* If fetching fails for a reference, a warning is logged and the reference
* string is left unchanged in the output.
*
* @template T - The type of the object being processed
* @param params - Configuration object for resolving references
* @returns A deep copy of the input with media references resolved
*/
async resolveReferences<T>(
params: LangfuseMediaResolveMediaReferencesParams<T>
): Promise<T>;
type LangfuseMediaResolveMediaReferencesParams<T> = {
/** The object to process for media references */
obj: T;
/** The format to resolve media references to (currently only "base64DataUri" is supported) */
resolveWith: "base64DataUri";
/** Maximum depth to traverse when processing nested objects (default: 10) */
maxDepth?: number;
};

Usage Examples:
import { LangfuseClient } from '@langfuse/client';
const langfuse = new LangfuseClient();
// Simple object with media reference
const obj = {
image: "@@@langfuseMedia:type=image/jpeg|id=abc123|source=bytes@@@",
text: "This is a regular string"
};
const resolved = await langfuse.media.resolveReferences({
obj,
resolveWith: "base64DataUri"
});
// Result:
// {
// image: "data:image/jpeg;base64,/9j/4AAQSkZJRg...",
// text: "This is a regular string"
// }

Nested Objects and Arrays:
// Complex nested structure
const traceData = {
observations: [
{
input: {
message: "Process this image",
image: "@@@langfuseMedia:type=image/png|id=img001|source=bytes@@@"
},
output: {
result: "Processed",
thumbnail: "@@@langfuseMedia:type=image/jpeg|id=img002|source=bytes@@@"
},
metadata: {
attachments: [
"@@@langfuseMedia:type=application/pdf|id=doc001|source=bytes@@@",
"@@@langfuseMedia:type=audio/wav|id=aud001|source=bytes@@@"
]
}
}
]
};
// Resolves all media references throughout the nested structure
const resolved = await langfuse.media.resolveReferences({
obj: traceData,
resolveWith: "base64DataUri"
});
// All media references are now base64 data URIs
console.log(resolved.observations[0].input.image);
// "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAA..."
console.log(resolved.observations[0].metadata.attachments[0]);
// "data:application/pdf;base64,JVBERi0xLjcKJeLjz9MKMSAw..."

Controlling Traversal Depth:
// Deeply nested structure
const deepObject = {
level1: {
level2: {
level3: {
level4: {
level5: {
media: "@@@langfuseMedia:type=image/jpeg|id=deep001|source=bytes@@@"
}
}
}
}
}
};
// Limit traversal depth
const resolved = await langfuse.media.resolveReferences({
obj: deepObject,
resolveWith: "base64DataUri",
maxDepth: 3 // Will not process beyond level 3
});
// Media at level 5 will not be resolved due to maxDepth limit

Resolving Trace Data:
// Fetch trace from API (contains media references)
const trace = await langfuse.api.trace.get('trace-id-123');
// Resolve all media references in the trace
const resolvedTrace = await langfuse.media.resolveReferences({
obj: trace,
resolveWith: "base64DataUri"
});
// Now you can use the resolved media content
if (resolvedTrace.observations) {
for (const obs of resolvedTrace.observations) {
if (obs.input?.image) {
// image is now a base64 data URI, can be displayed in browser
console.log('Image data URI:', obs.input.image);
}
}
}

Error Handling:
// If media fetch fails, the reference string remains unchanged
const obj = {
validMedia: "@@@langfuseMedia:type=image/jpeg|id=valid123|source=bytes@@@",
invalidMedia: "@@@langfuseMedia:type=image/jpeg|id=invalid999|source=bytes@@@"
};
const resolved = await langfuse.media.resolveReferences({
obj,
resolveWith: "base64DataUri"
});
// Valid media is resolved
console.log(resolved.validMedia);
// "data:image/jpeg;base64,/9j/4AAQSkZJRg..."
// Invalid media remains as reference (warning logged)
console.log(resolved.invalidMedia);
// "@@@langfuseMedia:type=image/jpeg|id=invalid999|source=bytes@@@"

parseReferenceString: Parse a media reference string into its component parts (static method).
/**
* Parses a media reference string into a ParsedMediaReference object
*
* Media reference string format:
* "@@@langfuseMedia:type=<mime>|id=<id>|source=<source>@@@"
*
* @param referenceString - The reference string to parse
* @returns Parsed media reference with mediaId, source, and contentType
* @throws Error if the reference string is invalid or missing required fields
*/
static parseReferenceString(
referenceString: string
): ParsedMediaReference;
type ParsedMediaReference = {
/** The unique identifier for the media content */
mediaId: string;
/** The source type of the media (e.g., "bytes", "base64_data_uri") */
source: string;
/** The MIME type of the media content */
contentType: MediaContentType;
};

Usage Examples:
import { MediaManager } from '@langfuse/client';
// Parse a valid reference string
const referenceString = "@@@langfuseMedia:type=image/jpeg|id=abc-123|source=bytes@@@";
const parsed = MediaManager.parseReferenceString(referenceString);
console.log(parsed);
// {
// mediaId: "abc-123",
// source: "bytes",
// contentType: "image/jpeg"
// }

Extracting Information:
// Use parsed information to understand media type
const ref = "@@@langfuseMedia:type=application/pdf|id=doc001|source=base64_data_uri@@@";
const parsed = MediaManager.parseReferenceString(ref);
console.log(`Media ID: ${parsed.mediaId}`); // "doc001"
console.log(`Content Type: ${parsed.contentType}`); // "application/pdf"
console.log(`Source: ${parsed.source}`); // "base64_data_uri"
// Determine media category
if (parsed.contentType.startsWith('image/')) {
console.log('This is an image');
} else if (parsed.contentType.startsWith('audio/')) {
console.log('This is audio');
} else if (parsed.contentType === 'application/pdf') {
console.log('This is a PDF document');
}

Error Handling:
// Invalid format - missing prefix
try {
MediaManager.parseReferenceString("type=image/jpeg|id=123|source=bytes@@@");
} catch (error) {
console.error(error.message);
// "Reference string does not start with '@@@langfuseMedia:type='"
}
// Invalid format - missing suffix
try {
MediaManager.parseReferenceString("@@@langfuseMedia:type=image/jpeg|id=123|source=bytes");
} catch (error) {
console.error(error.message);
// "Reference string does not end with '@@@'"
}
// Invalid format - missing required fields
try {
MediaManager.parseReferenceString("@@@langfuseMedia:type=image/jpeg|id=123@@@");
} catch (error) {
console.error(error.message);
// "Missing required fields in reference string"
}

Finding References in Strings:
// Extract and parse all media references from a string
const text = `
Image 1: @@@langfuseMedia:type=image/png|id=img1|source=bytes@@@
Image 2: @@@langfuseMedia:type=image/jpeg|id=img2|source=bytes@@@
PDF: @@@langfuseMedia:type=application/pdf|id=doc1|source=bytes@@@
`;
const regex = /@@@langfuseMedia:.+?@@@/g;
const matches = text.match(regex);
if (matches) {
const parsed = matches.map(ref => MediaManager.parseReferenceString(ref));
console.log('Found media references:');
parsed.forEach(p => {
console.log(`- ${p.contentType}: ${p.mediaId}`);
});
// - image/png: img1
// - image/jpeg: img2
// - application/pdf: doc1
}

Media Reference String Format:

Media reference strings follow a specific format that encodes media metadata:

@@@langfuseMedia:type=<mime-type>|id=<media-id>|source=<source-type>@@@

Components:
- Prefix "@@@langfuseMedia:" marks the start of a media reference
- type=<mime-type>: the MIME type of the media content (e.g. image/jpeg, application/pdf)
- id=<media-id>: the unique identifier of the media item
- source=<source-type>: the source type of the media (e.g. bytes, base64_data_uri)
- Suffix "@@@" marks the end of the reference

Examples:
// Image reference
"@@@langfuseMedia:type=image/jpeg|id=abc-123-def|source=bytes@@@"
// PDF reference
"@@@langfuseMedia:type=application/pdf|id=doc-456|source=base64_data_uri@@@"
// Audio reference
"@@@langfuseMedia:type=audio/wav|id=sound-789|source=bytes@@@"
// Video reference
"@@@langfuseMedia:type=video/mp4|id=clip-012|source=bytes@@@"

MediaContentType: Union type representing all supported MIME types for media content.
type MediaContentType =
// Images
| "image/png"
| "image/jpeg"
| "image/jpg"
| "image/webp"
| "image/gif"
| "image/svg+xml"
| "image/tiff"
| "image/bmp"
// Audio
| "audio/mpeg"
| "audio/mp3"
| "audio/wav"
| "audio/ogg"
| "audio/oga"
| "audio/aac"
| "audio/mp4"
| "audio/flac"
// Video
| "video/mp4"
| "video/webm"
// Text
| "text/plain"
| "text/html"
| "text/css"
| "text/csv"
// Documents
| "application/pdf"
| "application/msword"
| "application/vnd.ms-excel"
// Other
| "application/zip"
| "application/json"
| "application/xml"
| "application/octet-stream";
// Constants for type-safe access
const MediaContentType = {
// Images
ImagePng: "image/png",
ImageJpeg: "image/jpeg",
ImageJpg: "image/jpg",
ImageWebp: "image/webp",
ImageGif: "image/gif",
ImageSvgXml: "image/svg+xml",
ImageTiff: "image/tiff",
ImageBmp: "image/bmp",
// Audio
AudioMpeg: "audio/mpeg",
AudioMp3: "audio/mp3",
AudioWav: "audio/wav",
AudioOgg: "audio/ogg",
AudioOga: "audio/oga",
AudioAac: "audio/aac",
AudioMp4: "audio/mp4",
AudioFlac: "audio/flac",
// Video
VideoMp4: "video/mp4",
VideoWebm: "video/webm",
// Text
TextPlain: "text/plain",
TextHtml: "text/html",
TextCss: "text/css",
TextCsv: "text/csv",
// Documents
ApplicationPdf: "application/pdf",
ApplicationMsword: "application/msword",
ApplicationMsExcel: "application/vnd.ms-excel",
// Other
ApplicationZip: "application/zip",
ApplicationJson: "application/json",
ApplicationXml: "application/xml",
ApplicationOctetStream: "application/octet-stream",
} as const;

ParsedMediaReference: Result type from parsing a media reference string.
type ParsedMediaReference = {
/** The unique identifier for the media content */
mediaId: string;
/** The source type of the media */
source: string;
/** The MIME type of the media content */
contentType: MediaContentType;
};

LangfuseMediaResolveMediaReferencesParams: Parameters for the resolveReferences method.
type LangfuseMediaResolveMediaReferencesParams<T> = {
/** The object to process for media references */
obj: T;
/** The format to resolve media references to */
resolveWith: "base64DataUri";
/** Maximum depth to traverse when processing nested objects (default: 10) */
maxDepth?: number;
};

Example: Displaying resolved images in the browser

import { LangfuseClient } from '@langfuse/client';
const langfuse = new LangfuseClient();
// Fetch trace with image references
const trace = await langfuse.api.trace.get('trace-123');
// Resolve image references
const resolved = await langfuse.media.resolveReferences({
obj: trace,
resolveWith: "base64DataUri"
});
// Use resolved images in HTML
if (resolved.observations) {
for (const obs of resolved.observations) {
if (obs.input?.screenshot) {
// Create an img element with the data URI
const img = document.createElement('img');
img.src = obs.input.screenshot; // data:image/png;base64,...
document.body.appendChild(img);
}
}
}

// Object with PDF reference
const data = {
document: "@@@langfuseMedia:type=application/pdf|id=doc123|source=bytes@@@"
};
// Resolve to base64 data URI
const resolved = await langfuse.media.resolveReferences({
obj: data,
resolveWith: "base64DataUri"
});
// Use in iframe or download
console.log(resolved.document);
// "data:application/pdf;base64,JVBERi0xLjcK..."
// Display in iframe
const iframe = document.createElement('iframe');
iframe.src = resolved.document;
document.body.appendChild(iframe);

// Trace with audio recording reference
const trace = {
input: {
recording: "@@@langfuseMedia:type=audio/wav|id=rec001|source=bytes@@@"
}
};
// Resolve audio reference
const resolved = await langfuse.media.resolveReferences({
obj: trace,
resolveWith: "base64DataUri"
});
// Play audio in browser
const audio = new Audio(resolved.input.recording);
audio.play();
// Or use in HTML audio element
const audioElement = document.createElement('audio');
audioElement.src = resolved.input.recording;
audioElement.controls = true;
document.body.appendChild(audioElement);

// Video reference in observation
const observation = {
output: {
generatedVideo: "@@@langfuseMedia:type=video/mp4|id=vid001|source=bytes@@@"
}
};
// Resolve video
const resolved = await langfuse.media.resolveReferences({
obj: observation,
resolveWith: "base64DataUri"
});
// Display video
const video = document.createElement('video');
video.src = resolved.output.generatedVideo;
video.controls = true;
video.width = 640;
document.body.appendChild(video);

// Fetch multiple traces
const traces = await langfuse.api.trace.list({ limit: 50 });
// Resolve all media references across all traces
const resolvedTraces = await langfuse.media.resolveReferences({
obj: traces,
resolveWith: "base64DataUri"
});
// Process each trace with resolved media
for (const trace of resolvedTraces.data) {
console.log(`Processing trace: ${trace.id}`);
// Access resolved media content
if (trace.observations) {
for (const obs of trace.observations) {
if (obs.input?.image?.startsWith('data:image/')) {
console.log('Found resolved image');
}
}
}
}

// Parse references first to decide what to resolve
const obj = {
largeVideo: "@@@langfuseMedia:type=video/mp4|id=large001|source=bytes@@@",
smallImage: "@@@langfuseMedia:type=image/jpeg|id=small001|source=bytes@@@"
};
// Extract reference strings
const videoRef = obj.largeVideo;
const imageRef = obj.smallImage;
// Parse to check content type
const videoParsed = MediaManager.parseReferenceString(videoRef);
const imageParsed = MediaManager.parseReferenceString(imageRef);
// Only resolve images (skip large videos)
const filtered = {
smallImage: obj.smallImage,
// Exclude largeVideo
};
const resolved = await langfuse.media.resolveReferences({
obj: filtered,
resolveWith: "base64DataUri"
});
console.log('Resolved only small image, skipped large video');

Memory Usage: Be mindful of memory usage when resolving large media files or processing many objects:
// Good: Process in batches
const traces = await langfuse.api.trace.list({ limit: 10 });
const resolved = await langfuse.media.resolveReferences({
obj: traces,
resolveWith: "base64DataUri"
});
// Less ideal: Processing too many large files at once
// const manyTraces = await langfuse.api.trace.list({ limit: 1000 });
// May cause memory issues if traces contain large media

Graceful Failure Handling: The resolveReferences method handles individual fetch failures gracefully:
// Some references may fail to fetch, but the method continues
const obj = {
validImage: "@@@langfuseMedia:type=image/jpeg|id=valid|source=bytes@@@",
missingImage: "@@@langfuseMedia:type=image/jpeg|id=deleted|source=bytes@@@",
anotherValidImage: "@@@langfuseMedia:type=image/png|id=valid2|source=bytes@@@"
};
const resolved = await langfuse.media.resolveReferences({
obj,
resolveWith: "base64DataUri"
});
// Valid images are resolved, invalid ones remain as references
// Warnings are logged for failed fetches

Caching: Consider caching resolved media to avoid repeated fetches:
const cache = new Map<string, string>();
async function getResolvedMedia(referenceString: string): Promise<string> {
if (cache.has(referenceString)) {
return cache.get(referenceString)!;
}
const resolved = await langfuse.media.resolveReferences({
obj: { media: referenceString },
resolveWith: "base64DataUri"
});
cache.set(referenceString, resolved.media);
return resolved.media;
}
// Use cached resolver
const image1 = await getResolvedMedia(
"@@@langfuseMedia:type=image/jpeg|id=abc|source=bytes@@@"
);
const image2 = await getResolvedMedia(
"@@@langfuseMedia:type=image/jpeg|id=abc|source=bytes@@@" // Cache hit
);

Type Safety: Use TypeScript generics to maintain type information:
interface TraceWithMedia {
id: string;
observations: Array<{
input?: {
image?: string;
};
}>;
}
// Type is preserved through resolution
const trace: TraceWithMedia = await langfuse.api.trace.get('trace-123');
const resolved: TraceWithMedia = await langfuse.media.resolveReferences({
obj: trace,
resolveWith: "base64DataUri"
});
// TypeScript knows the structure
console.log(resolved.observations[0].input?.image);

Traversal Depth: Use maxDepth to prevent excessive recursion on deeply nested structures:
// Limit traversal for performance
const resolved = await langfuse.media.resolveReferences({
obj: veryNestedObject,
resolveWith: "base64DataUri",
maxDepth: 5 // Don't traverse deeper than 5 levels
});

// React example
import { LangfuseClient } from '@langfuse/client';
import { useState, useEffect } from 'react';
function TraceViewer({ traceId }: { traceId: string }) {
const [resolvedTrace, setResolvedTrace] = useState(null);
const langfuse = new LangfuseClient();
useEffect(() => {
async function loadTrace() {
const trace = await langfuse.api.trace.get(traceId);
const resolved = await langfuse.media.resolveReferences({
obj: trace,
resolveWith: "base64DataUri"
});
setResolvedTrace(resolved);
}
loadTrace();
}, [traceId]);
if (!resolvedTrace) return <div>Loading...</div>;
return (
<div>
{resolvedTrace.observations?.map((obs, idx) => (
<div key={idx}>
{obs.input?.image && (
<img src={obs.input.image} alt="Trace input" />
)}
</div>
))}
</div>
);
}

// Node.js server example
import { LangfuseClient } from '@langfuse/client';
import express from 'express';
const app = express();
const langfuse = new LangfuseClient();
app.get('/api/trace/:traceId', async (req, res) => {
try {
const trace = await langfuse.api.trace.get(req.params.traceId);
// Resolve media references before sending to client
const resolved = await langfuse.media.resolveReferences({
obj: trace,
resolveWith: "base64DataUri"
});
res.json(resolved);
} catch (error) {
res.status(500).json({ error: error.message });
}
});
app.listen(3000);

// ETL pipeline example
import { LangfuseClient } from '@langfuse/client';
const langfuse = new LangfuseClient();
async function exportTracesWithMedia() {
const traces = await langfuse.api.trace.list({ limit: 100 });
// Resolve all media in batch
const resolved = await langfuse.media.resolveReferences({
obj: traces,
resolveWith: "base64DataUri"
});
// Export to file or database with resolved media
for (const trace of resolved.data) {
await saveToDatabase(trace); // Media is now embedded as base64
}
}

Install with Tessl CLI:
npx tessl i tessl/npm-langfuse--client