Core functions and utilities for Langfuse packages including API client, logging, media handling, and OpenTelemetry tracing attributes
SDK identifiers and standardized OpenTelemetry span attribute keys for integrating Langfuse tracing with OpenTelemetry instrumentation.
Core constants identifying the Langfuse SDK and tracer.
const LANGFUSE_TRACER_NAME: string;
const LANGFUSE_SDK_VERSION: string;
const LANGFUSE_SDK_NAME: string;

Import:
import {
LANGFUSE_TRACER_NAME,
LANGFUSE_SDK_VERSION,
LANGFUSE_SDK_NAME
} from '@langfuse/core';

The name identifier for the Langfuse tracer used in OpenTelemetry instrumentation.
const LANGFUSE_TRACER_NAME: string; // Value: "langfuse-sdk"

Usage Example:
import { trace } from '@opentelemetry/api';
import { LANGFUSE_TRACER_NAME } from '@langfuse/core';
const tracer = trace.getTracer(LANGFUSE_TRACER_NAME);
const span = tracer.startSpan('operation-name');

The current version of the Langfuse SDK, dynamically loaded from package.json.
const LANGFUSE_SDK_VERSION: string; // Value: "4.2.0" (current version)

Usage Example:
import { LANGFUSE_SDK_VERSION } from '@langfuse/core';
console.log(`Langfuse SDK version: ${LANGFUSE_SDK_VERSION}`);
// Used in API client headers
const client = new LangfuseAPIClient({
environment: 'https://cloud.langfuse.com',
xLangfuseSdkVersion: LANGFUSE_SDK_VERSION
});

The platform/language identifier for the SDK.
const LANGFUSE_SDK_NAME: string; // Value: "javascript"

Usage Example:
import { LANGFUSE_SDK_NAME } from '@langfuse/core';
const client = new LangfuseAPIClient({
environment: 'https://cloud.langfuse.com',
xLangfuseSdkName: LANGFUSE_SDK_NAME
});

Standardized attribute keys for annotating OpenTelemetry spans with Langfuse trace and observation metadata.
enum LangfuseOtelSpanAttributes {
// Trace attributes
TRACE_NAME = "langfuse.trace.name",
TRACE_USER_ID = "user.id",
TRACE_SESSION_ID = "session.id",
TRACE_TAGS = "langfuse.trace.tags",
TRACE_PUBLIC = "langfuse.trace.public",
TRACE_METADATA = "langfuse.trace.metadata",
TRACE_INPUT = "langfuse.trace.input",
TRACE_OUTPUT = "langfuse.trace.output",
// Observation attributes
OBSERVATION_TYPE = "langfuse.observation.type",
OBSERVATION_METADATA = "langfuse.observation.metadata",
OBSERVATION_LEVEL = "langfuse.observation.level",
OBSERVATION_STATUS_MESSAGE = "langfuse.observation.status_message",
OBSERVATION_INPUT = "langfuse.observation.input",
OBSERVATION_OUTPUT = "langfuse.observation.output",
// Generation attributes (for LLM generations)
OBSERVATION_COMPLETION_START_TIME = "langfuse.observation.completion_start_time",
OBSERVATION_MODEL = "langfuse.observation.model.name",
OBSERVATION_MODEL_PARAMETERS = "langfuse.observation.model.parameters",
OBSERVATION_USAGE_DETAILS = "langfuse.observation.usage_details",
OBSERVATION_COST_DETAILS = "langfuse.observation.cost_details",
OBSERVATION_PROMPT_NAME = "langfuse.observation.prompt.name",
OBSERVATION_PROMPT_VERSION = "langfuse.observation.prompt.version",
// General attributes
ENVIRONMENT = "langfuse.environment",
RELEASE = "langfuse.release",
VERSION = "langfuse.version",
// Internal attributes
AS_ROOT = "langfuse.internal.as_root",
// Compatibility attributes (legacy)
TRACE_COMPAT_USER_ID = "langfuse.user.id",
TRACE_COMPAT_SESSION_ID = "langfuse.session.id"
}

Import:
import { LangfuseOtelSpanAttributes } from '@langfuse/core';

Attributes for trace-level metadata and lifecycle information.
enum LangfuseOtelSpanAttributes {
TRACE_NAME = "langfuse.trace.name", // Name of the trace
TRACE_USER_ID = "user.id", // User identifier (OpenTelemetry standard)
TRACE_SESSION_ID = "session.id", // Session identifier (OpenTelemetry standard)
TRACE_TAGS = "langfuse.trace.tags", // Array of tags
TRACE_PUBLIC = "langfuse.trace.public", // Public visibility flag (boolean)
TRACE_METADATA = "langfuse.trace.metadata", // Trace metadata (JSON object)
TRACE_INPUT = "langfuse.trace.input", // Trace input data (JSON)
TRACE_OUTPUT = "langfuse.trace.output", // Trace output data (JSON)
}

Usage Example:
import { trace } from '@opentelemetry/api';
import { LangfuseOtelSpanAttributes } from '@langfuse/core';
const tracer = trace.getTracer('langfuse-sdk');
const span = tracer.startSpan('user-request');
// Set trace attributes
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_NAME, 'chat-completion');
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_USER_ID, 'user-123');
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_SESSION_ID, 'session-456');
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_TAGS, JSON.stringify(['production', 'api']));
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_PUBLIC, false);
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_METADATA, JSON.stringify({
region: 'us-east-1',
version: 'v2'
}));
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_INPUT, JSON.stringify({
prompt: 'Hello, world!'
}));
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_OUTPUT, JSON.stringify({
response: 'Hi there!'
}));Attributes for observation-level metadata (spans, generations, events).
enum LangfuseOtelSpanAttributes {
OBSERVATION_TYPE = "langfuse.observation.type", // Type (SPAN, GENERATION, EVENT, etc.)
OBSERVATION_METADATA = "langfuse.observation.metadata", // Observation metadata (JSON)
OBSERVATION_LEVEL = "langfuse.observation.level", // Level (DEBUG, DEFAULT, WARNING, ERROR)
OBSERVATION_STATUS_MESSAGE = "langfuse.observation.status_message", // Status message
OBSERVATION_INPUT = "langfuse.observation.input", // Input data (JSON)
OBSERVATION_OUTPUT = "langfuse.observation.output", // Output data (JSON)
}

Usage Example:
import { LangfuseOtelSpanAttributes } from '@langfuse/core';
// Set observation attributes
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_TYPE, 'GENERATION');
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_LEVEL, 'DEFAULT');
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_METADATA, JSON.stringify({
retryCount: 0,
cacheHit: false
}));
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_INPUT, JSON.stringify({
messages: [{ role: 'user', content: 'Hello' }]
}));
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_OUTPUT, JSON.stringify({
message: { role: 'assistant', content: 'Hi!' }
}));
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_STATUS_MESSAGE, 'Success');

Specialized attributes for LLM generation observations.
enum LangfuseOtelSpanAttributes {
OBSERVATION_COMPLETION_START_TIME = "langfuse.observation.completion_start_time", // ISO 8601 timestamp
OBSERVATION_MODEL = "langfuse.observation.model.name", // Model name (e.g., "gpt-4")
OBSERVATION_MODEL_PARAMETERS = "langfuse.observation.model.parameters", // Model parameters (JSON)
OBSERVATION_USAGE_DETAILS = "langfuse.observation.usage_details", // Usage metrics (JSON)
OBSERVATION_COST_DETAILS = "langfuse.observation.cost_details", // Cost metrics (JSON)
OBSERVATION_PROMPT_NAME = "langfuse.observation.prompt.name", // Prompt template name
OBSERVATION_PROMPT_VERSION = "langfuse.observation.prompt.version", // Prompt version number
}

Usage Example:
import { trace } from '@opentelemetry/api';
import { LangfuseOtelSpanAttributes } from '@langfuse/core';
// LLM generation tracking
const tracer = trace.getTracer('langfuse-sdk');
const generationSpan = tracer.startSpan('openai-completion');
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_TYPE, 'GENERATION');
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_MODEL, 'gpt-4-turbo');
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_COMPLETION_START_TIME,
new Date().toISOString()
);
// Model parameters
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_MODEL_PARAMETERS,
JSON.stringify({
temperature: 0.7,
max_tokens: 1000,
top_p: 1
})
);
// Usage tracking
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_USAGE_DETAILS,
JSON.stringify({
promptTokens: 50,
completionTokens: 100,
totalTokens: 150
})
);
// Cost tracking
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_COST_DETAILS,
JSON.stringify({
promptCost: 0.0005,
completionCost: 0.002,
totalCost: 0.0025
})
);
// Prompt tracking
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_PROMPT_NAME, 'chat-assistant');
generationSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_PROMPT_VERSION, '3');

Environment and release identification attributes.
enum LangfuseOtelSpanAttributes {
ENVIRONMENT = "langfuse.environment", // Environment identifier (e.g., "production")
RELEASE = "langfuse.release", // Release identifier (e.g., "v1.2.3")
VERSION = "langfuse.version", // Version identifier
}

Usage Example:
import { LangfuseOtelSpanAttributes } from '@langfuse/core';
// Set environment and release
span.setAttribute(LangfuseOtelSpanAttributes.ENVIRONMENT, 'production');
span.setAttribute(LangfuseOtelSpanAttributes.RELEASE, 'v1.2.3');
span.setAttribute(LangfuseOtelSpanAttributes.VERSION, '2024.10');

Note: Environment identifiers must be lowercase alphanumeric with hyphens/underscores and cannot start with 'langfuse'.
Internal attributes for SDK implementation details.
enum LangfuseOtelSpanAttributes {
AS_ROOT = "langfuse.internal.as_root", // Internal flag for root spans (boolean)
}

Legacy attribute keys for backward compatibility.
enum LangfuseOtelSpanAttributes {
TRACE_COMPAT_USER_ID = "langfuse.user.id", // Legacy user ID attribute
TRACE_COMPAT_SESSION_ID = "langfuse.session.id", // Legacy session ID attribute
}

Note: Prefer the standard OpenTelemetry attributes (TRACE_USER_ID, TRACE_SESSION_ID) for new implementations.
import { trace } from '@opentelemetry/api';
import {
LANGFUSE_TRACER_NAME,
LangfuseOtelSpanAttributes
} from '@langfuse/core';
const tracer = trace.getTracer(LANGFUSE_TRACER_NAME);
async function handleUserRequest(userId: string, sessionId: string, input: any) {
const span = tracer.startSpan('user-request');
try {
// Set trace-level attributes
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_NAME, 'user-chat-request');
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_USER_ID, userId);
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_SESSION_ID, sessionId);
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_TAGS, JSON.stringify(['chat', 'api']));
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_INPUT, JSON.stringify(input));
span.setAttribute(LangfuseOtelSpanAttributes.ENVIRONMENT, 'production');
span.setAttribute(LangfuseOtelSpanAttributes.RELEASE, 'v1.2.3');
// Process request...
const output = await processRequest(input);
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_OUTPUT, JSON.stringify(output));
span.end();
return output;
} catch (error) {
span.recordException(error);
span.end();
throw error;
}
}

import { trace } from '@opentelemetry/api';
import { LangfuseOtelSpanAttributes } from '@langfuse/core';
async function trackLLMGeneration(prompt: string) {
const tracer = trace.getTracer('langfuse-sdk');
const span = tracer.startSpan('llm-generation');
// Mark as generation type
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_TYPE, 'GENERATION');
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_MODEL, 'gpt-4');
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_COMPLETION_START_TIME,
new Date().toISOString()
);
// Model configuration
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_MODEL_PARAMETERS,
JSON.stringify({
temperature: 0.7,
max_tokens: 1000
})
);
// Input
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_INPUT,
JSON.stringify({ messages: [{ role: 'user', content: prompt }] })
);
try {
// Call LLM...
const response = await callOpenAI(prompt);
// Output
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_OUTPUT,
JSON.stringify({ message: response })
);
// Usage and cost
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_USAGE_DETAILS,
JSON.stringify({
promptTokens: response.usage.prompt_tokens,
completionTokens: response.usage.completion_tokens,
totalTokens: response.usage.total_tokens
})
);
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_COST_DETAILS,
JSON.stringify({
promptCost: response.usage.prompt_tokens * 0.00001,
completionCost: response.usage.completion_tokens * 0.00003,
totalCost: (response.usage.prompt_tokens * 0.00001) +
(response.usage.completion_tokens * 0.00003)
})
);
span.end();
return response;
} catch (error) {
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_LEVEL, 'ERROR');
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_STATUS_MESSAGE,
error.message
);
span.recordException(error);
span.end();
throw error;
}
}

import { trace, context } from '@opentelemetry/api';
import { LangfuseOtelSpanAttributes } from '@langfuse/core';
const tracer = trace.getTracer('langfuse-sdk');
async function processWithSteps(input: any) {
// Root span (trace)
const rootSpan = tracer.startSpan('processing-pipeline');
rootSpan.setAttribute(LangfuseOtelSpanAttributes.TRACE_NAME, 'data-pipeline');
return context.with(trace.setSpan(context.active(), rootSpan), async () => {
// Child span 1
const retrieveSpan = tracer.startSpan('retrieve-data');
retrieveSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_TYPE, 'SPAN');
const data = await retrieveData(input);
retrieveSpan.end();
// Child span 2 (LLM generation)
const generateSpan = tracer.startSpan('generate-response');
generateSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_TYPE, 'GENERATION');
generateSpan.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_MODEL, 'gpt-4');
const response = await generateResponse(data);
generateSpan.end();
rootSpan.end();
return response;
});
}

import { LangfuseOtelSpanAttributes } from '@langfuse/core';
// Rich metadata
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_METADATA, JSON.stringify({
userId: 'user-123',
region: 'us-east-1',
tier: 'premium',
requestId: 'req-456',
features: ['feature-a', 'feature-b']
}));
// Tags for filtering
span.setAttribute(LangfuseOtelSpanAttributes.TRACE_TAGS,
JSON.stringify(['production', 'api', 'v2'])
);
// Observation metadata
span.setAttribute(LangfuseOtelSpanAttributes.OBSERVATION_METADATA, JSON.stringify({
cacheHit: false,
retryCount: 0,
processingTime: 1250
}));

const LANGFUSE_TRACER_NAME: string;
const LANGFUSE_SDK_VERSION: string;
const LANGFUSE_SDK_NAME: string;
enum LangfuseOtelSpanAttributes {
TRACE_NAME = "langfuse.trace.name",
TRACE_USER_ID = "user.id",
TRACE_SESSION_ID = "session.id",
TRACE_TAGS = "langfuse.trace.tags",
TRACE_PUBLIC = "langfuse.trace.public",
TRACE_METADATA = "langfuse.trace.metadata",
TRACE_INPUT = "langfuse.trace.input",
TRACE_OUTPUT = "langfuse.trace.output",
OBSERVATION_TYPE = "langfuse.observation.type",
OBSERVATION_METADATA = "langfuse.observation.metadata",
OBSERVATION_LEVEL = "langfuse.observation.level",
OBSERVATION_STATUS_MESSAGE = "langfuse.observation.status_message",
OBSERVATION_INPUT = "langfuse.observation.input",
OBSERVATION_OUTPUT = "langfuse.observation.output",
OBSERVATION_COMPLETION_START_TIME = "langfuse.observation.completion_start_time",
OBSERVATION_MODEL = "langfuse.observation.model.name",
OBSERVATION_MODEL_PARAMETERS = "langfuse.observation.model.parameters",
OBSERVATION_USAGE_DETAILS = "langfuse.observation.usage_details",
OBSERVATION_COST_DETAILS = "langfuse.observation.cost_details",
OBSERVATION_PROMPT_NAME = "langfuse.observation.prompt.name",
OBSERVATION_PROMPT_VERSION = "langfuse.observation.prompt.version",
ENVIRONMENT = "langfuse.environment",
RELEASE = "langfuse.release",
VERSION = "langfuse.version",
AS_ROOT = "langfuse.internal.as_root",
TRACE_COMPAT_USER_ID = "langfuse.user.id",
TRACE_COMPAT_SESSION_ID = "langfuse.session.id"
}

Best practices:
- Prefer TRACE_USER_ID and TRACE_SESSION_ID over the compatibility versions
- Use OBSERVATION_COST_DETAILS for accurate billing alongside OBSERVATION_USAGE_DETAILS
- Set OBSERVATION_LEVEL to 'ERROR' and include OBSERVATION_STATUS_MESSAGE for failures
- Use OBSERVATION_PROMPT_NAME and OBSERVATION_PROMPT_VERSION for generations
- Set OBSERVATION_TYPE appropriately (SPAN, GENERATION, EVENT, etc.)

Install with Tessl CLI:
npx tessl i tessl/npm-langfuse--core