tessl install tessl/npm-langsmith@0.4.3

TypeScript client SDK for the LangSmith LLM tracing, evaluation, and monitoring platform.
SDK wrappers provide automatic tracing for popular AI/ML SDKs and libraries by instrumenting their method calls. The LangSmith SDK includes generic wrapper functionality along with specialized wrappers for OpenAI and other providers.
Package: langsmith (npm). Install with `npm install langsmith openai`. Import paths: `langsmith/wrappers` and `langsmith/wrappers/openai`.

The generic SDK wrapper provides automatic tracing for any SDK or object by creating a proxy that intercepts method calls.
Creates a traced wrapper around any SDK object.
/**
* Wrap arbitrary SDK for automatic tracing of method calls
* @param sdk - The SDK object to wrap (can be any object with methods)
* @param options - Configuration options for the wrapper
* @returns Wrapped SDK with identical interface but traced method calls
* @template T - The type of the SDK object being wrapped
*/
function wrapSDK<T extends object>(sdk: T, options?: WrapSDKOptions): T;

Options for configuring the generic SDK wrapper.
interface WrapSDKOptions {
/**
* Name for the wrapper (appears in trace names)
* @default "wrapped-sdk"
*/
name?: string;
/**
* LangSmith client instance to use for tracing
* @default Uses default client from environment
*/
client?: Client;
/**
* Project name where traces will be logged
* @default Uses LANGCHAIN_PROJECT or auto-generated name
*/
projectName?: string;
/**
* Run name for traced operations
* @deprecated Use `name` instead
* @default Uses method name
*/
runName?: string;
/**
* Additional metadata to attach to all traces
*/
metadata?: KVMap;
/**
* Tags to attach to all traces
*/
tags?: string[];
}

import { wrapSDK } from "langsmith/wrappers";
import { Client } from "langsmith";
// Basic wrapping
const sdk = wrapSDK(mySDK);
// With custom client
const client = new Client({
apiKey: process.env.LANGSMITH_API_KEY,
});
const tracedSDK = wrapSDK(mySDK, {
client,
projectName: "my-sdk-traces",
});
// With full configuration
const fullyConfiguredSDK = wrapSDK(mySDK, {
name: "analytics-sdk",
projectName: "production-analytics",
runName: "data-processing",
metadata: {
version: "2.0",
environment: "production",
region: "us-west-2",
},
tags: ["analytics", "production", "critical"],
});
// Nested SDK wrapping
const wrappedClient = wrapSDK(client, {
name: "api-client",
projectName: "api-traces",
});
// Wrap multiple SDKs with different configs
const openaiSDK = wrapSDK(openaiClient, {
name: "openai",
projectName: "llm-calls",
tags: ["llm", "openai"],
});
const anthropicSDK = wrapSDK(anthropicClient, {
name: "anthropic",
projectName: "llm-calls",
tags: ["llm", "anthropic"],
});

The generic SDK wrapper:
import { wrapSDK } from "langsmith/wrappers";
// Wrap a custom SDK
const mySDK = {
async processData(input: string) {
// ... SDK logic
return { result: "processed" };
},
async analyzeText(text: string) {
// ... SDK logic
return { sentiment: "positive" };
},
};
const wrapped = wrapSDK(mySDK, {
name: "custom-sdk",
projectName: "my-analytics",
metadata: { version: "1.0" },
tags: ["production"],
});
// All method calls are automatically traced
const result = await wrapped.processData("Hello world");
const analysis = await wrapped.analyzeText("Great work!");

Automatic tracing for the OpenAI SDK with proper handling of completions, chat, embeddings, and streaming.
The OpenAI wrapper provides specialized tracing for the official OpenAI SDK. Wrap your OpenAI client once and all subsequent API calls are automatically traced to LangSmith.
import { wrapOpenAI } from "langsmith/wrappers/openai";
import OpenAI from "openai";

For CommonJS:
const { wrapOpenAI } = require("langsmith/wrappers/openai");
const OpenAI = require("openai");

For the generic wrapper:
import { wrapSDK } from "langsmith/wrappers";

For CommonJS:
const { wrapSDK } = require("langsmith/wrappers");

import { wrapOpenAI } from "langsmith/wrappers/openai";
import OpenAI from "openai";
// Wrap OpenAI client
const openai = wrapOpenAI(new OpenAI(), {
projectName: "openai-project"
});
// All calls automatically traced
const response = await openai.chat.completions.create({
model: "gpt-4",
messages: [{ role: "user", content: "Hello!" }]
});

/**
* Wrap OpenAI SDK for automatic tracing
* @param openai - OpenAI client instance
* @param options - Wrapper configuration
* @returns Wrapped OpenAI client
*/
function wrapOpenAI(openai: OpenAI, options?: WrapOpenAIOptions): OpenAI;
interface WrapOpenAIOptions {
/** Name for wrapper (default: "openai") */
name?: string;
/** LangSmith client */
client?: Client;
/** Project name */
projectName?: string;
/**
* Run name
* @deprecated Use `name` instead
*/
runName?: string;
/** Metadata */
metadata?: KVMap;
/** Tags */
tags?: string[];
}

import { wrapOpenAI } from "langsmith/wrappers/openai";
import OpenAI from "openai";
const openai = wrapOpenAI(new OpenAI(), {
projectName: "chat-app"
});
// Standard completion
const response = await openai.chat.completions.create({
model: "gpt-4",
messages: [
{ role: "system", content: "You are helpful." },
{ role: "user", content: "Hello!" }
]
});
// Streaming
const stream = await openai.chat.completions.create({
model: "gpt-4",
messages: [{ role: "user", content: "Tell me a joke" }],
stream: true
});
for await (const chunk of stream) {
const content = chunk.choices[0]?.delta?.content;
if (content) console.log(content);
}

import { wrapOpenAI } from "langsmith/wrappers/openai";
import OpenAI from "openai";
const openai = wrapOpenAI(new OpenAI(), {
projectName: "embeddings",
tags: ["embeddings"]
});
// Create embeddings
const response = await openai.embeddings.create({
model: "text-embedding-ada-002",
input: "The quick brown fox"
});
const embedding = response.data[0].embedding;
// Batch embeddings
const batch = await openai.embeddings.create({
model: "text-embedding-ada-002",
input: ["Doc 1", "Doc 2", "Doc 3"]
});

import { wrapOpenAI } from "langsmith/wrappers/openai";
import { Client } from "langsmith";
import OpenAI from "openai";
const lsClient = new Client({
apiKey: process.env.LANGSMITH_API_KEY
});
const openai = wrapOpenAI(
new OpenAI({
apiKey: process.env.OPENAI_API_KEY
}),
{
client: lsClient,
projectName: "production-chat",
runName: "customer-support",
metadata: {
version: "2.1.0",
deployment: "us-east-1"
},
tags: ["openai", "gpt-4", "production"]
}
);

The wrapper automatically captures:
import { traceable } from "langsmith/traceable";
import { wrapOpenAI } from "langsmith/wrappers/openai";
const openai = wrapOpenAI(new OpenAI());
const processQuery = traceable(
async (query: string) => {
// OpenAI call traced as child
const response = await openai.chat.completions.create({
model: "gpt-4",
messages: [{ role: "user", content: query }]
});
return response.choices[0].message.content;
},
{ name: "processQuery", run_type: "chain" }
);
// Creates nested trace
await processQuery("What is AI?");

// Good: Create once, reuse
const openai = wrapOpenAI(new OpenAI(), {
projectName: "my-app"
});
async function chat(message: string) {
return openai.chat.completions.create({
model: "gpt-4",
messages: [{ role: "user", content: message }]
});
}
// Bad: Create wrapper every call
async function chatBad(message: string) {
const openai = wrapOpenAI(new OpenAI()); // Don't do this
return openai.chat.completions.create({
model: "gpt-4",
messages: [{ role: "user", content: message }]
});
}

const openai = wrapOpenAI(new OpenAI());
try {
const response = await openai.chat.completions.create({
model: "gpt-4",
messages: [{ role: "user", content: "Hello" }]
});
} catch (error) {
// Error automatically logged to trace
console.error("OpenAI call failed:", error);
}