OpenAI integrations for LangChain.js providing chat models, embeddings, tools, and Azure support.
npx @tessl/cli install tessl/npm-langchain--openai@0.6.0

The @langchain/openai package provides comprehensive LangChain integrations for OpenAI's APIs, including chat models, language models, embeddings, and specialized tools. It supports both OpenAI and Azure OpenAI services with advanced features like structured output, tool calling, streaming, and multimodal interactions.
npm install @langchain/openai

import { ChatOpenAI, OpenAI, OpenAIEmbeddings, DallEAPIWrapper } from "@langchain/openai";
import { AzureChatOpenAI, AzureOpenAI, AzureOpenAIEmbeddings } from "@langchain/openai";
import { customTool, convertPromptToOpenAI } from "@langchain/openai";
import { OpenAIClient, type ClientOptions, toFile } from "@langchain/openai";

For CommonJS:
const { ChatOpenAI, OpenAI, OpenAIEmbeddings, DallEAPIWrapper } = require("@langchain/openai");
const { AzureChatOpenAI, AzureOpenAI, AzureOpenAIEmbeddings } = require("@langchain/openai");
const { customTool, convertPromptToOpenAI } = require("@langchain/openai");
const { OpenAIClient, toFile } = require("@langchain/openai");

import { ChatOpenAI } from "@langchain/openai";
import { z } from "zod";
// Basic chat model
const chatModel = new ChatOpenAI({
  model: "gpt-4o-mini",
  temperature: 0,
  apiKey: process.env.OPENAI_API_KEY,
});
const response = await chatModel.invoke("What is the capital of France?");
console.log(response.content);
// With tool binding
const tools = [
  {
    name: "get_weather",
    description: "Get current weather for a location",
    schema: z.object({
      location: z.string().describe("City name"),
    }),
  },
];
const modelWithTools = chatModel.bindTools(tools);
const result = await modelWithTools.invoke("What's the weather in Paris?");
// Structured output
const structuredModel = chatModel.withStructuredOutput(
  z.object({
    answer: z.string(),
    confidence: z.number().min(0).max(1),
  }),
  { name: "Response" }
);
const structured = await structuredModel.invoke("Explain quantum computing briefly");

The @langchain/openai package is built around several key components:
Modern conversational AI models supporting streaming, tools, structured output, and multimodal interactions. Built on OpenAI's Chat Completions API.
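The quick start above covers invocation, tool binding, and structured output; a minimal streaming sketch (the model name and prompt are illustrative) looks like this:

import { ChatOpenAI } from "@langchain/openai";

// Stream tokens as they are generated; relies on OPENAI_API_KEY in the environment.
const streamingModel = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 });

const stream = await streamingModel.stream("Write a haiku about the sea.");
for await (const chunk of stream) {
  // Each chunk is a message chunk; print its content as it arrives.
  process.stdout.write(String(chunk.content));
}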
class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> {
  constructor(fields?: ChatOpenAIFields);

  /** Generate a single response */
  invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<BaseMessage>;

  /** Stream response tokens */
  stream(input: BaseLanguageModelInput, options?: CallOptions): AsyncIterable<BaseMessageChunk>;

  /** Bind tools to the model */
  bindTools(tools: ChatOpenAIToolType[], kwargs?: Partial<CallOptions>): Runnable;

  /** Enable structured output with schema validation */
  withStructuredOutput<T>(
    outputSchema: z.ZodType<T> | Record<string, any>,
    config?: { name?: string; description?: string; method?: "functionCalling" | "jsonMode" }
  ): Runnable<BaseLanguageModelInput, T>;
}

Traditional text completion models for legacy workflows and specific use cases requiring the completions API.
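A minimal completions-style sketch (the model name, sampling settings, and prompt are illustrative):

import { OpenAI } from "@langchain/openai";

// Text completion model; invoke returns a plain string rather than a message object.
const llm = new OpenAI({
  model: "gpt-3.5-turbo-instruct",
  temperature: 0.7,
  maxTokens: 256,
});

const completion = await llm.invoke("Write a tagline for an ice cream shop.");
console.log(completion);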
class OpenAI<CallOptions extends OpenAICallOptions = OpenAICallOptions> {
  constructor(fields?: OpenAIInput);

  /** Generate text completions */
  _generate(prompts: string[], options: CallOptions): Promise<LLMResult>;

  /** Stream response chunks */
  _streamResponseChunks(input: string, options: CallOptions): AsyncIterable<GenerationChunk>;
}

Vector embeddings for semantic similarity, search, and retrieval-augmented generation (RAG) applications.
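A minimal embeddings sketch (the model name and sample texts are illustrative):

import { OpenAIEmbeddings } from "@langchain/openai";

const embeddings = new OpenAIEmbeddings({ model: "text-embedding-3-small" });

// Embed a batch of documents and a single query for similarity search.
const docVectors = await embeddings.embedDocuments([
  "LangChain integrates with OpenAI.",
  "Embeddings map text to vectors.",
]);
const queryVector = await embeddings.embedQuery("How does LangChain use OpenAI?");
console.log(docVectors.length, queryVector.length);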
class OpenAIEmbeddings {
  constructor(fields?: Partial<OpenAIEmbeddingsParams>);

  /** Embed multiple documents */
  embedDocuments(texts: string[]): Promise<number[][]>;

  /** Embed a single query */
  embedQuery(text: string): Promise<number[]>;
}

Complete Azure OpenAI service support with custom endpoints, API versions, and authentication methods including Azure AD.
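A minimal Azure sketch; the instance, deployment, and API version values are illustrative and can also be supplied through the corresponding AZURE_OPENAI_API_* environment variables:

import { AzureChatOpenAI } from "@langchain/openai";

const azureModel = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE_NAME,
  azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME,
  azureOpenAIApiVersion: "2024-02-01", // illustrative API version
  temperature: 0,
});

const azureResponse = await azureModel.invoke("Summarize LangChain in one sentence.");
console.log(azureResponse.content);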
class AzureChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions>
  extends ChatOpenAI<CallOptions> {
  constructor(fields?: AzureOpenAIInput & ChatOpenAIFields);
}

Image generation with DALL-E and custom tool creation for the OpenAI Responses API.
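A minimal DALL-E sketch (the model name and prompt are illustrative); the wrapper is a LangChain tool, so it is typically called through invoke:

import { DallEAPIWrapper } from "@langchain/openai";

const dallE = new DallEAPIWrapper({
  n: 1,
  model: "dall-e-3",
  apiKey: process.env.OPENAI_API_KEY,
});

// Returns a URL pointing at the generated image.
const imageUrl = await dallE.invoke("A watercolor painting of a lighthouse at dawn");
console.log(imageUrl);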
class DallEAPIWrapper {
  constructor(fields?: Partial<DallEAPIWrapperParams>);

  /** Generate images from text descriptions */
  _call(input: string): Promise<string>;
}

/** Create custom tools for Responses API */
function customTool<T extends Record<string, any>>(
  func: RunnableFunc<string, string, ToolRunnableConfig>,
  fields: CustomToolFields<T>
): CustomTool<T>;

Comprehensive type system covering all configuration options, call parameters, and response formats.
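As an illustration, per-call parameters from ChatOpenAICallOptions (shown below) are passed as the second argument to invoke; the prompt and option values here are illustrative:

import { ChatOpenAI } from "@langchain/openai";

const optionedModel = new ChatOpenAI({ model: "gpt-4o-mini" });

// Request JSON-mode output and a fixed seed for best-effort reproducibility.
const jsonReply = await optionedModel.invoke(
  "Return a JSON object with keys `fruit` and `color`.",
  { response_format: { type: "json_object" }, seed: 7 }
);
console.log(jsonReply.content);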
interface ChatOpenAICallOptions extends BaseChatOpenAICallOptions {
  tools?: ChatOpenAIToolType[];
  tool_choice?: OpenAIToolChoice | ResponsesToolChoice;
  response_format?: ChatOpenAIResponseFormat;
  seed?: number;
  stream_options?: OpenAIClient.Chat.ChatCompletionStreamOptions;
  parallel_tool_calls?: boolean;
  strict?: boolean;
  modalities?: Array<OpenAIClient.Chat.ChatCompletionModality>;
  audio?: OpenAIClient.Chat.ChatCompletionAudioParam;
}

Utility functions for interoperability and advanced use cases.
/**
 * Convert LangChain prompt to OpenAI format
 * Useful for direct OpenAI SDK interoperability
 */
function convertPromptToOpenAI(formattedPrompt: BasePromptValue): {
  messages: OpenAI.Chat.ChatCompletionMessageParam[];
};

import { convertPromptToOpenAI } from "@langchain/openai";
import { pull } from "langchain/hub";
import OpenAI from "openai";
// Pull a prompt from LangChain Hub
const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({
  topic: "cats",
});
// Convert to OpenAI format
const { messages } = convertPromptToOpenAI(formattedPrompt);
// Use directly with OpenAI SDK
const openAIClient = new OpenAI();
const openaiResponse = await openAIClient.chat.completions.create({
  model: "gpt-4o-mini",
  messages,
});