# Azure Integration

Complete Azure OpenAI service support with custom endpoints, API versions, and authentication methods, including Azure AD. The Azure classes provide the same functionality as their OpenAI counterparts, but target models deployed on Azure infrastructure.

## Capabilities

### AzureChatOpenAI Class

Azure OpenAI chat model integration with all the features of ChatOpenAI plus Azure-specific configuration.

```typescript { .api }
/**
 * Azure OpenAI chat model integration
 * Extends ChatOpenAI with Azure-specific configuration and authentication
 */
class AzureChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions>
  extends ChatOpenAI<CallOptions> {

  constructor(fields?: AzureOpenAIInput & ChatOpenAIFields);

  /** Azure-specific configuration */
  azureOpenAIApiVersion?: string; // Azure API version (e.g., "2024-02-01")
  azureOpenAIApiKey?: string; // Azure API key
  azureADTokenProvider?: () => Promise<string>; // Azure AD token provider function
  azureOpenAIApiInstanceName?: string; // Azure instance name
  azureOpenAIApiDeploymentName?: string; // Azure deployment name
  azureOpenAIBasePath?: string; // Azure base path
  azureOpenAIEndpoint?: string; // Azure endpoint URL

  /** Inherited from ChatOpenAI */
  invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<BaseMessage>;
  stream(input: BaseLanguageModelInput, options?: CallOptions): AsyncIterable<BaseMessageChunk>;
  bindTools(tools: ChatOpenAIToolType[], kwargs?: Partial<CallOptions>): Runnable;
  withStructuredOutput<T>(outputSchema: z.ZodType<T> | Record<string, any>, config?: any): Runnable<BaseLanguageModelInput, T>;
}
```
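
Because AzureChatOpenAI extends ChatOpenAI, it can be used anywhere a ChatOpenAI instance is expected, including in LCEL chains. A minimal sketch (the instance and deployment names below are placeholders):

```typescript
import { AzureChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { StringOutputParser } from "@langchain/core/output_parsers";

// Placeholder Azure settings -- substitute your own instance and deployment.
const model = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01"
});

// The model composes with prompts and output parsers exactly like ChatOpenAI.
const prompt = ChatPromptTemplate.fromTemplate("Summarize in one sentence: {text}");
const chain = prompt.pipe(model).pipe(new StringOutputParser());

const summary = await chain.invoke({
  text: "Azure OpenAI hosts OpenAI models on Azure infrastructure."
});
console.log(summary);
```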

### AzureOpenAI LLM Class

Azure OpenAI text completion model wrapper for the legacy completions API.

```typescript { .api }
/**
 * Azure OpenAI text completion model wrapper
 * Extends OpenAI LLM with Azure-specific configuration
 */
class AzureOpenAI extends OpenAI {

  constructor(fields?: AzureOpenAIInput);

  /** Azure configuration inherited from AzureOpenAIInput */
  azureOpenAIApiVersion?: string;
  azureOpenAIApiKey?: string;
  azureADTokenProvider?: () => Promise<string>;
  azureOpenAIApiInstanceName?: string;
  azureOpenAIApiDeploymentName?: string;
  azureOpenAIBasePath?: string;
  azureOpenAIEndpoint?: string;

  /** Inherited LLM functionality */
  _generate(prompts: string[], options: OpenAICallOptions): Promise<LLMResult>;
  _streamResponseChunks(input: string, options: OpenAICallOptions): AsyncIterable<GenerationChunk>;
}
```

### AzureOpenAIEmbeddings Class

Azure OpenAI embeddings integration with deployment-specific configuration.

```typescript { .api }
/**
 * Azure OpenAI embeddings integration
 * Extends OpenAIEmbeddings with Azure-specific configuration and deployment names
 */
class AzureOpenAIEmbeddings extends OpenAIEmbeddings {

  constructor(fields?: AzureOpenAIInput & Partial<OpenAIEmbeddingsParams>);

  /** Azure-specific embedding configuration */
  azureOpenAIApiEmbeddingsDeploymentName?: string; // Specific deployment for embeddings
  azureOpenAIApiCompletionsDeploymentName?: string; // Completions deployment fallback

  /** Inherited embedding functionality */
  embedDocuments(texts: string[]): Promise<number[][]>;
  embedQuery(text: string): Promise<number[]>;
}
```

### Azure Configuration Interface

Configuration interface for Azure OpenAI services.

```typescript { .api }
interface AzureOpenAIInput extends OpenAIBaseInput {
  /** Azure API version (required at request time; may also come from AZURE_OPENAI_API_VERSION) */
  azureOpenAIApiVersion?: string;

  /** Azure API key */
  azureOpenAIApiKey?: string;

  /** Azure AD token provider for authentication */
  azureADTokenProvider?: () => Promise<string>;

  /** Azure OpenAI instance name */
  azureOpenAIApiInstanceName?: string;

  /** Azure deployment name */
  azureOpenAIApiDeploymentName?: string;

  /** Azure base path for custom endpoints */
  azureOpenAIBasePath?: string;

  /** Azure endpoint URL */
  azureOpenAIEndpoint?: string;

  /** Embeddings-specific deployment name */
  azureOpenAIApiEmbeddingsDeploymentName?: string;

  /** Completions-specific deployment name */
  azureOpenAIApiCompletionsDeploymentName?: string;
}
```
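
The fields above are typically supplied in one of two combinations: an API key with an instance and deployment name, or an Azure AD token provider with an explicit endpoint. A rough sketch of both shapes (values are placeholders, and it assumes the AzureOpenAIInput type is exported by the package as documented above):

```typescript
import type { AzureOpenAIInput } from "@langchain/openai";

// Option A: API key + instance name + deployment name
const keyBasedConfig: Partial<AzureOpenAIInput> = {
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01"
};

// Option B: Azure AD token provider + explicit endpoint
const adBasedConfig: Partial<AzureOpenAIInput> = {
  azureADTokenProvider: async () => "<token acquired via @azure/identity>",
  azureOpenAIEndpoint: "https://my-instance.openai.azure.com/",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01"
};
```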

## Usage Examples

### Basic Azure Chat Model Setup

```typescript
import { AzureChatOpenAI } from "@langchain/openai";

// Method 1: Using instance name and deployment
const azureModel = new AzureChatOpenAI({
  azureOpenAIApiKey: "your-azure-key",
  azureOpenAIApiInstanceName: "your-instance-name",
  azureOpenAIApiDeploymentName: "your-deployment-name",
  azureOpenAIApiVersion: "2024-02-01",
  temperature: 0.7,
  maxTokens: 1000
});

// Method 2: Using a direct endpoint
const azureModelDirect = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIEndpoint: "https://your-instance.openai.azure.com/",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01"
});

// Basic usage - same as ChatOpenAI
const response = await azureModel.invoke("Hello, how are you?");
console.log(response.content);
```

### Environment Variables Configuration

```typescript
// Set these environment variables:
// AZURE_OPENAI_API_KEY=your-azure-key
// AZURE_OPENAI_API_INSTANCE_NAME=your-instance
// AZURE_OPENAI_API_DEPLOYMENT_NAME=your-deployment
// AZURE_OPENAI_API_VERSION=2024-02-01

import { AzureChatOpenAI } from "@langchain/openai";

// Azure configuration is loaded from the environment variables automatically
const azureModel = new AzureChatOpenAI({
  temperature: 0.5
});

const result = await azureModel.invoke("What is Azure OpenAI?");
```

### Azure AD Authentication

```typescript
import { DefaultAzureCredential } from "@azure/identity";
import { AzureChatOpenAI } from "@langchain/openai";

// Using Azure AD with a managed identity or other default credential
async function createAzureADTokenProvider() {
  const credential = new DefaultAzureCredential();

  return async () => {
    const token = await credential.getToken("https://cognitiveservices.azure.com/.default");
    return token.token;
  };
}

const azureModelWithAD = new AzureChatOpenAI({
  azureADTokenProvider: await createAzureADTokenProvider(),
  azureOpenAIApiInstanceName: "your-instance",
  azureOpenAIApiDeploymentName: "your-deployment",
  azureOpenAIApiVersion: "2024-02-01"
});

const response = await azureModelWithAD.invoke("Hello from Azure AD authenticated model!");
```
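
For the common credential-plus-scope pattern above, recent versions of @azure/identity also ship a getBearerTokenProvider helper that produces a () => Promise<string> token factory directly. A sketch of the equivalent setup, assuming your installed @azure/identity exports this helper:

```typescript
import { DefaultAzureCredential, getBearerTokenProvider } from "@azure/identity";
import { AzureChatOpenAI } from "@langchain/openai";

// getBearerTokenProvider wraps the credential and scope into a token factory
// with the same shape as azureADTokenProvider.
const credential = new DefaultAzureCredential();
const azureADTokenProvider = getBearerTokenProvider(
  credential,
  "https://cognitiveservices.azure.com/.default"
);

const model = new AzureChatOpenAI({
  azureADTokenProvider,
  azureOpenAIApiInstanceName: "your-instance",
  azureOpenAIApiDeploymentName: "your-deployment",
  azureOpenAIApiVersion: "2024-02-01"
});
```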

### Multiple Deployments Configuration

```typescript
import { AzureChatOpenAI } from "@langchain/openai";

// Configure different models for different use cases
const chatModel = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment", // For chat
  azureOpenAIApiVersion: "2024-02-01",
  temperature: 0.3
});

const creativeModel = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-creative", // Different deployment
  azureOpenAIApiVersion: "2024-02-01",
  temperature: 0.9
});

// Use different models for different tasks
const factualResponse = await chatModel.invoke("What is the capital of France?");
const creativeResponse = await creativeModel.invoke("Write a poem about Paris");
```

### Azure Embeddings Setup

```typescript
import { AzureOpenAIEmbeddings } from "@langchain/openai";

const azureEmbeddings = new AzureOpenAIEmbeddings({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiEmbeddingsDeploymentName: "text-embedding-ada-002", // Specific embedding deployment
  azureOpenAIApiVersion: "2024-02-01",
  batchSize: 512
});

// Usage is the same as OpenAIEmbeddings
const query = "What is machine learning?";
const queryEmbedding = await azureEmbeddings.embedQuery(query);

const documents = [
  "Machine learning is a subset of AI",
  "Deep learning uses neural networks",
  "Natural language processing handles text"
];
const docEmbeddings = await azureEmbeddings.embedDocuments(documents);
```
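
The vectors returned by embedQuery and embedDocuments can be compared directly, for example with cosine similarity. Continuing from the variables above, a small sketch that ranks the documents against the query:

```typescript
// Cosine similarity between two equal-length vectors.
function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

// Rank the documents by similarity to the query embedding.
const ranked = docEmbeddings
  .map((embedding, i) => ({
    text: documents[i],
    score: cosineSimilarity(queryEmbedding, embedding)
  }))
  .sort((a, b) => b.score - a.score);

console.log(ranked[0].text); // Most relevant document
```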

### Azure LLM (Legacy Completions)

```typescript
import { AzureOpenAI } from "@langchain/openai";

const azureLLM = new AzureOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-35-turbo-instruct-deployment", // Deployment of a completions-capable model (text-davinci-003 has been retired)
  azureOpenAIApiVersion: "2024-02-01",
  temperature: 0.7,
  maxTokens: 200
});

// Text completion
const completion = await azureLLM.invoke("The future of artificial intelligence is");
console.log(completion);
```

### Custom Endpoint Configuration

```typescript
import { AzureChatOpenAI } from "@langchain/openai";

// For custom Azure endpoints or special configurations
const customAzureModel = new AzureChatOpenAI({
  azureOpenAIApiKey: "your-key",
  azureOpenAIBasePath: "/openai/deployments/your-deployment",
  azureOpenAIEndpoint: "https://your-custom-endpoint.com",
  azureOpenAIApiVersion: "2024-02-01",
  // No deployment name needed when using basePath
});

// Alternative with full URL construction
const fullUrlModel = new AzureChatOpenAI({
  azureOpenAIApiKey: "your-key",
  baseURL: "https://your-instance.openai.azure.com/openai/deployments/your-deployment",
  azureOpenAIApiVersion: "2024-02-01"
});
```

### Streaming with Azure Models

```typescript
import { AzureChatOpenAI } from "@langchain/openai";

const azureStreamingModel = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01",
  streaming: true,
  temperature: 0.5
});

// Stream responses
const stream = await azureStreamingModel.stream("Tell me a story about a robot");

for await (const chunk of stream) {
  process.stdout.write(chunk.content);
}
```

### Tool Calling with Azure

```typescript
import { AzureChatOpenAI } from "@langchain/openai";
import { z } from "zod";

const azureToolModel = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-tools", // Deployment supporting tools
  azureOpenAIApiVersion: "2024-02-01",
  temperature: 0
});

const tools = [
  {
    name: "search_database",
    description: "Search company database for information",
    schema: z.object({
      query: z.string().describe("Search query"),
      department: z.string().optional().describe("Department to search in")
    })
  }
];

const modelWithTools = azureToolModel.bindTools(tools);
const response = await modelWithTools.invoke("Find information about sales performance");

// Handle tool calls
if (response.tool_calls) {
  console.log("Model wants to call tools:", response.tool_calls);
}
```
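
Once the model requests a tool call, the usual next step is to execute the tool and return the result as a ToolMessage so the model can produce a final answer. A rough sketch of that round trip, continuing from modelWithTools above; runDatabaseSearch is a hypothetical application-side function:

```typescript
import { HumanMessage, ToolMessage } from "@langchain/core/messages";

// Hypothetical implementation of the search_database tool.
async function runDatabaseSearch(args: { query: string; department?: string }): Promise<string> {
  return `Results for "${args.query}"${args.department ? ` in ${args.department}` : ""}`;
}

const question = new HumanMessage("Find information about sales performance");
const aiMessage = await modelWithTools.invoke([question]);

// Run each requested tool and collect the results as ToolMessages.
const toolMessages: ToolMessage[] = [];
for (const toolCall of aiMessage.tool_calls ?? []) {
  if (toolCall.name === "search_database") {
    const result = await runDatabaseSearch(toolCall.args as { query: string; department?: string });
    toolMessages.push(new ToolMessage({ content: result, tool_call_id: toolCall.id! }));
  }
}

// Send the conversation plus tool results back for a final answer.
const finalAnswer = await modelWithTools.invoke([question, aiMessage, ...toolMessages]);
console.log(finalAnswer.content);
```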

### Structured Output with Azure

```typescript
import { AzureChatOpenAI } from "@langchain/openai";
import { z } from "zod";

const ResponseSchema = z.object({
  summary: z.string().describe("Brief summary"),
  keyPoints: z.array(z.string()).describe("Main points"),
  sentiment: z.enum(["positive", "negative", "neutral"]).describe("Overall sentiment"),
  confidence: z.number().min(0).max(1).describe("Confidence score")
});

const azureStructuredModel = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01"
});

const structuredModel = azureStructuredModel.withStructuredOutput(ResponseSchema, {
  name: "DocumentAnalysis"
});

const analysis = await structuredModel.invoke(`
  Analyze this customer feedback: "I love the new features in your app!
  The user interface is much cleaner and the performance is significantly better.
  However, I wish there were more customization options available."
`);

console.log(analysis);
// Output: { summary: "...", keyPoints: [...], sentiment: "positive", confidence: 0.85 }
```

## Environment Variables

### Required Environment Variables

```bash
# Basic Azure OpenAI configuration
AZURE_OPENAI_API_KEY=your-azure-api-key
AZURE_OPENAI_API_INSTANCE_NAME=your-instance-name
AZURE_OPENAI_API_DEPLOYMENT_NAME=your-deployment-name
AZURE_OPENAI_API_VERSION=2024-02-01

# Alternative endpoint configuration
AZURE_OPENAI_ENDPOINT=https://your-instance.openai.azure.com/
AZURE_OPENAI_BASE_PATH=/openai/deployments/your-deployment

# Specialized deployments
AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME=text-embedding-deployment
AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME=completions-deployment
```

### Environment Variable Priority

The Azure utilities resolve configuration in this priority order:

1. **Direct constructor parameters** (highest priority)
2. **Environment variables**
3. **Default values** (lowest priority)

```typescript
import { AzureChatOpenAI } from "@langchain/openai";

// This will use the constructor parameter over the environment variable
const model = new AzureChatOpenAI({
  azureOpenAIApiKey: "constructor-key", // Used
  // Even if AZURE_OPENAI_API_KEY is set in the environment
});

// This will use the environment variable
const model2 = new AzureChatOpenAI({
  // azureOpenAIApiKey not specified, so AZURE_OPENAI_API_KEY is read from the environment
  temperature: 0.7
});
```

## Azure Endpoint Construction

### Endpoint URL Priority

The Azure utilities construct endpoints in this order:

1. **azureOpenAIBasePath + azureOpenAIApiDeploymentName**
2. **azureOpenAIEndpoint + azureOpenAIApiDeploymentName**
3. **azureOpenAIApiInstanceName + azureOpenAIApiDeploymentName**
4. **Custom baseURL** (if provided)

```typescript
import { getEndpoint } from "@langchain/openai";

// Example endpoint construction
const config = {
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01"
};

const endpoint = getEndpoint(config);
// Result: "https://my-instance.openai.azure.com/openai/deployments/gpt-4o-deployment"
```
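
For reference, the instance-plus-deployment form ultimately targets the standard Azure OpenAI URL pattern, with the API version appended as a query parameter on each request. A purely illustrative sketch of the chat-completions URL that results from the configuration above:

```typescript
// Illustrative only: the client assembles URLs of this shape internally.
const instance = "my-instance";
const deployment = "gpt-4o-deployment";
const apiVersion = "2024-02-01";

const chatCompletionsUrl =
  `https://${instance}.openai.azure.com/openai/deployments/${deployment}` +
  `/chat/completions?api-version=${apiVersion}`;

console.log(chatCompletionsUrl);
// https://my-instance.openai.azure.com/openai/deployments/gpt-4o-deployment/chat/completions?api-version=2024-02-01
```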

## Error Handling

### Azure-Specific Error Handling

```typescript
import { AzureChatOpenAI } from "@langchain/openai";

const azureModel = new AzureChatOpenAI({
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiDeploymentName: "gpt-4o-deployment",
  azureOpenAIApiVersion: "2024-02-01",
  maxRetries: 3,
  timeout: 60000
});

try {
  const response = await azureModel.invoke("Hello!");
} catch (error: any) {
  console.error("Azure OpenAI error:", error);

  // Common Azure-specific errors
  if (error.message.includes('DeploymentNotFound')) {
    console.error("Deployment not found - check deployment name");
  } else if (error.message.includes('InvalidApiVersionParameter')) {
    console.error("Invalid API version - check azureOpenAIApiVersion");
  } else if (error.message.includes('Unauthorized')) {
    console.error("Authentication failed - check API key or Azure AD token");
  }
}
```
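
Beyond the built-in maxRetries option, any LangChain runnable can also be wrapped with withRetry to retry transient failures such as rate limiting. A brief sketch, reusing azureModel from the block above:

```typescript
// Retry the call for up to 3 attempts before surfacing the error.
const resilientModel = azureModel.withRetry({ stopAfterAttempt: 3 });

const reply = await resilientModel.invoke("Hello again!");
console.log(reply.content);
```

A similar pattern with withFallbacks can route requests to a secondary model when the primary deployment keeps failing, which is what the multi-region example under Best Practices does by hand.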

## Best Practices

### Deployment Management

```typescript
import {
  AzureChatOpenAI,
  AzureOpenAIEmbeddings,
  type AzureOpenAIInput,
  type ChatOpenAIFields
} from "@langchain/openai";

// Create a factory for consistent Azure model creation
class AzureModelFactory {
  private baseConfig: Partial<AzureOpenAIInput>;

  constructor(baseConfig: Partial<AzureOpenAIInput>) {
    this.baseConfig = {
      azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
      azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE_NAME,
      azureOpenAIApiVersion: "2024-02-01",
      ...baseConfig
    };
  }

  createChatModel(deploymentName: string, options?: Partial<AzureOpenAIInput & ChatOpenAIFields>) {
    return new AzureChatOpenAI({
      ...this.baseConfig,
      azureOpenAIApiDeploymentName: deploymentName,
      ...options
    });
  }

  createEmbeddings(deploymentName: string) {
    return new AzureOpenAIEmbeddings({
      ...this.baseConfig,
      azureOpenAIApiEmbeddingsDeploymentName: deploymentName
    });
  }
}

// Usage
const factory = new AzureModelFactory({
  azureOpenAIApiInstanceName: "my-instance"
});

const chatModel = factory.createChatModel("gpt-4o-deployment", { temperature: 0.3 });
const embeddings = factory.createEmbeddings("text-embedding-deployment");
```

### Configuration Validation

```typescript
import { AzureChatOpenAI, type AzureOpenAIInput } from "@langchain/openai";

function validateAzureConfig(config: Partial<AzureOpenAIInput>): void {
  const requiredFields = ['azureOpenAIApiVersion'];
  const authFields = ['azureOpenAIApiKey', 'azureADTokenProvider'];
  const endpointFields = [
    'azureOpenAIApiInstanceName',
    'azureOpenAIEndpoint',
    'azureOpenAIBasePath'
  ];

  // Check required fields
  for (const field of requiredFields) {
    if (!config[field as keyof AzureOpenAIInput]) {
      throw new Error(`Missing required field: ${field}`);
    }
  }

  // Check authentication
  const hasAuth = authFields.some(field => config[field as keyof AzureOpenAIInput]);
  if (!hasAuth) {
    throw new Error("Must provide either azureOpenAIApiKey or azureADTokenProvider");
  }

  // Check endpoint configuration
  const hasEndpoint = endpointFields.some(field => config[field as keyof AzureOpenAIInput]);
  if (!hasEndpoint) {
    throw new Error("Must provide endpoint configuration");
  }
}

// Usage
const config = {
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
  azureOpenAIApiInstanceName: "my-instance",
  azureOpenAIApiVersion: "2024-02-01"
};

validateAzureConfig(config);
const model = new AzureChatOpenAI(config);
```

### Multi-Region Setup

```typescript
import { AzureChatOpenAI, type AzureOpenAIInput } from "@langchain/openai";
import type { BaseMessage } from "@langchain/core/messages";

// Support multiple Azure regions for reliability
class MultiRegionAzureModel {
  private regions: AzureChatOpenAI[] = [];
  private currentRegionIndex = 0;

  constructor(regionConfigs: AzureOpenAIInput[]) {
    this.regions = regionConfigs.map(config => new AzureChatOpenAI(config));
  }

  async invoke(input: string, maxRetries = 3): Promise<BaseMessage> {
    let lastError: Error | null = null;

    for (let attempt = 0; attempt < maxRetries; attempt++) {
      const model = this.regions[this.currentRegionIndex];

      try {
        return await model.invoke(input);
      } catch (error) {
        lastError = error as Error;
        console.warn(`Region ${this.currentRegionIndex} failed:`, lastError.message);

        // Switch to the next region
        this.currentRegionIndex = (this.currentRegionIndex + 1) % this.regions.length;
      }
    }

    throw new Error(`All regions failed. Last error: ${lastError?.message}`);
  }
}

// Usage
const multiRegionModel = new MultiRegionAzureModel([
  {
    azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
    azureOpenAIApiInstanceName: "us-east-instance",
    azureOpenAIApiDeploymentName: "gpt-4o-deployment",
    azureOpenAIApiVersion: "2024-02-01"
  },
  {
    azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
    azureOpenAIApiInstanceName: "eu-west-instance",
    azureOpenAIApiDeploymentName: "gpt-4o-deployment",
    azureOpenAIApiVersion: "2024-02-01"
  }
]);

const response = await multiRegionModel.invoke("Hello from multi-region setup!");
```