# Utilities and Integrations

Helper functions, LangSmith integration, Hub connectivity, storage utilities, evaluation tools, and serialization capabilities for building robust LangChain applications.

## Capabilities

### LangChain Hub Integration

Access to the LangChain Hub for sharing and discovering prompts, chains, and other components.

```typescript { .api }
/**
 * Pull a prompt or component from LangChain Hub
 */
function pull<T>(
  ownerRepoCommit: string,
  options?: {
    apiKey?: string;
    apiUrl?: string;
    includeModel?: boolean;
    modelClass?: any;
  }
): Promise<T>;

/**
 * Push content to LangChain Hub
 */
function push(
  repoFullName: string,
  object: any,
  options?: {
    apiKey?: string;
    apiUrl?: string;
    parentCommitHash?: string;
  }
): Promise<string>;
```

**Usage Example:**

```typescript
import { pull } from "langchain/hub";
import { ChatPromptTemplate } from "@langchain/core/prompts";

// Pull a chat prompt template from the hub
const prompt = await pull<ChatPromptTemplate>("hwchase17/react-chat");

// Describe the tools available to the agent (normally derived from your tool list)
const toolsDescription = "search: useful for answering questions about current events";

// Use the prompt in your application
const formattedPrompt = await prompt.format({
  agent_scratchpad: "",
  input: "What is the weather like today?",
  tools: toolsDescription
});

console.log(formattedPrompt);
```
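
The `push` side works the same way in reverse. A minimal sketch based on the signature declared above; `my-handle/concise-assistant` is a placeholder repository name, and the environment-variable fallback for `apiKey` is an assumption.

```typescript
import { push } from "langchain/hub";
import { ChatPromptTemplate } from "@langchain/core/prompts";

// Build a prompt locally
const prompt = ChatPromptTemplate.fromMessages([
  ["system", "You are a concise assistant."],
  ["human", "{question}"]
]);

// Push it under your handle; the returned string identifies the new commit
const commitUrl = await push("my-handle/concise-assistant", prompt, {
  // apiKey is typically read from the LANGCHAIN_API_KEY environment variable when omitted
});

console.log(`Pushed prompt to ${commitUrl}`);
```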

### LangSmith Integration

Integration with LangSmith for observability, evaluation, and monitoring of LangChain applications.

```typescript { .api }
/**
 * Run evaluation on a LangSmith dataset
 */
function runOnDataset(params: RunOnDatasetParams): Promise<EvalResults>;

interface RunOnDatasetParams {
  /** Name of the dataset to evaluate on */
  datasetName: string;

  /** Factory function for creating LLM or chain */
  llmOrChainFactory: () => BaseLanguageModelInterface | BaseChain;

  /** Evaluation criteria to apply */
  evaluation?: EvaluatorType[];

  /** Number of concurrent evaluations */
  concurrency?: number;

  /** Project name for organizing results */
  projectName?: string;

  /** Additional project metadata */
  projectMetadata?: Record<string, any>;

  /** Client configuration */
  client?: LangSmithClient;

  /** Number of repetitions per example */
  numRepetitions?: number;

  /** Maximum number of examples to evaluate */
  maxExamples?: number;
}

interface EvalResults {
  /** Project name */
  projectName: string;

  /** Individual evaluation results */
  results: EvaluationResult[];

  /** Aggregate statistics */
  aggregateStats: Record<string, number>;
}

interface EvaluationResult {
  /** Example ID */
  exampleId: string;

  /** Model outputs */
  outputs: Record<string, any>;

  /** Evaluation scores */
  scores: Record<string, number>;

  /** Feedback and comments */
  feedback: EvaluationFeedback[];

  /** Execution metadata */
  metadata: Record<string, any>;
}
```

**Usage Example:**

```typescript
import { runOnDataset } from "langchain/smith";
import { LLMChain } from "langchain/chains";
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";

// Define chain factory
const chainFactory = () => {
  const llm = new ChatOpenAI({ temperature: 0 });
  const prompt = ChatPromptTemplate.fromMessages([
    ["system", "You are a helpful assistant."],
    ["human", "{input}"]
  ]);
  return new LLMChain({ llm, prompt });
};

// Run evaluation
const results = await runOnDataset({
  datasetName: "my-qa-dataset",
  llmOrChainFactory: chainFactory,
  evaluation: ["qa", "coherence", "relevance"],
  concurrency: 4,
  projectName: "qa-model-evaluation",
  projectMetadata: {
    model: "gpt-3.5-turbo",
    version: "1.0.0",
    temperature: 0
  }
});

console.log(`Evaluation completed for ${results.results.length} examples`);
console.log(`Average accuracy: ${results.aggregateStats.accuracy}`);
```

### Storage Systems

Various storage backends for persisting data, embeddings, and application state.

```typescript { .api }
/**
 * Local file system storage
 */
class LocalFileStore extends BaseStore<string, Uint8Array> {
  constructor(rootPath: string);

  /** Root directory path */
  rootPath: string;

  /** Get multiple values by keys */
  mget(keys: string[]): Promise<(Uint8Array | undefined)[]>;

  /** Set multiple key-value pairs */
  mset(keyValuePairs: [string, Uint8Array][]): Promise<void>;

  /** Delete multiple keys */
  mdelete(keys: string[]): Promise<void>;

  /** Yield keys with optional prefix */
  yieldKeys(prefix?: string): AsyncGenerator<string>;

  /** Create from file path */
  static fromPath(rootPath: string): Promise<LocalFileStore>;
}

/**
 * In-memory storage implementation
 */
class InMemoryStore<K, V> extends BaseStore<K, V> {
  constructor();

  /** Internal storage map */
  store: Map<K, V>;

  mget(keys: K[]): Promise<(V | undefined)[]>;
  mset(keyValuePairs: [K, V][]): Promise<void>;
  mdelete(keys: K[]): Promise<void>;
  yieldKeys(prefix?: string): AsyncGenerator<K>;
}

/**
 * Encoder-backed storage with automatic encoding/decoding
 */
class EncoderBackedStore<K, V> extends BaseStore<K, V> {
  constructor(fields: {
    store: BaseStore<K, string>;
    keyEncoder: (key: K) => K;
    valueSerializer: (value: V) => string;
    valueDeserializer: (value: string) => V;
  });

  /** Underlying storage */
  store: BaseStore<K, string>;

  /** Key encoding function */
  keyEncoder: (key: K) => K;

  /** Value serialization function */
  valueSerializer: (value: V) => string;

  /** Value deserialization function */
  valueDeserializer: (value: string) => V;

  mget(keys: K[]): Promise<(V | undefined)[]>;
  mset(keyValuePairs: [K, V][]): Promise<void>;
  mdelete(keys: K[]): Promise<void>;
  yieldKeys(prefix?: string): AsyncGenerator<K>;
}
```

**Usage Example:**

```typescript
import {
  LocalFileStore,
  InMemoryStore,
  EncoderBackedStore
} from "langchain/storage";

// File-based storage
const fileStore = await LocalFileStore.fromPath("./app_data");
await fileStore.mset([["user:123", new TextEncoder().encode("user data")]]);
const userData = await fileStore.mget(["user:123"]);

// In-memory storage
const memoryStore = new InMemoryStore<string, object>();
await memoryStore.mset([
  ["config", { theme: "dark", language: "en" }],
  ["cache", { lastUpdate: Date.now() }]
]);

// Encoder-backed storage for automatic JSON serialization
const jsonStore = new EncoderBackedStore({
  store: fileStore,
  keyEncoder: (key: string) => `json:${key}`,
  valueSerializer: (value: object) => JSON.stringify(value),
  valueDeserializer: (value: string) => JSON.parse(value)
});

await jsonStore.mset([["user", { name: "Alice", age: 30 }]]);
const user = await jsonStore.mget(["user"]);
```

### Document and Message Stores

Specialized stores for documents and chat messages.

```typescript { .api }
/**
 * In-memory document store
 */
class InMemoryDocstore extends BaseDocstore {
  constructor(docs?: Record<string, DocumentInterface>);

  /** Internal document storage */
  docs: Record<string, DocumentInterface>;

  search(term: string): Promise<DocumentInterface>;
  add(docs: Record<string, DocumentInterface>): Promise<void>;
}

/**
 * In-memory message store
 */
class ChatMessageHistory extends BaseListChatMessageHistory {
  constructor(messages?: BaseMessageInterface[]);

  /** Array of stored messages */
  messages: BaseMessageInterface[];

  getMessages(): Promise<BaseMessageInterface[]>;
  addMessage(message: BaseMessageInterface): Promise<void>;
  addUserMessage(message: string): Promise<void>;
  addAIChatMessage(message: string): Promise<void>;
  clear(): Promise<void>;
}

/**
 * File-based message store
 */
class FileChatMessageHistory extends BaseListChatMessageHistory {
  constructor(filePath: string);

  /** File path for message storage */
  filePath: string;

  getMessages(): Promise<BaseMessageInterface[]>;
  addMessage(message: BaseMessageInterface): Promise<void>;
  clear(): Promise<void>;
}
```
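
A short usage sketch for these stores, based on the declarations above; the import paths (`langchain/stores/message/in_memory`, `langchain/stores/doc/in_memory`) are assumptions and may differ between LangChain versions.

```typescript
import { ChatMessageHistory } from "langchain/stores/message/in_memory";
import { InMemoryDocstore } from "langchain/stores/doc/in_memory";
import { Document } from "@langchain/core/documents";

// Keep a running conversation transcript in memory
const history = new ChatMessageHistory();
await history.addUserMessage("Which storage backends are available?");
await history.addAIChatMessage("File, in-memory, and encoder-backed stores.");
console.log((await history.getMessages()).length); // 2

// Store documents by ID and look them up later
const docstore = new InMemoryDocstore();
await docstore.add({
  "doc-1": new Document({ pageContent: "Storage utilities overview" })
});
const doc = await docstore.search("doc-1");
console.log(doc.pageContent);
```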

317

318

### Caching Systems

319

320

Caching implementations for improving performance and reducing API calls.

321

322

```typescript { .api }

323

/**

324

* File system-based cache

325

*/

326

class FileSystemCache extends BaseCache {

327

constructor(cacheDir?: string);

328

329

/** Cache directory path */

330

cacheDir: string;

331

332

lookup(prompt: string, llmKey: string): Promise<Generation[] | null>;

333

update(prompt: string, llmKey: string, value: Generation[]): Promise<void>;

334

clear(): Promise<void>;

335

}

336

337

/**

338

* In-memory cache implementation

339

*/

340

class InMemoryCache extends BaseCache {

341

constructor();

342

343

/** Internal cache storage */

344

cache: Map<string, Generation[]>;

345

346

lookup(prompt: string, llmKey: string): Promise<Generation[] | null>;

347

update(prompt: string, llmKey: string, value: Generation[]): Promise<void>;

348

clear(): Promise<void>;

349

}

350

```
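
A minimal sketch of wiring the in-memory cache into a model, assuming the constructor shown above; note that in recent releases `InMemoryCache` is also exported from `@langchain/core/caches`, so treat the `langchain/cache` path as an assumption.

```typescript
import { InMemoryCache } from "langchain/cache";
import { ChatOpenAI } from "@langchain/openai";

// Share one cache instance so identical prompts are only sent to the provider once
const cache = new InMemoryCache();
const model = new ChatOpenAI({ temperature: 0, cache });

// The first call reaches the API; the repeat is answered from the cache
const first = await model.invoke("Name one use for an LLM cache.");
const second = await model.invoke("Name one use for an LLM cache.");
console.log(first.content === second.content); // true on a cache hit
```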

351

352

### Serialization and Loading

353

354

Utilities for serializing and loading LangChain components.

355

356

```typescript { .api }

357

/**

358

* Load a LangChain component from serialized representation

359

*/

360

function load<T>(

361

text: string,

362

secretsMap?: Record<string, any>,

363

optionalImportsMap?: OptionalImportMap,

364

additionalImportsMap?: Record<string, any>

365

): Promise<T>;

366

367

/**

368

* Load chain from hub or serialized format

369

*/

370

function loadChain(

371

uri: string,

372

options?: {

373

llm?: BaseLanguageModelInterface;

374

verbose?: boolean;

375

}

376

): Promise<BaseChain>;

377

378

/**

379

* Load agent from serialized format

380

*/

381

function loadAgent(

382

uri: string,

383

options?: {

384

llm?: BaseLanguageModelInterface;

385

tools?: BaseTool[];

386

verbose?: boolean;

387

}

388

): Promise<BaseAgent>;

389

390

/**

391

* Serializable base class for LangChain components

392

*/

393

abstract class Serializable {

394

/** Serialization namespace */

395

abstract lc_namespace: string[];

396

397

/** Serializable attributes */

398

lc_serializable?: boolean;

399

400

/** Secrets to exclude from serialization */

401

lc_secrets?: { [key: string]: string };

402

403

/** Attributes to exclude from serialization */

404

lc_attributes?: { [key: string]: any };

405

406

/** Get serializable constructor parameters */

407

get lc_kwargs(): SerializedFields;

408

409

/** Convert to serialized representation */

410

toJSON(): Serialized;

411

412

/** Convert to JSON string */

413

toJSONString(): string;

414

}

415

```

**Usage Example:**

```typescript
import * as fs from "node:fs/promises";
import { load } from "langchain/load";
import { LLMChain } from "langchain/chains";
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";

// Serialize a chain
const llm = new ChatOpenAI({ temperature: 0 });
const prompt = ChatPromptTemplate.fromTemplate("Answer briefly: {input}");
const chain = new LLMChain({ llm, prompt });
const serialized = chain.toJSONString();

// Save to file or database
await fs.writeFile("chain.json", serialized);

// Later, load the chain back
// (pass a secrets map as the second argument if the component needs API keys)
const chainData = await fs.readFile("chain.json", "utf-8");
const loadedChain = await load<LLMChain>(chainData);

// Use the loaded chain
const result = await loadedChain.call({ input: "test" });
```

### Evaluation Tools

Tools for evaluating and testing LangChain applications.

```typescript { .api }
/**
 * QA evaluation chain
 */
class QAEvalChain extends BaseChain {
  constructor(fields: QAEvalChainInput);

  /** LLM for evaluation */
  llm: BaseLanguageModelInterface;

  /** Evaluation prompt */
  prompt: BasePromptTemplate;

  _call(values: ChainValues): Promise<ChainValues>;

  static fromLLM(
    llm: BaseLanguageModelInterface,
    options?: Partial<QAEvalChainInput>
  ): QAEvalChain;
}

/**
 * Criteria evaluation chain
 */
class CriteriaEvalChain extends BaseChain {
  constructor(fields: CriteriaEvalChainInput);

  /** Evaluation criteria */
  criteria: EvaluationCriteria;

  /** LLM for evaluation */
  llm: BaseLanguageModelInterface;

  _call(values: ChainValues): Promise<ChainValues>;

  static fromLLM(
    llm: BaseLanguageModelInterface,
    criteria: EvaluationCriteria | string,
    options?: Partial<CriteriaEvalChainInput>
  ): CriteriaEvalChain;
}

/**
 * Embedding distance evaluator
 */
class EmbeddingDistanceEvalChain extends BaseChain {
  constructor(fields: EmbeddingDistanceEvalChainInput);

  /** Embeddings model */
  embeddings: EmbeddingsInterface;

  /** Distance metric */
  distanceMetric: DistanceMetric;

  _call(values: ChainValues): Promise<ChainValues>;

  static fromEmbeddings(
    embeddings: EmbeddingsInterface,
    options?: Partial<EmbeddingDistanceEvalChainInput>
  ): EmbeddingDistanceEvalChain;
}
```
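
A sketch of grading a single answer with `QAEvalChain`, based purely on the declarations above; the import path and the input keys used here (`query`, `answer`, `result`) are assumptions that may not match the installed package, which also exposes evaluators through a `loadEvaluator` helper.

```typescript
import { ChatOpenAI } from "@langchain/openai";
import { QAEvalChain } from "langchain/evaluation/qa"; // path assumed

const llm = new ChatOpenAI({ temperature: 0 });
const evalChain = QAEvalChain.fromLLM(llm);

// Grade a model prediction against a reference answer (key names assumed)
const graded = await evalChain.call({
  query: "What does the LocalFileStore persist to?",
  answer: "The local file system.",
  result: "It writes key/value pairs to disk under a root directory."
});

console.log(graded); // e.g. { text: "CORRECT" } depending on the grading prompt
```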

503

504

### Utility Functions

505

506

Helper functions for common operations and calculations.

507

508

```typescript { .api }

509

/**

510

* Document utility functions

511

*/

512

namespace DocumentUtils {

513

/** Combine multiple documents into one */

514

function combineDocuments(

515

docs: DocumentInterface[],

516

separator?: string

517

): string;

518

519

/** Format documents with metadata */

520

function formatDocuments(

521

docs: DocumentInterface[],

522

options?: {

523

includeMetadata?: boolean;

524

metadataTemplate?: string;

525

}

526

): string[];

527

528

/** Filter documents by metadata */

529

function filterDocuments(

530

docs: DocumentInterface[],

531

filter: (doc: DocumentInterface) => boolean

532

): DocumentInterface[];

533

}

534

535

/**

536

* Math utility functions

537

*/

538

namespace MathUtils {

539

/** Calculate cosine similarity between vectors */

540

function cosineSimilarity(a: number[], b: number[]): number;

541

542

/** Calculate dot product of vectors */

543

function dotProduct(a: number[], b: number[]): number;

544

545

/** Normalize vector to unit length */

546

function normalize(vector: number[]): number[];

547

548

/** Calculate Euclidean distance */

549

function euclideanDistance(a: number[], b: number[]): number;

550

}

551

552

/**

553

* Time utility functions

554

*/

555

namespace TimeUtils {

556

/** Format duration in human-readable format */

557

function formatDuration(milliseconds: number): string;

558

559

/** Get current timestamp */

560

function getCurrentTimestamp(): number;

561

562

/** Parse ISO date string */

563

function parseISODate(dateString: string): Date;

564

565

/** Format date for logging */

566

function formatLogDate(date: Date): string;

567

}

568

```
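
To make the math helpers concrete, the snippet below computes cosine similarity inline rather than importing it, since the exact export path for these helpers varies across versions (recent releases ship a `cosineSimilarity` under `@langchain/core/utils/math`). It is an illustrative stand-in, not the library implementation.

```typescript
// Cosine similarity: dot(a, b) / (|a| * |b|); 1 means same direction, 0 means orthogonal
function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

// Compare two toy embedding vectors
const queryEmbedding = [0.1, 0.3, 0.5];
const docEmbedding = [0.2, 0.25, 0.55];
console.log(cosineSimilarity(queryEmbedding, docEmbedding).toFixed(4));
```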

### SQL Database Integration

Utilities for working with SQL databases in LangChain applications.

```typescript { .api }
/**
 * SQL database connection and utilities
 */
class SqlDatabase {
  constructor(fields: SqlDatabaseFields);

  /** Database connection */
  db: Database;

  /** Database engine type */
  engine: DatabaseEngine;

  /** Tables to include in table info */
  includesTables: string[];

  /** Tables to exclude from table info */
  ignoreTables: string[];

  /** Sample rows per table */
  sampleRowsInTableInfo: number;

  /** Custom table information */
  customTableInfo?: Record<string, string>;

  /** Get table information */
  getTableInfo(targetTables?: string[]): Promise<string>;

  /** Run SQL query */
  run(command: string): Promise<string>;

  /** Get table names */
  allTables(): Promise<string[]>;

  /** Get column information for a table */
  getTableColumns(table: string): Promise<ColumnInfo[]>;

  static fromUri(
    uri: string,
    options?: Partial<SqlDatabaseFields>
  ): Promise<SqlDatabase>;
}

interface ColumnInfo {
  name: string;
  type: string;
  nullable: boolean;
  default?: any;
}

type DatabaseEngine = "postgres" | "mysql" | "sqlite" | "mssql" | "oracle";
```
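
A usage sketch following the `fromUri` factory declared above; the shipped JavaScript package may instead construct `SqlDatabase` from a TypeORM `DataSource`, so treat the import path and the URI-based setup as assumptions.

```typescript
import { SqlDatabase } from "langchain/sql_db"; // path assumed

// Connect via a connection URI, restricting schema inspection to two tables
const db = await SqlDatabase.fromUri("sqlite:///./northwind.db", {
  includesTables: ["Customers", "Orders"],
  sampleRowsInTableInfo: 2
});

// Table info (schema plus sample rows) is what you feed into an LLM prompt
const tableInfo = await db.getTableInfo();
console.log(tableInfo);

// Run a raw query; results come back as a string
const rows = await db.run("SELECT CompanyName FROM Customers LIMIT 5;");
console.log(rows);
```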

### Callback System

Comprehensive callback system for monitoring and debugging LangChain applications.

```typescript { .api }
/**
 * Base callback handler
 */
abstract class BaseCallbackHandler {
  /** Handler name */
  name?: string;

  /** Ignore LLM callbacks */
  ignoreLLM?: boolean;

  /** Ignore chain callbacks */
  ignoreChain?: boolean;

  /** Ignore agent callbacks */
  ignoreAgent?: boolean;

  /** Ignore retriever callbacks */
  ignoreRetriever?: boolean;

  /** Handle LLM start */
  handleLLMStart?(
    llm: { name: string },
    prompts: string[],
    runId: string,
    parentRunId?: string,
    extraParams?: Record<string, unknown>
  ): Promise<void>;

  /** Handle LLM end */
  handleLLMEnd?(output: LLMResult, runId: string): Promise<void>;

  /** Handle LLM error */
  handleLLMError?(error: Error, runId: string): Promise<void>;

  /** Handle chain start */
  handleChainStart?(
    chain: { name: string },
    inputs: ChainValues,
    runId: string,
    parentRunId?: string
  ): Promise<void>;

  /** Handle chain end */
  handleChainEnd?(outputs: ChainValues, runId: string): Promise<void>;

  /** Handle chain error */
  handleChainError?(error: Error, runId: string): Promise<void>;

  /** Handle tool start */
  handleToolStart?(
    tool: { name: string },
    input: string,
    runId: string,
    parentRunId?: string
  ): Promise<void>;

  /** Handle tool end */
  handleToolEnd?(output: string, runId: string): Promise<void>;

  /** Handle tool error */
  handleToolError?(error: Error, runId: string): Promise<void>;
}

/**
 * Console callback handler for logging
 */
class ConsoleCallbackHandler extends BaseCallbackHandler {
  constructor();

  handleLLMStart(llm: any, prompts: string[], runId: string): Promise<void>;
  handleLLMEnd(output: LLMResult, runId: string): Promise<void>;
  handleChainStart(chain: any, inputs: ChainValues, runId: string): Promise<void>;
  handleChainEnd(outputs: ChainValues, runId: string): Promise<void>;
}

/**
 * LangSmith callback handler
 */
class LangChainTracer extends BaseCallbackHandler {
  constructor(fields?: TracerFields);

  /** LangSmith client */
  client: LangSmithClient;

  /** Project name */
  projectName?: string;

  handleLLMStart(llm: any, prompts: string[], runId: string): Promise<void>;
  handleLLMEnd(output: LLMResult, runId: string): Promise<void>;
  handleChainStart(chain: any, inputs: ChainValues, runId: string): Promise<void>;
  handleChainEnd(outputs: ChainValues, runId: string): Promise<void>;
}
```
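
Custom handlers subclass `BaseCallbackHandler` and override only the hooks they care about. Below is a sketch that times LLM calls by pairing start and end events on their run IDs; the import path (`@langchain/core/callbacks/base` in recent versions) is an assumption.

```typescript
import { BaseCallbackHandler } from "@langchain/core/callbacks/base";

// Records a start time per run and logs the elapsed time when the run finishes
class TimingCallbackHandler extends BaseCallbackHandler {
  name = "timing_handler";
  private starts = new Map<string, number>();

  async handleLLMStart(_llm: unknown, _prompts: string[], runId: string) {
    this.starts.set(runId, Date.now());
  }

  async handleLLMEnd(_output: unknown, runId: string) {
    const startedAt = this.starts.get(runId);
    if (startedAt !== undefined) {
      console.log(`LLM run ${runId} finished in ${Date.now() - startedAt}ms`);
      this.starts.delete(runId);
    }
  }
}

// Attach the handler per call:
// await model.invoke("Hello", { callbacks: [new TimingCallbackHandler()] });
```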

725

726

## Types

727

728

### Hub Types

729

730

```typescript { .api }

731

interface HubPullOptions {

732

/** LangSmith API key */

733

apiKey?: string;

734

735

/** LangSmith API URL */

736

apiUrl?: string;

737

738

/** Include model information */

739

includeModel?: boolean;

740

741

/** Model class for deserialization */

742

modelClass?: any;

743

}

744

745

interface HubPushOptions {

746

/** LangSmith API key */

747

apiKey?: string;

748

749

/** LangSmith API URL */

750

apiUrl?: string;

751

752

/** Parent commit hash */

753

parentCommitHash?: string;

754

}

755

```

### LangSmith Types

```typescript { .api }
type EvaluatorType =
  | "qa"
  | "criteria"
  | "pairwise_string"
  | "labeled_pairwise_string"
  | "embedding_distance";

interface EvaluationCriteria {
  [criteriaName: string]: string;
}

interface EvaluationFeedback {
  key: string;
  score?: number;
  value?: string;
  comment?: string;
  correction?: string;
}

type DistanceMetric = "cosine" | "euclidean" | "manhattan" | "chebyshev";
```

### Storage Types

```typescript { .api }
interface LocalFileStoreOptions {
  /** Root directory path */
  rootPath: string;
}

interface EncoderBackedStoreFields<K, V> {
  /** Underlying storage */
  store: BaseStore<K, string>;

  /** Key encoding function */
  keyEncoder: (key: K) => K;

  /** Value serialization function */
  valueSerializer: (value: V) => string;

  /** Value deserialization function */
  valueDeserializer: (value: string) => V;
}
```

### Serialization Types

```typescript { .api }
interface OptionalImportMap {
  [moduleName: string]: () => Promise<any>;
}

interface SerializedFields {
  [key: string]: any;
}

type Serialized = {
  lc: number;
  type: string;
  id: string[];
  kwargs: Record<string, any>;
};
```

823

824

### SQL Database Types

825

826

```typescript { .api }

827

interface SqlDatabaseFields {

828

/** Database connection */

829

db: Database;

830

831

/** Database engine */

832

engine?: DatabaseEngine;

833

834

/** Tables to include */

835

includesTables?: string[];

836

837

/** Tables to ignore */

838

ignoreTables?: string[];

839

840

/** Sample rows in table info */

841

sampleRowsInTableInfo?: number;

842

843

/** Custom table information */

844

customTableInfo?: Record<string, string>;

845

}

846

```

847

848

### Callback Types

849

850

```typescript { .api }

851

interface TracerFields {

852

/** LangSmith client */

853

client?: LangSmithClient;

854

855

/** Project name */

856

projectName?: string;

857

858

/** Example ID */

859

exampleId?: string;

860

}

861

862

interface LLMResult {

863

generations: Generation[][];

864

llmOutput?: Record<string, any>;

865

}

866

867

interface Generation {

868

text: string;

869

generationInfo?: Record<string, any>;

870

}

871

```

872

873

## Utility Usage Patterns

874

875

### Application Monitoring

876

877

```typescript

878

import { LangChainTracer, ConsoleCallbackHandler } from "langchain/callbacks";

879

import { LLMChain } from "langchain/chains";

880

881

// Set up monitoring

882

const tracer = new LangChainTracer({

883

projectName: "my-langchain-app"

884

});

885

886

const consoleHandler = new ConsoleCallbackHandler();

887

888

// Use with chains

889

const chain = new LLMChain({

890

llm,

891

prompt,

892

callbacks: [tracer, consoleHandler]

893

});

894

895

const result = await chain.call({ input: "test" });

896

```

### Performance Caching

```typescript
import { FileSystemCache } from "langchain/cache";
import { OpenAI } from "@langchain/openai";

// Set up persistent cache
const cache = new FileSystemCache("./llm_cache");

const llm = new OpenAI({
  temperature: 0,
  cache // Responses will be cached automatically
});

// First call will hit the API
const result1 = await llm.predict("What is machine learning?");

// Second identical call will use cache
const result2 = await llm.predict("What is machine learning?");
```

### Data Pipeline with Storage

```typescript
import { LocalFileStore, EncoderBackedStore } from "langchain/storage";
import { CacheBackedEmbeddings } from "langchain/embeddings/cache_backed";
import { OpenAIEmbeddings } from "@langchain/openai";

// Set up persistent storage pipeline
const rawStore = await LocalFileStore.fromPath("./pipeline_data");

const jsonStore = new EncoderBackedStore({
  store: rawStore,
  keyEncoder: (key: string) => `processed:${key}`,
  valueSerializer: JSON.stringify,
  valueDeserializer: JSON.parse
});

// Cache embeddings for performance
const embeddings = new OpenAIEmbeddings();
const embeddingCache = await LocalFileStore.fromPath("./embedding_cache");
const cachedEmbeddings = CacheBackedEmbeddings.fromBytesStore(
  embeddings,
  embeddingCache
);

// Process and store results
await jsonStore.mset([
  ["results", { processed: true, timestamp: Date.now() }]
]);
```