# Large Objects

PostgreSQL large object support for efficient storage and streaming of binary data, files, and multimedia content.

## Capabilities

### Large Object Creation

Create and access PostgreSQL large objects for storing binary data that exceeds standard column size limits.

```javascript { .api }
/**
 * Create or access a large object
 * @param oid - Optional object identifier (omit to create new)
 * @param mode - Access mode flags (default: 0x00020000 | 0x00040000 = read/write)
 * @returns Promise resolving to LargeObject instance
 */
largeObject(oid?: number, mode?: number): Promise<LargeObject>;
```

**Usage Examples:**

```javascript
// Create a new large object
const newLargeObject = await sql.largeObject();
console.log('Created large object with OID:', newLargeObject.oid);

// Open an existing large object for reading only (0x00040000 = read flag)
const existingObject = await sql.largeObject(12345, 0x00040000);

// Open for writing only (0x00020000 = write flag)
const writableObject = await sql.largeObject(12345, 0x00020000);

// Open for reading and writing (default)
const readWriteObject = await sql.largeObject(12345);
```
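
A created large object persists in the database until it is explicitly removed. A minimal sketch of deleting one by OID with PostgreSQL's `lo_unlink` (the same call used in the content-management example below):

```javascript
// Remove the large object once nothing references its OID anymore
await sql`SELECT lo_unlink(${newLargeObject.oid})`;
```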

## LargeObject Interface

Comprehensive API for manipulating large object data with streaming support.

```javascript { .api }
interface LargeObject {
  /** Create readable stream from large object */
  readable(options?: ReadableOptions): Promise<Readable>;

  /** Create writable stream to large object */
  writable(options?: WritableOptions): Promise<Writable>;

  /** Close the large object */
  close(): Promise<void>;

  /** Get current position in object */
  tell(): Promise<number>;

  /** Read data from current position */
  read(size: number): Promise<Uint8Array>;

  /** Write data at current position */
  write(buffer: Uint8Array): Promise<[{ data: Uint8Array }]>;

  /** Truncate object to specified size */
  truncate(size: number): Promise<void>;

  /** Seek to position in object */
  seek(offset: number, whence?: number): Promise<void>;

  /** Get size and current position */
  size(): Promise<[{ position: bigint, size: bigint }]>;
}

interface ReadableOptions {
  /** Buffer size for reading */
  highWaterMark?: number;

  /** Start position in object */
  start?: number;

  /** End position in object */
  end?: number;
}

interface WritableOptions {
  /** Buffer size for writing */
  highWaterMark?: number;

  /** Start position in object */
  start?: number;
}
```

**Usage Examples:**

```javascript
// Basic large object operations
const largeObj = await sql.largeObject();

// Write data
const data = Buffer.from('Hello, large object world!');
await largeObj.write(data);

// Get current position
const position = await largeObj.tell();
console.log('Current position:', position);

// Seek to beginning
await largeObj.seek(0);

// Read data
const readData = await largeObj.read(26);
console.log('Read data:', Buffer.from(readData).toString());

// Get size information (size() returns a single-row result)
const [{ size, position: currentPos }] = await largeObj.size();
console.log(`Size: ${size}, Position: ${currentPos}`);

// Close when done
await largeObj.close();
```
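
The `truncate()` method declared above is not exercised in the example; a minimal sketch (the OID 12345 is a placeholder):

```javascript
// Shrink an existing object to its first 5 bytes
const obj = await sql.largeObject(12345); // read/write (default mode)
await obj.truncate(5);

const [{ size }] = await obj.size();
console.log('New size:', size);
await obj.close();
```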

## Streaming Operations

### Reading Large Objects

Read large objects efficiently using streams to keep memory usage bounded.

```javascript { .api }
/**
 * Seek position constants
 */
const SEEK_SET = 0; // Absolute position
const SEEK_CUR = 1; // Relative to current position
const SEEK_END = 2; // Relative to end of object
```
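
As a short sketch of how these constants are used, reading the last 100 bytes of an object by seeking relative to its end (assuming the constants above are in scope; the OID 12345 is a placeholder):

```javascript
const obj = await sql.largeObject(12345); // read/write (default mode)
await obj.seek(-100, SEEK_END); // position 100 bytes before the end
const tail = await obj.read(100);
await obj.close();
```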

**Usage Examples:**

```javascript
import { createWriteStream } from 'fs';

// Stream large object to file
async function exportLargeObjectToFile(oid, filePath) {
  const largeObj = await sql.largeObject(oid, 0x00040000); // read-only
  const readable = await largeObj.readable();
  const writeStream = createWriteStream(filePath);

  return new Promise((resolve, reject) => {
    readable.pipe(writeStream);

    writeStream.on('finish', async () => {
      await largeObj.close();
      resolve();
    });

    writeStream.on('error', async (error) => {
      await largeObj.close();
      reject(error);
    });
  });
}

// Usage
await exportLargeObjectToFile(12345, './exported-file.bin');

// Stream with custom options
const largeObj = await sql.largeObject(oid, 0x00040000); // read-only
const readable = await largeObj.readable({
  highWaterMark: 64 * 1024, // 64KB chunks
  start: 1000,              // Start at byte 1000
  end: 50000                // End at byte 50000
});

readable.on('data', (chunk) => {
  console.log(`Received ${chunk.length} bytes`);
  processChunk(chunk);
});

readable.on('end', async () => {
  console.log('Stream finished');
  await largeObj.close();
});
```

### Writing Large Objects

Upload files and data to large objects using streams.

```javascript
import { createReadStream } from 'fs';

// Upload file to large object
async function uploadFileToLargeObject(filePath) {
  const largeObj = await sql.largeObject(); // Create new
  const writable = await largeObj.writable();
  const readStream = createReadStream(filePath);

  return new Promise((resolve, reject) => {
    readStream.pipe(writable);

    writable.on('finish', async () => {
      await largeObj.close();
      resolve(largeObj.oid);
    });

    writable.on('error', async (error) => {
      await largeObj.close();
      reject(error);
    });
  });
}

// Usage
const oid = await uploadFileToLargeObject('./large-file.pdf');
console.log('Uploaded file to large object:', oid);

// Write with custom buffer size
const largeObj = await sql.largeObject();
const writable = await largeObj.writable({
  highWaterMark: 128 * 1024 // 128KB buffer
});

// Write multiple chunks
const chunks = [
  Buffer.from('First chunk of data'),
  Buffer.from('Second chunk of data'),
  Buffer.from('Final chunk')
];

for (const chunk of chunks) {
  writable.write(chunk);
}

writable.end();

writable.on('finish', async () => {
  console.log('All chunks written');
  await largeObj.close();
});
```
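
The manual Promise wrapping above can also be expressed with Node's `stream/promises` helper; a sketch of the same upload under that approach (the function name is illustrative):

```javascript
import { createReadStream } from 'fs';
import { pipeline } from 'stream/promises';

// Same upload as above, with pipeline() handling finish/error propagation
async function uploadFileToLargeObjectPipeline(filePath) {
  const largeObj = await sql.largeObject(); // Create new
  try {
    await pipeline(createReadStream(filePath), await largeObj.writable());
    return largeObj.oid;
  } finally {
    await largeObj.close();
  }
}
```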

## Advanced Usage Patterns

### Image Processing Pipeline

Process images stored as large objects with streaming transformations.

```javascript
import sharp from 'sharp';

class ImageProcessor {
  async processImage(sourceOid, targetOid, transformOptions) {
    const sourceObj = await sql.largeObject(sourceOid, 0x00040000); // read-only
    const targetObj = await sql.largeObject(targetOid, 0x00020000); // write-only

    const readable = await sourceObj.readable();
    const writable = await targetObj.writable();

    // Create image transformation stream
    const transformer = sharp()
      .resize(transformOptions.width, transformOptions.height)
      .jpeg({ quality: transformOptions.quality || 80 });

    return new Promise((resolve, reject) => {
      readable
        .pipe(transformer)
        .pipe(writable);

      writable.on('finish', async () => {
        await Promise.all([
          sourceObj.close(),
          targetObj.close()
        ]);
        resolve(targetOid);
      });

      writable.on('error', async (error) => {
        await Promise.all([
          sourceObj.close(),
          targetObj.close()
        ]);
        reject(error);
      });
    });
  }

  async createThumbnail(imageOid) {
    const thumbnailObj = await sql.largeObject(); // Create new for thumbnail
    await thumbnailObj.close(); // Release the handle; processImage reopens it by OID

    await this.processImage(imageOid, thumbnailObj.oid, {
      width: 200,
      height: 200,
      quality: 70
    });

    return thumbnailObj.oid;
  }
}

// Usage
const processor = new ImageProcessor();
const thumbnailOid = await processor.createThumbnail(originalImageOid);
```

### File Chunking and Assembly

Split large files into chunks for efficient transfer and storage.

```javascript
class FileChunker {
  constructor(chunkSize = 1024 * 1024) { // 1MB chunks
    this.chunkSize = chunkSize;
  }

  async splitFile(sourceOid) {
    const sourceObj = await sql.largeObject(sourceOid, 0x00040000); // read-only
    const [{ size }] = await sourceObj.size();
    const totalSize = Number(size); // size is reported as a bigint
    const chunks = [];

    let bytesRead = 0;
    while (bytesRead < totalSize) {
      const chunkObj = await sql.largeObject(); // New chunk
      const chunkSize = Math.min(this.chunkSize, totalSize - bytesRead);

      // Read chunk from source
      await sourceObj.seek(bytesRead);
      const chunkData = await sourceObj.read(chunkSize);

      // Write to chunk object
      await chunkObj.write(chunkData);
      await chunkObj.close();

      chunks.push({
        oid: chunkObj.oid,
        offset: bytesRead,
        size: chunkSize
      });

      bytesRead += chunkSize;
    }

    await sourceObj.close();
    return chunks;
  }

  async assembleFile(chunks) {
    const assembledObj = await sql.largeObject();

    // Sort chunks by offset
    chunks.sort((a, b) => a.offset - b.offset);

    for (const chunk of chunks) {
      const chunkObj = await sql.largeObject(chunk.oid, 0x00040000); // read-only
      const chunkData = await chunkObj.read(chunk.size);

      await assembledObj.write(chunkData);
      await chunkObj.close();
    }

    const { oid } = assembledObj;
    await assembledObj.close();
    return oid;
  }
}

// Usage
const chunker = new FileChunker();
const chunks = await chunker.splitFile(largeFileOid);
console.log(`File split into ${chunks.length} chunks`);

const reassembledOid = await chunker.assembleFile(chunks);
console.log('File reassembled:', reassembledOid);
```

### Content Management System

Build a content management system using large objects for file storage.

```javascript
import { promises as fs } from 'fs';

class ContentManager {
  async storeFile(fileBuffer, metadata) {
    const transaction = await sql.begin(async (sql) => {
      // Create large object for file data
      const largeObj = await sql.largeObject();
      await largeObj.write(fileBuffer);
      await largeObj.close();

      // Store metadata in regular table
      const [file] = await sql`
        INSERT INTO files (
          oid, filename, content_type, size,
          uploaded_at, metadata
        ) VALUES (
          ${largeObj.oid},
          ${metadata.filename},
          ${metadata.contentType},
          ${fileBuffer.length},
          NOW(),
          ${JSON.stringify(metadata)}
        )
        RETURNING *
      `;

      return file;
    });

    return transaction;
  }

  async getFileStream(fileId) {
    const [file] = await sql`
      SELECT oid, filename, content_type
      FROM files
      WHERE id = ${fileId}
    `;

    if (!file) {
      throw new Error(`File ${fileId} not found`);
    }

    const largeObj = await sql.largeObject(file.oid, 0x00040000); // read-only
    const readable = await largeObj.readable();

    // Add metadata to stream
    readable.filename = file.filename;
    readable.contentType = file.content_type;

    // Clean up when stream ends
    readable.on('end', () => largeObj.close());
    readable.on('error', () => largeObj.close());

    return readable;
  }

  async deleteFile(fileId) {
    await sql.begin(async (sql) => {
      const [file] = await sql`
        DELETE FROM files
        WHERE id = ${fileId}
        RETURNING oid
      `;

      if (file) {
        // Delete the large object
        await sql`SELECT lo_unlink(${file.oid})`;
      }
    });
  }

  async duplicateFile(fileId) {
    const [originalFile] = await sql`
      SELECT * FROM files WHERE id = ${fileId}
    `;

    if (!originalFile) {
      throw new Error(`File ${fileId} not found`);
    }

    return await sql.begin(async (sql) => {
      // Create new large object
      const newLargeObj = await sql.largeObject();
      const originalObj = await sql.largeObject(originalFile.oid, 0x00040000); // read-only

      // Copy data
      const readable = await originalObj.readable();
      const writable = await newLargeObj.writable();

      await new Promise((resolve, reject) => {
        readable.pipe(writable);
        writable.on('finish', resolve);
        writable.on('error', reject);
      });

      await Promise.all([
        originalObj.close(),
        newLargeObj.close()
      ]);

      // Create database record
      const [duplicate] = await sql`
        INSERT INTO files (
          oid, filename, content_type, size,
          uploaded_at, metadata
        ) VALUES (
          ${newLargeObj.oid},
          ${'copy_of_' + originalFile.filename},
          ${originalFile.content_type},
          ${originalFile.size},
          NOW(),
          ${originalFile.metadata}
        )
        RETURNING *
      `;

      return duplicate;
    });
  }
}

// Usage
const contentManager = new ContentManager();

// Store a file
const fileBuffer = await fs.readFile('./document.pdf');
const file = await contentManager.storeFile(fileBuffer, {
  filename: 'document.pdf',
  contentType: 'application/pdf',
  tags: ['important', 'legal']
});

// Retrieve file stream
const stream = await contentManager.getFileStream(file.id);
stream.pipe(response); // In Express.js (see the sketch below)

// Duplicate file
const duplicate = await contentManager.duplicateFile(file.id);
```
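
As a sketch of how the stream metadata could be consumed in an HTTP handler (assuming Express; the route is illustrative):

```javascript
import express from 'express';

const app = express();

app.get('/files/:id', async (req, res, next) => {
  try {
    const stream = await contentManager.getFileStream(req.params.id);
    res.setHeader('Content-Type', stream.contentType);
    res.setHeader('Content-Disposition', `inline; filename="${stream.filename}"`);
    stream.pipe(res);
  } catch (error) {
    next(error);
  }
});
```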

## Performance Optimization

### Memory-Efficient Processing

Process large objects without loading the entire content into memory.

```javascript
import { createHash } from 'crypto';

async function processLargeObjectInChunks(oid, processor) {
  const largeObj = await sql.largeObject(oid, 0x00040000); // read-only
  const chunkSize = 64 * 1024; // 64KB chunks
  let position = 0;

  try {
    while (true) {
      const chunk = await largeObj.read(chunkSize);

      if (chunk.length === 0) {
        break; // End of object
      }

      await processor(chunk, position);
      position += chunk.length;
    }
  } finally {
    await largeObj.close();
  }
}

// Usage: Calculate checksum without loading entire file
async function calculateChecksum(oid) {
  const hash = createHash('sha256');

  await processLargeObjectInChunks(oid, (chunk) => {
    hash.update(chunk);
  });

  return hash.digest('hex');
}

const checksum = await calculateChecksum(fileOid);
```
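
The same checksum can also be computed by iterating the stream returned by `readable()`; a sketch under that approach (the function name is illustrative):

```javascript
import { createHash } from 'crypto';

async function calculateChecksumStreaming(oid) {
  const largeObj = await sql.largeObject(oid, 0x00040000); // read-only
  const hash = createHash('sha256');

  try {
    const readable = await largeObj.readable({ highWaterMark: 64 * 1024 });
    // Async-iterate the stream so only one chunk is in memory at a time
    for await (const chunk of readable) {
      hash.update(chunk);
    }
    return hash.digest('hex');
  } finally {
    await largeObj.close();
  }
}
```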

### Concurrent Operations

Process multiple large objects efficiently by limiting how many are open at once.

```javascript
class LargeObjectPool {
  constructor(maxConcurrent = 5) {
    this.maxConcurrent = maxConcurrent;
    this.active = new Set();
  }

  async process(oids, processor) {
    const promises = oids.map(oid => this.processOne(oid, processor));
    return Promise.all(promises);
  }

  async processOne(oid, processor) {
    // Wait for a free slot
    while (this.active.size >= this.maxConcurrent) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }

    this.active.add(oid);

    try {
      const largeObj = await sql.largeObject(oid, 0x00040000); // read-only
      try {
        return await processor(largeObj);
      } finally {
        // Close even if the processor throws
        await largeObj.close();
      }
    } finally {
      this.active.delete(oid);
    }
  }
}

// Usage
const pool = new LargeObjectPool(10);

const results = await pool.process([123, 456, 789], async (largeObj) => {
  const [{ size }] = await largeObj.size();
  return size;
});

console.log('File sizes:', results);
```