# Bulk Operations & Queries

PouchDB provides efficient bulk operations for writing many documents at once, and comprehensive querying with flexible filtering and pagination for reading them back.
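
The examples throughout this guide assume an existing database handle named `db`. A minimal setup might look like this (the database name `mydb` is arbitrary):

```javascript
// Minimal setup assumed by the examples below ('mydb' is an arbitrary name)
const PouchDB = require('pouchdb');
const db = new PouchDB('mydb');
```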

## Capabilities

### Bulk Document Operations

#### db.bulkDocs()

Creates, updates, or deletes multiple documents in a single call. The operation is not atomic: each document is written independently, and the result array reports success or failure per document.

```javascript { .api }
/**
 * Create, update, or delete multiple documents in a single operation
 * @param docs - Array of document objects or object with docs array
 * @param options - Bulk operation configuration options
 * @param callback - Optional callback function (err, results) => void
 * @returns Promise resolving to array of operation results
 */
db.bulkDocs(docs, options, callback);
```
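
Like the rest of the PouchDB API, `bulkDocs()` supports both promises and Node-style callbacks; a quick sketch of the two styles:

```javascript
// Promise style (used throughout this guide)
const bulkResponse = await db.bulkDocs(docs);

// Callback style
db.bulkDocs(docs, {}, (err, results) => {
  if (err) {
    return console.error(err);
  }
  console.log(results);
});
```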

**Usage Examples:**

```javascript
// Create multiple documents
const docs = [
  { _id: 'user_001', name: 'Alice', email: 'alice@example.com' },
  { _id: 'user_002', name: 'Bob', email: 'bob@example.com' },
  { _id: 'user_003', name: 'Charlie', email: 'charlie@example.com' }
];

const results = await db.bulkDocs(docs);
console.log(results);
// Results array with success/error for each document

// Alternative format with a docs wrapper
const wrappedResults = await db.bulkDocs({
  docs: docs
});

// Update multiple documents
const existingDocs = await db.allDocs({
  keys: ['user_001', 'user_002'],
  include_docs: true
});

const updatedDocs = existingDocs.rows.map(row => ({
  ...row.doc,
  updated: new Date().toISOString()
}));

const updateResults = await db.bulkDocs(updatedDocs);

// Delete multiple documents
const docsToDelete = [
  { _id: 'user_001', _rev: '1-abc123', _deleted: true },
  { _id: 'user_002', _rev: '1-def456', _deleted: true }
];

const deleteResults = await db.bulkDocs(docsToDelete);
```

### Bulk Operation Results

```javascript { .api }
interface BulkDocsResult {
  /** Operation success status */
  ok?: boolean;

  /** Document ID */
  id: string;

  /** New revision (on success) */
  rev?: string;

  /** Error information (on failure) */
  error?: string;

  /** Error reason (on failure) */
  reason?: string;

  /** HTTP status code (on failure) */
  status?: number;
}
```
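
A common pattern, sketched below under the assumption that results follow the interface above, is to partition the result array into successes and failures:

```javascript
// Sketch: split bulkDocs results into successes and failures
const bulkResults = await db.bulkDocs(docs);

const succeeded = bulkResults.filter(result => result.ok);
const failed = bulkResults.filter(result => !result.ok);

failed.forEach(result => {
  // e.g. status 409 with error 'conflict' when a _rev is stale
  console.error(`${result.id} failed (${result.status}): ${result.error} - ${result.reason}`);
});
```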

### Document Queries

#### db.allDocs()

Retrieves all documents, sorted by `_id`, with comprehensive filtering and pagination options.

```javascript { .api }
/**
 * Retrieve all documents with optional filtering and pagination
 * @param options - Query configuration options
 * @param callback - Optional callback function (err, result) => void
 * @returns Promise resolving to query results
 */
db.allDocs(options, callback);
```

**Usage Examples:**

```javascript
// Basic query - get all document IDs
const result = await db.allDocs();
console.log(result.rows.map(row => row.id));

// Include full document content
const docsResult = await db.allDocs({
  include_docs: true
});

docsResult.rows.forEach(row => {
  console.log(row.doc.name);
});

// Paginated results
const paginatedResult = await db.allDocs({
  include_docs: true,
  limit: 10,
  skip: 20
});

// Query specific documents by ID
const specificDocs = await db.allDocs({
  keys: ['user_001', 'user_003', 'user_005'],
  include_docs: true
});

// Range queries
const rangeResult = await db.allDocs({
  startkey: 'user_001',
  endkey: 'user_999',
  include_docs: true
});

// Descending order
const descendingResult = await db.allDocs({
  descending: true,
  limit: 5,
  include_docs: true
});
```

### All Docs Results

```javascript { .api }
interface AllDocsResult {
  /** Total number of rows (before limit/skip) */
  total_rows: number;

  /** Starting offset */
  offset: number;

  /** Array of document rows */
  rows: AllDocsRow[];
}

interface AllDocsRow {
  /** Document ID */
  id: string;

  /** Document key (same as ID) */
  key: string;

  /** Document value containing revision info */
  value: {
    rev: string;
    deleted?: boolean;
  };

  /** Full document (if include_docs: true) */
  doc?: any;

  /** Error information (if document couldn't be retrieved) */
  error?: string;
}
```
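
When querying with `keys`, rows are returned in the requested order, and IDs that are missing or deleted still produce a row; a defensive-handling sketch (the IDs are illustrative):

```javascript
const res = await db.allDocs({
  keys: ['user_001', 'no_such_id'], // illustrative IDs
  include_docs: true
});

res.rows.forEach(row => {
  if (row.error) {
    console.warn(`${row.key}: ${row.error}`); // e.g. 'not_found' for missing IDs
  } else if (row.value.deleted) {
    console.log(`${row.id} has been deleted`);
  } else {
    console.log(row.doc);
  }
});
```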

## Configuration Options

### BulkDocs Options

```javascript { .api }
interface BulkDocsOptions {
  /** Whether to assign new revision IDs (default: true) */
  new_edits?: boolean;

  /** Additional bulk operation options */
  [key: string]: any;
}
```
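
Setting `new_edits: false` tells the database to store documents with the revision IDs they already carry rather than generating new ones; this is mainly useful for replication-style code. A minimal sketch (the `_rev` value is illustrative):

```javascript
// Write a document while preserving its existing revision ID.
// Only do this when you control the _rev values, e.g. when copying
// documents between databases.
await db.bulkDocs(
  [{ _id: 'user_001', _rev: '2-xyz789', name: 'Alice' }], // illustrative _rev
  { new_edits: false }
);
```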

### AllDocs Options

```javascript { .api }
interface AllDocsOptions {
  /** Include full document content in results */
  include_docs?: boolean;

  /** Include conflicts array for each document */
  conflicts?: boolean;

  /** Include attachment data as base64-encoded strings */
  attachments?: boolean;

  /** With attachments, return attachment data as Blobs/Buffers instead of base64 */
  binary?: boolean;

  /** Array of specific document IDs to retrieve */
  keys?: string[];

  /** Maximum number of documents to return */
  limit?: number;

  /** Number of documents to skip */
  skip?: number;

  /** Reverse result order */
  descending?: boolean;

  /** Start key for range queries */
  startkey?: any;

  /** End key for range queries */
  endkey?: any;

  /** Include documents equal to endkey (default: true) */
  inclusive_end?: boolean;

  /** Specific key to query */
  key?: any;

  /** Additional query options */
  [key: string]: any;
}
```
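
Two options not exercised in the earlier examples are `key` (an exact-match lookup) and `inclusive_end` (half-open ranges); a short sketch with illustrative IDs:

```javascript
// Exact-key lookup: only the row whose key equals 'user_002'
const exact = await db.allDocs({ key: 'user_002', include_docs: true });

// Half-open range: from 'user_001' up to, but excluding, 'user_100'
const halfOpen = await db.allDocs({
  startkey: 'user_001',
  endkey: 'user_100',
  inclusive_end: false
});
```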

## Advanced Usage Examples

### Batch Document Creation with Error Handling

```javascript
async function createUsersInBatches(users, batchSize = 100) {
  const results = [];

  for (let i = 0; i < users.length; i += batchSize) {
    const batch = users.slice(i, i + batchSize);

    try {
      const batchResults = await db.bulkDocs(batch);

      // Process results for this batch
      batchResults.forEach((result, index) => {
        if (result.ok) {
          console.log(`Created user: ${batch[index].name}`);
        } else {
          console.error(`Failed to create user ${batch[index].name}:`, result.error);
        }
      });

      results.push(...batchResults);
    } catch (err) {
      console.error(`Batch ${i / batchSize + 1} failed:`, err);
    }
  }

  return results;
}

// Usage
const users = [
  { _id: 'user_001', name: 'Alice' },
  { _id: 'user_002', name: 'Bob' },
  // ... more users
];

const results = await createUsersInBatches(users, 50);
```

### Bulk Update with Conflict Resolution

```javascript
async function bulkUpdateWithRetry(updates) {
  const results = [];
  let remaining = [...updates];

  while (remaining.length > 0) {
    const bulkResults = await db.bulkDocs(remaining);
    const conflicts = [];

    bulkResults.forEach((result, index) => {
      if (result.ok) {
        results.push(result);
      } else if (result.status === 409) {
        // Conflict - retry with latest revision
        conflicts.push(remaining[index]);
      } else {
        // Other error - log and skip
        console.error(`Failed to update ${remaining[index]._id}:`, result.error);
        results.push(result);
      }
    });

    // Fetch latest revisions for conflicted documents
    if (conflicts.length > 0) {
      const latestDocs = await db.allDocs({
        keys: conflicts.map(doc => doc._id),
        include_docs: true
      });

      // Rows come back in the same order as the requested keys
      remaining = latestDocs.rows.map((row, index) => ({
        ...conflicts[index],
        _rev: row.doc._rev
      }));
    } else {
      remaining = [];
    }
  }

  return results;
}
```

### Efficient Data Export

```javascript
async function exportAllDocuments(outputCallback) {
  const batchSize = 1000;
  let skip = 0;
  let hasMore = true;

  while (hasMore) {
    const result = await db.allDocs({
      include_docs: true,
      limit: batchSize,
      skip: skip
    });

    if (result.rows.length === 0) {
      hasMore = false;
    } else {
      // Process batch
      const documents = result.rows
        .filter(row => !row.id.startsWith('_design/'))
        .map(row => row.doc);

      await outputCallback(documents);

      skip += batchSize;
      hasMore = result.rows.length === batchSize;
    }
  }
}

// Usage
await exportAllDocuments(async (docs) => {
  console.log(`Exporting batch of ${docs.length} documents`);
  // Write to file, send to API, etc.
});
```
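
Note that `skip` forces the database to scan past every skipped row on each call, so this pattern slows down as the offset grows; for very large databases, the key-based pagination shown under "Incremental Data Processing" below is usually faster.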

### Filtered Document Retrieval

```javascript
// Get documents by type using ID prefix
async function getDocumentsByType(type) {
  return await db.allDocs({
    startkey: `${type}_`,
    // '\ufff0' is a very high code point, so the range covers every ID
    // that begins with the `${type}_` prefix
    endkey: `${type}_\ufff0`,
    include_docs: true
  });
}

// Get active users (assumes IDs like 'user_001', 'user_002', etc.)
const activeUsers = await getDocumentsByType('user');
const activeUserDocs = activeUsers.rows
  .map(row => row.doc)
  .filter(doc => doc.active === true);

// Get documents within date range (assumes ISO date strings in IDs)
async function getDocumentsByDateRange(startDate, endDate) {
  return await db.allDocs({
    startkey: startDate,
    endkey: endDate,
    include_docs: true
  });
}
```

### Bulk Operations with Validation

```javascript
async function bulkCreateWithValidation(documents, validator) {
  // Validate all documents first
  const validationResults = documents.map((doc, index) => {
    try {
      validator(doc);
      return { valid: true, doc, index };
    } catch (error) {
      return { valid: false, doc, index, error };
    }
  });

  // Separate valid and invalid documents
  const validDocs = validationResults
    .filter(result => result.valid)
    .map(result => result.doc);

  const invalidDocs = validationResults
    .filter(result => !result.valid);

  // Report validation errors
  invalidDocs.forEach(result => {
    console.error(`Document ${result.index} failed validation:`, result.error.message);
  });

  // Bulk create valid documents
  let bulkResults = [];
  if (validDocs.length > 0) {
    bulkResults = await db.bulkDocs(validDocs);
  }

  return {
    created: bulkResults.filter(result => result.ok).length,
    failed: bulkResults.filter(result => !result.ok).length,
    invalid: invalidDocs.length,
    results: bulkResults
  };
}

// Usage with validator
const userValidator = (doc) => {
  if (!doc.name || typeof doc.name !== 'string') {
    throw new Error('Name is required and must be a string');
  }
  if (!doc.email || !doc.email.includes('@')) {
    throw new Error('Valid email is required');
  }
};

const result = await bulkCreateWithValidation(users, userValidator);
console.log(`Created: ${result.created}, Failed: ${result.failed}, Invalid: ${result.invalid}`);
```

### Incremental Data Processing

```javascript
async function processDocumentsIncrementally(processor, options = {}) {
  const batchSize = options.batchSize || 100;
  let lastKey = options.startkey;
  let isContinuation = false; // true once we are resuming from a previous batch
  let processedCount = 0;

  while (true) {
    const queryOptions = {
      include_docs: true,
      limit: batchSize + 1, // Get one extra to check if there are more
      skip: isContinuation ? 1 : 0 // Skip the key already processed last batch
    };

    if (lastKey !== undefined) {
      queryOptions.startkey = lastKey;
    }

    const result = await db.allDocs(queryOptions);

    if (result.rows.length === 0) {
      break; // No more documents
    }

    // Process the batch (excluding the extra document if present)
    const docsToProcess = result.rows.slice(0, batchSize);
    const hasMore = result.rows.length > batchSize;

    for (const row of docsToProcess) {
      await processor(row.doc);
      processedCount++;
    }

    if (!hasMore) {
      break;
    }

    // Resume from the last key we actually processed, not the extra row,
    // so no document is skipped between batches
    lastKey = docsToProcess[docsToProcess.length - 1].key;
    isContinuation = true;
  }

  return processedCount;
}

// Usage
const processedCount = await processDocumentsIncrementally(
  async (doc) => {
    // Process each document
    console.log(`Processing document: ${doc._id}`);
    // Update, transform, or analyze the document
  },
  { batchSize: 50 }
);

console.log(`Processed ${processedCount} documents`);
```

## Performance Considerations

### Optimizing Bulk Operations

```javascript
// Optimize bulk operations for large datasets
const OPTIMAL_BATCH_SIZE = 1000;

async function optimizedBulkCreate(documents) {
  const results = [];

  for (let i = 0; i < documents.length; i += OPTIMAL_BATCH_SIZE) {
    const batch = documents.slice(i, i + OPTIMAL_BATCH_SIZE);

    try {
      const batchResults = await db.bulkDocs(batch, {
        new_edits: true // Ensure new revisions are created (this is the default)
      });

      results.push(...batchResults);
    } catch (err) {
      console.error(`Batch starting at index ${i} failed:`, err);
    }
  }

  return results;
}

// Memory-efficient document processing
async function processLargeDataset(callback) {
  let skip = 0;
  const limit = 500; // Keep memory usage low

  while (true) {
    const result = await db.allDocs({
      include_docs: false, // Don't load full docs into memory
      limit: limit,
      skip: skip
    });

    if (result.rows.length === 0) break;

    // Process documents one by one to minimize memory usage
    for (const row of result.rows) {
      const doc = await db.get(row.id);
      await callback(doc);
    }

    skip += limit;
  }
}
```