PouchDB provides bulk document operations for writing many documents in a single request, plus a flexible allDocs query API for retrieving documents with filtering and pagination options.
Creates, updates, or deletes multiple documents in a single request. Note that the operation is not transactional: each document in the batch succeeds or fails independently, so partial success is possible.
/**
 * Create, update, or delete multiple documents in a single operation
 * @param docs - Array of document objects or object with docs array
 * @param options - Bulk operation configuration options
 * @param callback - Optional callback function (err, results) => void
 * @returns Promise resolving to array of operation results
 */
db.bulkDocs(docs, options, callback);
Usage Examples:
// Create multiple documents
const docs = [
  { _id: 'user_001', name: 'Alice', email: 'alice@example.com' },
  { _id: 'user_002', name: 'Bob', email: 'bob@example.com' },
  { _id: 'user_003', name: 'Charlie', email: 'charlie@example.com' }
];
const results = await db.bulkDocs(docs);
console.log(results);
// Results array with success/error for each document
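// Illustrative result shape (field names follow the BulkDocsResult
// interface below; the rev value here is made up, and exact error
// fields can vary by adapter):
// [
//   { ok: true, id: 'user_001', rev: '1-abc123' },
//   { id: 'user_002', error: 'conflict', reason: 'Document update conflict', status: 409 }
// ]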
// Alternative format with docs wrapper
const wrappedResults = await db.bulkDocs({
  docs: docs
});
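// The callback form from the signature above is also supported
// (PouchDB APIs accept either promises or Node-style callbacks):
db.bulkDocs(docs, (err, response) => {
  if (err) {
    return console.error(err);
  }
  console.log(response);
});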
// Update multiple documents
const existingDocs = await db.allDocs({
  keys: ['user_001', 'user_002'],
  include_docs: true
});
const updatedDocs = existingDocs.rows.map(row => ({
  ...row.doc,
  updated: new Date().toISOString()
}));
const updateResults = await db.bulkDocs(updatedDocs);
// Delete multiple documents
const docsToDelete = [
  { _id: 'user_001', _rev: '1-abc123', _deleted: true },
  { _id: 'user_002', _rev: '1-def456', _deleted: true }
];
const deleteResults = await db.bulkDocs(docsToDelete);
Each element of the returned results array has the following shape:
interface BulkDocsResult {
  /** Operation success status */
  ok?: boolean;
  /** Document ID */
  id: string;
  /** New revision (on success) */
  rev?: string;
  /** Error name (on failure) */
  error?: string;
  /** Error reason (on failure) */
  reason?: string;
  /** HTTP status code (on failure) */
  status?: number;
}
Retrieves all documents with comprehensive filtering and pagination options.
/**
 * Retrieve all documents with optional filtering and pagination
 * @param options - Query configuration options
 * @param callback - Optional callback function (err, result) => void
 * @returns Promise resolving to query results
 */
db.allDocs(options, callback);
Usage Examples:
// Basic query - get all document IDs
const result = await db.allDocs();
console.log(result.rows.map(row => row.id));
// Include full document content
const docsResult = await db.allDocs({
  include_docs: true
});
docsResult.rows.forEach(row => {
  console.log(row.doc.name);
});
// Paginated results
const paginatedResult = await db.allDocs({
  include_docs: true,
  limit: 10,
  skip: 20
});
// Query specific documents by ID
const specificDocs = await db.allDocs({
  keys: ['user_001', 'user_003', 'user_005'],
  include_docs: true
});
// Range queries
const rangeResult = await db.allDocs({
  startkey: 'user_001',
  endkey: 'user_999',
  include_docs: true
});
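// Surface conflicting revisions (a hedged sketch: with conflicts: true,
// each returned doc that has unresolved conflicts carries a _conflicts
// array of revision IDs; include_docs is required for docs to be present)
const conflictResult = await db.allDocs({
  include_docs: true,
  conflicts: true
});
conflictResult.rows.forEach(row => {
  if (row.doc && row.doc._conflicts) {
    console.log(`${row.id} has conflicting revisions:`, row.doc._conflicts);
  }
});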
// Descending order
const descendingResult = await db.allDocs({
  descending: true,
  limit: 5,
  include_docs: true
});
The result object has the following shape:
interface AllDocsResult {
  /** Total number of rows (before limit/skip) */
  total_rows: number;
  /** Starting offset */
  offset: number;
  /** Array of document rows */
  rows: AllDocsRow[];
}
interface AllDocsRow {
  /** Document ID */
  id: string;
  /** Document key (same as the ID for allDocs) */
  key: string;
  /** Document value containing revision info */
  value: {
    rev: string;
    deleted?: boolean;
  };
  /** Full document (if include_docs: true) */
  doc?: any;
  /** Error information (if the document could not be retrieved) */
  error?: string;
}
Options accepted by bulkDocs:
interface BulkDocsOptions {
  /** Whether to assign new revision IDs (defaults to true; false preserves the supplied revisions, as in replication) */
  new_edits?: boolean;
  /** Additional bulk operation options */
  [key: string]: any;
}
Options accepted by allDocs:
interface AllDocsOptions {
  /** Include full document content in results */
  include_docs?: boolean;
  /** Include conflicts array for each document */
  conflicts?: boolean;
  /** Include attachment metadata */
  attachments?: boolean;
  /** Return attachments as binary data */
  binary?: boolean;
  /** Array of specific document IDs to retrieve */
  keys?: string[];
  /** Maximum number of documents to return */
  limit?: number;
  /** Number of documents to skip */
  skip?: number;
  /** Reverse result order */
  descending?: boolean;
  /** Start key for range queries */
  startkey?: any;
  /** End key for range queries */
  endkey?: any;
  /** Include documents equal to endkey */
  inclusive_end?: boolean;
  /** Specific key to query */
  key?: any;
  /** Additional query options */
  [key: string]: any;
}
Batch processing example: create documents in fixed-size batches so a single failed batch does not abort the whole import.
async function createUsersInBatches(users, batchSize = 100) {
  const results = [];
  for (let i = 0; i < users.length; i += batchSize) {
    const batch = users.slice(i, i + batchSize);
    try {
      const batchResults = await db.bulkDocs(batch);
      // Process results for this batch
      batchResults.forEach((result, index) => {
        if (result.ok) {
          console.log(`Created user: ${batch[index].name}`);
        } else {
          console.error(`Failed to create user ${batch[index].name}:`, result.error);
        }
      });
      results.push(...batchResults);
    } catch (err) {
      console.error(`Batch ${i / batchSize + 1} failed:`, err);
    }
  }
  return results;
}
// Usage
const users = [
  { _id: 'user_001', name: 'Alice' },
  { _id: 'user_002', name: 'Bob' },
  // ... more users
];
const results = await createUsersInBatches(users, 50);
Conflict-aware bulk updates: any document that fails with a 409 conflict is retried with its latest revision.
async function bulkUpdateWithRetry(updates) {
  const results = [];
  let remaining = [...updates];
  // Note: this loops until no conflicts remain, so concurrent writers
  // could prolong the retry cycle
  while (remaining.length > 0) {
    const bulkResults = await db.bulkDocs(remaining);
    const conflicts = [];
    bulkResults.forEach((result, index) => {
      if (result.ok) {
        results.push(result);
      } else if (result.status === 409) {
        // Conflict - retry with latest revision
        conflicts.push(remaining[index]);
      } else {
        // Other error - log and skip
        console.error(`Failed to update ${remaining[index]._id}:`, result.error);
        results.push(result);
      }
    });
    // Fetch latest revisions for conflicted documents
    // (a keys query returns rows in the same order as the keys array)
    if (conflicts.length > 0) {
      const latestDocs = await db.allDocs({
        keys: conflicts.map(doc => doc._id),
        include_docs: true
      });
      remaining = latestDocs.rows.map((row, index) => ({
        ...conflicts[index],
        _rev: row.doc._rev
      }));
    } else {
      remaining = [];
    }
  }
  return results;
}
Export all documents in batches using skip-based pagination:
async function exportAllDocuments(outputCallback) {
  const batchSize = 1000;
  let skip = 0;
  let hasMore = true;
  while (hasMore) {
    const result = await db.allDocs({
      include_docs: true,
      limit: batchSize,
      skip: skip
    });
    if (result.rows.length === 0) {
      hasMore = false;
    } else {
      // Process batch, excluding design documents
      const documents = result.rows
        .filter(row => !row.id.startsWith('_design/'))
        .map(row => row.doc);
      await outputCallback(documents);
      skip += batchSize;
      hasMore = result.rows.length === batchSize;
    }
  }
}
// Usage
await exportAllDocuments(async (docs) => {
  console.log(`Exporting batch of ${docs.length} documents`);
  // Write to file, send to API, etc.
});
// Get documents by type using ID prefix
async function getDocumentsByType(type) {
  return await db.allDocs({
    startkey: `${type}_`,
    endkey: `${type}_\ufff0`,
    include_docs: true
  });
}
// Get active users (assumes IDs like 'user_001', 'user_002', etc.)
const activeUsers = await getDocumentsByType('user');
const activeUserDocs = activeUsers.rows
  .map(row => row.doc)
  .filter(doc => doc.active === true);
// Get documents within date range (assumes ISO date strings in IDs)
async function getDocumentsByDateRange(startDate, endDate) {
  return await db.allDocs({
    startkey: startDate,
    endkey: endDate,
    include_docs: true
  });
}
Validate documents before writing, so only well-formed documents reach bulkDocs:
async function bulkCreateWithValidation(documents, validator) {
  // Validate all documents first
  const validationResults = documents.map((doc, index) => {
    try {
      validator(doc);
      return { valid: true, doc, index };
    } catch (error) {
      return { valid: false, doc, index, error };
    }
  });
  // Separate valid and invalid documents
  const validDocs = validationResults
    .filter(result => result.valid)
    .map(result => result.doc);
  const invalidDocs = validationResults
    .filter(result => !result.valid);
  // Report validation errors
  invalidDocs.forEach(result => {
    console.error(`Document ${result.index} failed validation:`, result.error.message);
  });
  // Bulk create valid documents
  let bulkResults = [];
  if (validDocs.length > 0) {
    bulkResults = await db.bulkDocs(validDocs);
  }
  return {
    created: bulkResults.filter(result => result.ok).length,
    failed: bulkResults.filter(result => !result.ok).length,
    invalid: invalidDocs.length,
    results: bulkResults
  };
}
// Usage with validator
const userValidator = (doc) => {
  if (!doc.name || typeof doc.name !== 'string') {
    throw new Error('Name is required and must be a string');
  }
  if (!doc.email || !doc.email.includes('@')) {
    throw new Error('Valid email is required');
  }
};
const result = await bulkCreateWithValidation(users, userValidator);
console.log(`Created: ${result.created}, Failed: ${result.failed}, Invalid: ${result.invalid}`);
Process documents incrementally with startkey-based pagination, which avoids large skip offsets:
async function processDocumentsIncrementally(processor, options = {}) {
  const batchSize = options.batchSize || 100;
  let lastKey = options.startkey;
  let isFirstPage = true;
  let processedCount = 0;
  while (true) {
    const queryOptions = {
      include_docs: true,
      limit: batchSize + 1 // Get one extra to check if there are more
    };
    if (lastKey !== undefined) {
      queryOptions.startkey = lastKey;
      // Skip the startkey row itself on pages after the first, since it
      // was already processed in the previous batch; on the first page a
      // caller-supplied startkey is included
      if (!isFirstPage) {
        queryOptions.skip = 1;
      }
    }
    const result = await db.allDocs(queryOptions);
    if (result.rows.length === 0) {
      break; // No more documents
    }
    // Process the batch (excluding the extra lookahead document if present)
    const docsToProcess = result.rows.slice(0, batchSize);
    const hasMore = result.rows.length > batchSize;
    for (const row of docsToProcess) {
      await processor(row.doc);
      processedCount++;
    }
    if (!hasMore) {
      break;
    }
    // Continue from the last *processed* key; using the lookahead row's key
    // here would cause that document to be skipped on the next page
    lastKey = docsToProcess[docsToProcess.length - 1].key;
    isFirstPage = false;
  }
  return processedCount;
}
// Usage
const processedCount = await processDocumentsIncrementally(
  async (doc) => {
    // Process each document
    console.log(`Processing document: ${doc._id}`);
    // Update, transform, or analyze the document
  },
  { batchSize: 50 }
);
console.log(`Processed ${processedCount} documents`);
// Optimize bulk operations for large datasets
const OPTIMAL_BATCH_SIZE = 1000;
async function optimizedBulkCreate(documents) {
  const results = [];
  for (let i = 0; i < documents.length; i += OPTIMAL_BATCH_SIZE) {
    const batch = documents.slice(i, i + OPTIMAL_BATCH_SIZE);
    try {
      const batchResults = await db.bulkDocs(batch, {
        new_edits: true // The default: new revision IDs are generated for each write
      });
      results.push(...batchResults);
    } catch (err) {
      console.error(`Batch starting at index ${i} failed:`, err);
    }
  }
  return results;
}
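// Replication-style writes: passing new_edits: false tells PouchDB to keep
// the revisions you supply instead of generating new ones (this is how
// replication writes documents). A minimal sketch; the _rev value below is
// a made-up placeholder standing in for one from a source database:
await db.bulkDocs(
  [{ _id: 'user_001', _rev: '2-def456', name: 'Alice (imported)' }],
  { new_edits: false }
);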
// Memory-efficient document processing
async function processLargeDataset(callback) {
  let skip = 0;
  const limit = 500; // Keep memory usage low
  while (true) {
    const result = await db.allDocs({
      include_docs: false, // Don't load full docs into memory
      limit: limit,
      skip: skip
    });
    if (result.rows.length === 0) break;
    // Fetch and process documents one at a time to minimize memory usage
    // (trades throughput for a smaller working set)
    for (const row of result.rows) {
      const doc = await db.get(row.id);
      await callback(doc);
    }
    skip += limit;
  }
}
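Note: on some adapters a large skip value forces allDocs to scan and discard every skipped row, so skip-based loops like the one above slow down as the offset grows. A minimal sketch of the same loop using startkey-based paging instead (same assumptions about db and callback as above):
async function processLargeDatasetByKey(callback) {
  const limit = 500;
  let startkey; // undefined on the first page
  while (true) {
    const result = await db.allDocs(
      startkey === undefined
        ? { limit }
        : { limit, startkey, skip: 1 } // skip: 1 drops the already-processed startkey row
    );
    if (result.rows.length === 0) break;
    for (const row of result.rows) {
      const doc = await db.get(row.id);
      await callback(doc);
    }
    // Resume from the last row's key on the next iteration
    startkey = result.rows[result.rows.length - 1].key;
  }
}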