A robust, performance-focused and full-featured Redis client for Node.js with TypeScript support, clustering, sentinel management, and comprehensive Redis command coverage.
Review status:
- Does it follow best practices? — Pending
- Impact — Pending (no eval scenarios have been run)
- Risk profile of this skill — Pending
ioredis provides powerful command batching and atomic transaction capabilities through pipelining and MULTI/EXEC transactions. These features enable high-performance batch operations and consistent data manipulation.
Pipeline multiple commands for execution in a single network round-trip, dramatically improving performance for batch operations.
// Create pipeline
pipeline(commands?: unknown[][]): ChainableCommander;
interface ChainableCommander {
// All Redis commands available for chaining
get(key: RedisKey): ChainableCommander;
set(key: RedisKey, value: RedisValue): ChainableCommander;
hget(key: RedisKey, field: string): ChainableCommander;
hset(key: RedisKey, field: string, value: RedisValue): ChainableCommander;
// ... all other Redis commands
// Execute pipeline
exec(callback?: Callback<Array<[Error | null, any]>>): Promise<Array<[Error | null, any]>>;
execBuffer(callback?: Callback<Array<[Error | null, any]>>): Promise<Array<[Error | null, any]>>;
// Pipeline properties
readonly length: number;
readonly isPipeline: true;
}

Usage Examples:
import Redis from "ioredis";
const redis = new Redis();
// Basic pipeline
const pipeline = redis.pipeline();
pipeline.set("key1", "value1");
pipeline.set("key2", "value2");
pipeline.get("key1");
pipeline.get("key2");
const results = await pipeline.exec();
console.log(results);
// [
// [null, "OK"], // set key1
// [null, "OK"], // set key2
// [null, "value1"], // get key1
// [null, "value2"] // get key2
// ]
// Chained pipeline
const results2 = await redis
.pipeline()
.set("user:123", "Alice")
.hset("user:123:profile", "email", "alice@example.com")
.hset("user:123:profile", "age", "25")
.exec();
// Pipeline with predefined commands
const commands = [
["set", "key1", "value1"],
["set", "key2", "value2"],
["mget", "key1", "key2"]
];
const results3 = await redis.pipeline(commands).exec();

Handle individual command errors within pipeline execution.
// Pipeline results format
type PipelineResult = Array<[Error | null, any]>;

Usage Examples:
const pipeline = redis.pipeline();
pipeline.set("valid_key", "value");
pipeline.get("nonexistent_key");
pipeline.hget("wrong_type_key", "field"); // Error if key is not a hash
const results = await pipeline.exec();
results.forEach(([error, result], index) => {
if (error) {
console.error(`Command ${index} failed:`, error.message);
} else {
console.log(`Command ${index} result:`, result);
}
});

Execute multiple commands atomically using Redis transactions with optimistic locking support.
// Start transaction
multi(options?: { pipeline?: boolean }): ChainableCommander | Promise<"OK">;
// Transaction methods (same interface as pipeline)
interface ChainableCommander {
// All Redis commands available
// ... Redis commands
// Transaction control
exec(callback?: Callback<Array<[Error | null, any]> | null>): Promise<Array<[Error | null, any]> | null>;
discard(): Promise<"OK">;
// Watch keys for optimistic locking (before MULTI)
watch(...keys: RedisKey[]): Promise<"OK">;
unwatch(): Promise<"OK">;
}

Usage Examples:
// Basic transaction
const transaction = redis.multi();
transaction.set("account:1:balance", "100");
transaction.set("account:2:balance", "200");
transaction.incrby("account:1:balance", -50);
transaction.incrby("account:2:balance", 50);
const results = await transaction.exec();
if (results) {
console.log("Transaction completed successfully");
} else {
console.log("Transaction was discarded");
}
// Optimistic locking with WATCH
await redis.watch("balance");
const currentBalance = parseInt(await redis.get("balance") || "0");
if (currentBalance >= 10) {
const transaction = redis.multi();
transaction.decrby("balance", 10);
transaction.incrby("purchases", 1);
const results = await transaction.exec();
if (results) {
console.log("Purchase successful");
} else {
console.log("Balance was modified, transaction cancelled");
}
} else {
await redis.unwatch();
console.log("Insufficient balance");
}

Automatic command batching for improved performance without manual pipeline management.
interface RedisOptions {
enableAutoPipelining?: boolean;
autoPipeliningIgnoredCommands?: string[];
}
// Redis instance properties
readonly autoPipelineQueueSize: number;

Usage Examples:
const redis = new Redis({
enableAutoPipelining: true,
autoPipeliningIgnoredCommands: ["subscribe", "psubscribe"]
});
// These commands will be automatically batched
const promise1 = redis.get("key1");
const promise2 = redis.get("key2");
const promise3 = redis.set("key3", "value3");
// All resolve when the batch executes
const [value1, value2, result3] = await Promise.all([promise1, promise2, promise3]);
// Check queue size
console.log(`Commands in auto-pipeline queue: ${redis.autoPipelineQueueSize}`);

Use callbacks with pipeline operations for specific use cases.
pipeline().exec(callback?: Callback<Array<[Error | null, any]>>): Promise<Array<[Error | null, any]>>;

const pipeline = redis.pipeline();
pipeline.set("key1", "value1");
pipeline.get("key1");
pipeline.exec((err, results) => {
if (err) {
console.error("Pipeline error:", err);
return;
}
results.forEach(([cmdErr, result], index) => {
if (cmdErr) {
console.error(`Command ${index} error:`, cmdErr);
} else {
console.log(`Command ${index}:`, result);
}
});
});

Detect when transactions are rolled back due to WATCH key modifications.
/**
 * Atomically moves `amount` from one account key to another using
 * WATCH + MULTI/EXEC optimistic locking.
 *
 * @param fromAccount - Redis key holding the source balance (integer string).
 * @param toAccount   - Redis key holding the destination balance.
 * @param amount      - Positive, finite number of units to transfer.
 * @returns The new balances on success.
 * @throws Error on invalid amount, insufficient funds, or if either
 *         account was modified concurrently (EXEC returned null).
 */
async function transferMoney(fromAccount: string, toAccount: string, amount: number) {
  // Reject zero/negative/NaN amounts up front: a negative amount would
  // silently reverse the transfer and bypass the balance check below.
  if (!Number.isFinite(amount) || amount <= 0) {
    throw new Error("Transfer amount must be a positive number");
  }
  // WATCH both keys so EXEC is discarded if either changes between the
  // reads below and the transaction commit.
  await redis.watch(fromAccount, toAccount);
  try {
    const fromBalance = parseInt((await redis.get(fromAccount)) || "0", 10);
    const toBalance = parseInt((await redis.get(toAccount)) || "0", 10);
    if (fromBalance < amount) {
      throw new Error("Insufficient funds");
    }
    const transaction = redis.multi();
    transaction.set(fromAccount, fromBalance - amount);
    transaction.set(toAccount, toBalance + amount);
    // exec() resolves to null when a watched key was modified and the
    // server discarded the queued commands.
    const results = await transaction.exec();
    if (results === null) {
      throw new Error("Transaction failed - accounts were modified during transfer");
    }
    return { success: true, fromBalance: fromBalance - amount, toBalance: toBalance + amount };
  } catch (err) {
    // Release the WATCH so the connection is left clean on any failure
    // (including reads that threw before MULTI). UNWATCH after a
    // completed EXEC is a harmless no-op.
    await redis.unwatch().catch(() => {});
    throw err;
  }
}
// Usage
try {
const result = await transferMoney("account:alice", "account:bob", 50);
console.log("Transfer successful:", result);
} catch (error) {
console.error("Transfer failed:", error.message);
}

Handle binary data in pipeline operations.
// Buffer variants
execBuffer(callback?: Callback<Array<[Error | null, any]>>): Promise<Array<[Error | null, any]>>;

const pipeline = redis.pipeline();
pipeline.set(Buffer.from("binary_key"), Buffer.from("binary_value"));
pipeline.get(Buffer.from("binary_key"));
const results = await pipeline.execBuffer();
const [, [, binaryValue]] = results;
console.log("Binary value:", binaryValue); // Buffer

Implement complex business logic with transactions and conditional execution.
/**
 * Processes a purchase: debits user credits, decrements product stock,
 * and records an order hash — all atomically via WATCH + MULTI/EXEC.
 *
 * @param userId    - Id used to build the `user:{id}` hash key.
 * @param productId - Id used to build the `product:{id}` hash key.
 * @param quantity  - Positive integer number of units to buy.
 * @returns Order id plus remaining credits and stock on success.
 * @throws Error on invalid quantity, insufficient credits/stock, or if
 *         either watched key was modified concurrently (EXEC returned null).
 */
async function processOrder(userId: string, productId: string, quantity: number) {
  // Reject non-positive or fractional quantities up front: a negative
  // quantity would otherwise *increase* stock and refund credits via the
  // HINCRBY calls below, bypassing both validation checks.
  if (!Number.isInteger(quantity) || quantity <= 0) {
    throw new Error("Quantity must be a positive integer");
  }
  const userKey = `user:${userId}`;
  const productKey = `product:${productId}`;
  // NOTE(review): Date.now() can collide for two orders in the same
  // millisecond — a server-side INCR-based id would be safer in production.
  const orderKey = `order:${Date.now()}`;
  // Watch relevant keys: EXEC below is discarded if either changes.
  await redis.watch(userKey, productKey);
  try {
    // Read all three fields in parallel (the reads are protected by the
    // WATCH above; the original sequential price read cost an extra
    // round trip for no benefit).
    const [userCredits, productStock, productPrice] = await Promise.all([
      redis.hget(userKey, "credits").then(c => parseInt(c || "0", 10)),
      redis.hget(productKey, "stock").then(s => parseInt(s || "0", 10)),
      redis.hget(productKey, "price").then(p => parseInt(p || "0", 10))
    ]);
    const totalCost = productPrice * quantity;
    // Validate business rules before queueing any writes.
    if (userCredits < totalCost) {
      throw new Error("Insufficient credits");
    }
    if (productStock < quantity) {
      throw new Error("Insufficient stock");
    }
    // Execute transaction: all three writes apply atomically, or none do.
    const transaction = redis.multi();
    transaction.hincrby(userKey, "credits", -totalCost);
    transaction.hincrby(productKey, "stock", -quantity);
    transaction.hset(orderKey, {
      userId,
      productId,
      quantity: quantity.toString(),
      totalCost: totalCost.toString(),
      timestamp: Date.now().toString()
    });
    // exec() resolves to null when a watched key was modified.
    const results = await transaction.exec();
    if (results === null) {
      throw new Error("Order processing failed - data was modified during processing");
    }
    return {
      orderId: orderKey,
      remainingCredits: userCredits - totalCost,
      remainingStock: productStock - quantity
    };
  } catch (err) {
    // Release the WATCH so the connection is left clean on any failure;
    // UNWATCH after a completed EXEC is a harmless no-op.
    await redis.unwatch().catch(() => {});
    throw err;
  }
}

Choose the appropriate batching strategy based on your use case.
// Manual pipeline - full control, explicit batching
const pipeline = redis.pipeline();
for (let i = 0; i < 1000; i++) {
pipeline.set(`key:${i}`, `value:${i}`);
}
const results = await pipeline.exec();
// Auto-pipelining - automatic batching, simpler code
const promises = [];
for (let i = 0; i < 1000; i++) {
promises.push(redis.set(`key:${i}`, `value:${i}`));
}
const results2 = await Promise.all(promises);

Minimize transaction scope and duration for better performance.
// Good - minimal transaction scope
await redis.watch("counter");
const current = await redis.get("counter");
const transaction = redis.multi();
transaction.set("counter", parseInt(current) + 1);
await transaction.exec();
// Better - use atomic operations when possible
await redis.incr("counter");

type PipelineResult = Array<[Error | null, any]>;
type Callback<T> = (err?: Error | null, result?: T) => void;
type RedisKey = string | Buffer;
type RedisValue = string | Buffer | number;