Event-driven architecture for handling requests, responses, and errors. Cacheable Request provides full control over the request lifecycle through a comprehensive event system with strongly typed event listeners.
Handle the actual HTTP request object for sending data and managing the connection.
/**
* Emitted when an HTTP request is made (not from cache)
* @param request - Node.js ClientRequest instance
*/
interface RequestEvent {
event: 'request';
listener: (request: ClientRequest) => void;
}
Usage Examples:
import CacheableRequest from "cacheable-request";
import { request } from "node:http";
const cacheableRequest = new CacheableRequest(request).request();
const req = cacheableRequest("http://example.com/api/data");
// Handle request event - REQUIRED to actually send the request
req.on("request", (request) => {
// End the request to send it
request.end();
// Or send data with the request
// request.write(JSON.stringify({ key: "value" }));
// request.end();
});
// POST request with data
const postReq = cacheableRequest({
hostname: "api.example.com",
path: "/users",
method: "POST",
headers: {
"Content-Type": "application/json"
}
});
postReq.on("request", (request) => {
const data = JSON.stringify({ name: "John", email: "john@example.com" });
request.write(data);
request.end();
});
Handle responses from both cache and network requests.
/**
* Emitted when a response is received (from cache or network)
* @param response - HTTP response or cached response
*/
interface ResponseEvent {
event: 'response';
listener: (response: CacheResponse) => void;
}
interface CacheResponse {
/** Indicates if response came from cache */
fromCache: boolean;
/** HTTP status code */
statusCode: number;
/** Response headers */
headers: IncomingHttpHeaders;
/** Response URL */
url: string;
/** Cache policy information */
cachePolicy?: CachePolicy;
}
Usage Examples:
const req = cacheableRequest("https://api.example.com/users");
req.on("response", (response) => {
console.log("Status:", response.statusCode);
console.log("From cache:", response.fromCache);
console.log("Headers:", response.headers);
if (response.fromCache) {
console.log("Cache age:", response.cachePolicy.age());
console.log("TTL remaining:", response.cachePolicy.timeToLive());
}
// Handle response data
let data = "";
response.on("data", (chunk) => {
data += chunk;
});
response.on("end", () => {
console.log("Complete response:", data);
});
});
// Handle different response types
req.on("response", (response) => {
const contentType = response.headers["content-type"];
if (contentType?.includes("application/json")) {
let jsonData = "";
response.on("data", chunk => jsonData += chunk);
response.on("end", () => {
try {
const parsed = JSON.parse(jsonData);
console.log("JSON response:", parsed);
} catch (e) {
console.error("JSON parse error:", e);
}
});
} else {
console.log("Non-JSON response");
}
});
Handle cache errors and request errors with proper error classification.
/**
* Emitted when cache or request errors occur
* @param error - RequestError or CacheError instance
*/
interface ErrorEvent {
event: 'error';
listener: (error: RequestError | CacheError) => void;
}
class RequestError extends Error {
constructor(error: Error);
}
class CacheError extends Error {
constructor(error: Error);
}
Usage Examples:
import CacheableRequest, { RequestError, CacheError } from "cacheable-request";
const req = cacheableRequest("https://api.example.com/data");
// Comprehensive error handling
req.on("error", (error) => {
if (error instanceof CacheError) {
console.error("Cache error:", error.message);
// Cache storage issues, serialization errors, etc.
} else if (error instanceof RequestError) {
console.error("Request error:", error.message);
// HTTP request function errors, invalid options, etc.
} else {
console.error("Unknown error:", error);
}
});
// Handle specific error scenarios
req.on("error", (error) => {
if (error instanceof CacheError) {
// Log cache issues but continue with request
console.warn("Cache unavailable, using network:", error.message);
} else {
// Request errors are more serious
console.error("Request failed:", error.message);
throw error;
}
});
Comprehensive event handling for production applications.
interface CompleteEventHandling {
requestHandler: (request: ClientRequest) => void;
responseHandler: (response: CacheResponse) => void;
errorHandler: (error: RequestError | CacheError) => void;
}
Usage Examples:
function makeRequest(url: string): Promise<any> {
return new Promise((resolve, reject) => {
const req = cacheableRequest(url);
// Handle errors first
req.on("error", (error) => {
if (error instanceof CacheError) {
// Log cache error but don't fail the request
console.warn("Cache error (continuing):", error.message);
} else if (error instanceof RequestError) {
reject(new Error(`Request failed: ${error.message}`));
}
});
// Handle response
req.on("response", (response) => {
let data = "";
response.on("data", (chunk) => {
data += chunk;
});
response.on("end", () => {
try {
const result = {
statusCode: response.statusCode,
fromCache: response.fromCache,
data: data,
headers: response.headers
};
resolve(result);
} catch (error) {
reject(error);
}
});
response.on("error", (error) => {
reject(new Error(`Response error: ${error.message}`));
});
});
// Handle the request event - end the request to actually send it
req.on("request", (request) => {
// Handle request errors
request.on("error", (error) => {
reject(new Error(`HTTP error: ${error.message}`));
});
// Handle timeout (the "timeout" event only fires if a timeout is set)
request.setTimeout(10000);
request.on("timeout", () => {
request.destroy();
reject(new Error("Request timeout"));
});
// End the request
request.end();
});
});
}
// Usage
try {
const result = await makeRequest("https://api.example.com/users");
console.log("Success:", result);
} catch (error) {
console.error("Request failed:", error.message);
}
Full EventEmitter interface with strongly typed event handling.
interface Emitter extends EventEmitter {
// Event listener registration
on(event: 'request', listener: (request: ClientRequest) => void): this;
on(event: 'response', listener: (response: CacheResponse) => void): this;
on(event: 'error', listener: (error: RequestError | CacheError) => void): this;
// One-time event listeners
once(event: 'request', listener: (request: ClientRequest) => void): this;
once(event: 'response', listener: (response: CacheResponse) => void): this;
once(event: 'error', listener: (error: RequestError | CacheError) => void): this;
// Event listener management
addListener(event: 'request', listener: (request: ClientRequest) => void): this;
removeListener(event: 'request', listener: (request: ClientRequest) => void): this;
removeAllListeners(event?: 'request' | 'response' | 'error'): this;
// Event emission
emit(event: 'request', request: ClientRequest): boolean;
emit(event: 'response', response: CacheResponse): boolean;
emit(event: 'error', error: RequestError | CacheError): boolean;
}
Usage Examples:
const req = cacheableRequest("https://api.example.com/data");
// Multiple listeners for same event
req.on("response", (response) => {
console.log("Listener 1: Got response");
});
req.on("response", (response) => {
console.log("Listener 2: Processing response");
});
// One-time listener
req.once("response", (response) => {
console.log("This will only run once");
});
// Remove specific listener
function responseHandler(response) {
console.log("Handling response");
}
req.on("response", responseHandler);
req.removeListener("response", responseHandler);
// Remove all listeners for an event
req.removeAllListeners("error");
Advanced error handling patterns for robust applications.
interface AdvancedErrorHandling {
retryLogic: (error: Error, attempt: number) => boolean;
fallbackStrategy: (error: Error) => Promise<any>;
errorRecovery: (error: Error) => void;
}
Usage Examples:
// Retry logic with exponential backoff
async function requestWithRetry(url: string, maxRetries = 3): Promise<any> {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
return await new Promise((resolve, reject) => {
const req = cacheableRequest(url);
req.on("error", (error) => {
if (error instanceof CacheError) {
// Cache errors don't need retry
console.warn("Cache error:", error.message);
} else {
reject(error);
}
});
req.on("response", (response) => {
if (response.statusCode >= 500 && attempt < maxRetries) {
// Server error - will retry
reject(new Error(`Server error: ${response.statusCode}`));
} else {
// Process response
let data = "";
response.on("data", chunk => data += chunk);
response.on("end", () => resolve({ data, fromCache: response.fromCache }));
}
});
req.on("request", request => request.end());
});
} catch (error) {
if (attempt === maxRetries) {
throw error;
}
// Exponential backoff
const delay = Math.pow(2, attempt) * 1000;
await new Promise(resolve => setTimeout(resolve, delay));
console.log(`Retry attempt ${attempt + 1} after ${delay}ms`);
}
}
}
import { EventEmitter } from "node:events";
import { ClientRequest, IncomingHttpHeaders, ServerResponse } from "node:http";
import ResponseLike from "responselike";
import CachePolicy from "http-cache-semantics";
type CacheResponse = ServerResponse | typeof ResponseLike;
interface CachedResponseProperties {
fromCache: boolean;
cachePolicy?: CachePolicy;
statusCode: number;
headers: IncomingHttpHeaders;
url: string;
}
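The retry example above covers the retryLogic hook from the AdvancedErrorHandling sketch; fallbackStrategy and errorRecovery are left to the application. Below is a minimal sketch of those two hooks. It assumes the makeRequest() helper defined in the complete event handling example earlier in this section; loadFallbackUsers is a hypothetical placeholder for whatever default data your application can serve when both the cache and the network fail.
// Fallback strategy and error recovery (sketch, not part of cacheable-request)
// Hypothetical default data source - replace with your own.
const loadFallbackUsers = async (): Promise<unknown[]> => [];
async function getUsersWithFallback(url: string): Promise<unknown> {
  try {
    // makeRequest() is the promise wrapper defined earlier in this section
    const result = await makeRequest(url);
    return result.data;
  } catch (error) {
    // errorRecovery: record the failure so it can be investigated later
    console.error("Request failed, recovering:", (error as Error).message);
    // fallbackStrategy: serve bundled default data instead of failing hard
    return loadFallbackUsers();
  }
}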