The scalable web crawling and scraping library for JavaScript/Node.js that enables development of data extraction and web automation jobs with headless Chrome and Puppeteer.
Configuration and proxy management provide global control over Crawlee settings and enable distributed crawling with proxy rotation, authentication, and fault tolerance.
Global configuration management for Crawlee settings and behavior control.
/**
* Global configuration management for Crawlee
*/
class Configuration {
/** Get the global configuration instance */
static getGlobalConfig(): Configuration;
/** Get a configuration value */
get<T = any>(key: string): T;
/** Set a configuration value */
set(key: string, value: any): void;
/** Reset configuration to defaults */
reset(): void;
/** Get all configuration values */
getAll(): Dictionary<any>;
/** Initialize configuration from environment */
buildConfigFromEnv(): void;
/** Get the storage client instance */
getStorageClient(): StorageClient;
/** Get event manager instance */
getEventManager(): EventManager;
/** Get current memory usage information */
getMemoryInfo(): MemoryInfo;
/** Get current system information */
getSystemInfo(): SystemInfo;
}
Common configuration options available through the Configuration class.
interface ConfigurationKeys {
/** Default dataset ID */
defaultDatasetId: string;
/** Default key-value store ID */
defaultKeyValueStoreId: string;
/** Default request queue ID */
defaultRequestQueueId: string;
/** Local storage directory */
localDataDirectory: string;
/** Whether to purge local data on startup */
purgeOnStart: boolean;
/** Maximum memory usage in MB */
memoryMbytes: number;
/** Available memory ratio threshold */
availableMemoryRatio: number;
/** Maximum old space size for Node.js */
maxOldSpaceSize: number;
/** Log level */
logLevel: 'DEBUG' | 'INFO' | 'WARNING' | 'ERROR' | 'OFF';
/** Whether to use headless browser mode */
headless: boolean;
/** Chrome executable path */
chromeExecutablePath?: string;
/** Default browser viewport */
defaultBrowserViewport: BrowserViewport;
/** System monitoring interval */
systemInfoIntervalMillis: number;
/** Input charset */
inputCharset: string;
/** Default user agent */
defaultUserAgent: string;
/** HTTP timeout */
defaultRequestTimeoutSecs: number;
/** Maximum HTTP redirects */
maxRequestRedirects: number;
/** Whether to persist storage state */
persistStorage: boolean;
}
Usage Examples:
import { Configuration, CheerioCrawler } from "crawlee";
// Get global configuration
const config = Configuration.getGlobalConfig();
// Configure storage settings
config.set('defaultDatasetId', 'my-crawl-results');
config.set('localDataDirectory', './crawlee_storage');
config.set('purgeOnStart', false);
// Configure memory limits
config.set('memoryMbytes', 4096);
config.set('availableMemoryRatio', 0.1);
// Configure browser settings
config.set('headless', true);
config.set('defaultBrowserViewport', { width: 1920, height: 1080 });
// Configure logging
config.set('logLevel', 'INFO');
// Configure HTTP settings
config.set('defaultRequestTimeoutSecs', 30);
config.set('maxRequestRedirects', 10);
// Use configuration in crawlers
const crawler = new CheerioCrawler({
requestHandler: async ({ request }) => {
const memoryLimit = config.get('memoryMbytes');
const currentMemory = config.getMemoryInfo();
if (currentMemory.usedBytes > memoryLimit * 1024 * 1024 * 0.9) {
console.warn('Approaching memory limit');
}
// Process request...
},
});
// Environment-based configuration
config.buildConfigFromEnv();
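// Crawlee conventionally maps CRAWLEE_-prefixed environment variables
// (e.g. CRAWLEE_STORAGE_DIR, CRAWLEE_PURGE_ON_START, CRAWLEE_MEMORY_MBYTES,
// CRAWLEE_LOG_LEVEL) onto configuration keys; exact names may vary by version.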
// Check configuration values
console.log('Local data directory:', config.get('localDataDirectory'));
console.log('Log level:', config.get('logLevel'));
console.log('All config:', config.getAll());
Proxy configuration management with support for multiple proxy sources and rotation.
/**
* Proxy configuration and management with rotation support
*/
class ProxyConfiguration {
constructor(options?: ProxyConfigurationOptions);
/** Initialize proxy configuration */
initialize(): Promise<void>;
/** Get a new proxy URL */
newUrl(sessionId?: number | string): Promise<string | undefined>;
/** Get new proxy information */
newProxyInfo(sessionId?: number | string): Promise<ProxyInfo | undefined>;
/** Get proxy statistics */
getProxyStats(): ProxyStats;
/** Mark a proxy as bad */
markProxyBad(proxyInfo: ProxyInfo, errorMessage?: string): void;
/** Reset proxy statistics */
resetProxyStats(): void;
}
Configuration options for proxy management.
interface ProxyConfigurationOptions {
/** Array of proxy URLs */
proxyUrls?: string[];
/** Function that returns proxy URLs */
newUrlFunction?: (sessionId?: number | string) => Promise<string | undefined>;
/** Apify Proxy groups to use */
groups?: string[];
/** Apify Proxy country code */
countryCode?: string;
/** Custom password for Apify Proxy */
password?: string;
/** Session persistence time in seconds */
sessionStickinessTimeSecs?: number;
/** Apify Proxy options */
apifyProxyOptions?: ApifyProxyOptions;
/** Whether to rotate proxies */
rotateProxies?: boolean;
/** Proxy rotation strategy */
rotationStrategy?: ProxyRotationStrategy;
}
interface ApifyProxyOptions {
/** Apify proxy groups */
groups?: string[];
/** Country code for geo-targeting */
countryCode?: string;
/** Custom session ID format */
sessionIdFunction?: (request: Request) => string;
/** Whether to use Apify Proxy */
useApifyProxy?: boolean;
/** Apify Proxy password */
password?: string;
}
enum ProxyRotationStrategy {
ROUND_ROBIN = 'ROUND_ROBIN',
RANDOM = 'RANDOM',
SESSION_STICKY = 'SESSION_STICKY',
}
Usage Examples:
import { ProxyConfiguration, ProxyRotationStrategy, PuppeteerCrawler } from "crawlee";
// Basic proxy configuration with static URLs
const proxyConfiguration = new ProxyConfiguration({
proxyUrls: [
'http://user:pass@proxy1.example.com:8000',
'http://user:pass@proxy2.example.com:8000',
'http://user:pass@proxy3.example.com:8000',
],
rotationStrategy: ProxyRotationStrategy.ROUND_ROBIN,
});
// Initialize before use
await proxyConfiguration.initialize();
// Use with crawler
const crawler = new PuppeteerCrawler({
proxyConfiguration,
requestHandler: async ({ page, request, proxyInfo }) => {
console.log(`Using proxy: ${proxyInfo?.url}`);
try {
await page.goto(request.url);
// Process page...
} catch (error) {
if (proxyInfo && error.message.includes('proxy')) {
// Mark the proxy as bad so it is rotated out of the pool
proxyConfiguration.markProxyBad(proxyInfo, error.message);
}
throw error;
}
},
});
// Custom proxy function
const dynamicProxyConfig = new ProxyConfiguration({
newUrlFunction: async (sessionId) => {
// Fetch proxy from external service
const response = await fetch('https://proxy-service.com/get-proxy');
const proxy = await response.json();
return `http://${proxy.username}:${proxy.password}@${proxy.host}:${proxy.port}`;
},
sessionStickinessTimeSecs: 300, // 5 minutes
});
// Apify Proxy configuration
const apifyProxyConfig = new ProxyConfiguration({
groups: ['RESIDENTIAL', 'DATACENTER'],
countryCode: 'US',
sessionStickinessTimeSecs: 600,
apifyProxyOptions: {
password: process.env.APIFY_PROXY_PASSWORD,
sessionIdFunction: (request) => `session_${request.userData.category}`,
},
});
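// Request a proxy URL directly, outside a crawler: a sketch using the
// newUrl() method declared above (the session ID here is arbitrary)
const stickyProxyUrl = await proxyConfiguration.newUrl('session_checkout');
console.log(`Sticky proxy for this session: ${stickyProxyUrl}`);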
// Monitor proxy performance
setInterval(() => {
const stats = proxyConfiguration.getProxyStats();
console.log(`Proxy stats: ${stats.successCount}/${stats.totalCount} successful`);
console.log(`Bad proxies: ${stats.badProxyCount}`);
}, 30000);
Information about a specific proxy instance.
interface ProxyInfo {
/** Full proxy URL */
url: string;
/** Proxy hostname */
hostname: string;
/** Proxy port number */
port: number;
/** Proxy protocol (http, https, socks5) */
protocol: string;
/** Authentication credentials */
auth?: {
username: string;
password: string;
};
/** Session ID for this proxy */
sessionId?: string | number;
/** Additional proxy metadata */
metadata?: Dictionary<any>;
/** When this proxy was created */
createdAt?: Date;
/** Proxy geographic location */
country?: string;
/** Proxy provider information */
provider?: string;
}
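A minimal sketch of reading ProxyInfo fields, assuming the proxyConfiguration instance from the earlier examples:
const info = await proxyConfiguration.newProxyInfo('session_1');
if (info) {
// Rebuild the proxy URL from its parts and inspect optional metadata
console.log(`${info.protocol}://${info.hostname}:${info.port}`);
if (info.auth) console.log(`Authenticated as ${info.auth.username}`);
if (info.country) console.log(`Geo: ${info.country} via ${info.provider ?? 'unknown provider'}`);
}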
interface ProxyStats {
/** Total proxy requests made */
totalCount: number;
/** Successful proxy requests */
successCount: number;
/** Failed proxy requests */
errorCount: number;
/** Number of bad proxies marked */
badProxyCount: number;
/** Success rate ratio (0-1) */
successRate: number;
/** Average response time in milliseconds */
averageResponseTime: number;
/** Stats by proxy URL */
proxyStats: Dictionary<{
requests: number;
successes: number;
errors: number;
lastUsed: Date;
averageResponseTime: number;
}>;
}
Event system for monitoring and reacting to crawler and configuration events.
/**
* Event manager for handling system and crawler events
*/
class EventManager {
/** Register an event listener */
on(eventName: string, listener: EventListener): void;
/** Register a one-time event listener */
once(eventName: string, listener: EventListener): void;
/** Remove an event listener */
off(eventName: string, listener: EventListener): void;
/** Emit an event */
emit(eventName: string, ...args: any[]): void;
/** Get the number of listeners registered for an event */
listenerCount(eventName: string): number;
/** Remove all listeners for an event */
removeAllListeners(eventName?: string): void;
}
type EventListener = (...args: any[]) => void | Promise<void>;
Usage Examples:
import { Configuration, CheerioCrawler, sleep } from "crawlee";
const config = Configuration.getGlobalConfig();
const eventManager = config.getEventManager();
// Listen for system events
eventManager.on('memoryWarning', (memoryInfo) => {
console.warn('Memory usage high:', memoryInfo.ratio);
// Implement memory pressure handling
});
eventManager.on('proxyError', (proxyInfo, error) => {
console.error(`Proxy ${proxyInfo.url} failed:`, error.message);
// Log proxy failures for analysis
});
eventManager.on('sessionRetired', (session) => {
console.log(`Session ${session.id} was retired`);
// Track session lifecycle
});
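// One-shot listeners and cleanup, using the once(), listenerCount() and
// removeAllListeners() methods declared above (the 'persistState' event
// name is illustrative)
eventManager.once('persistState', () => {
console.log('First persistState event received');
});
console.log(`memoryWarning listeners: ${eventManager.listenerCount('memoryWarning')}`);
// Detach all handlers for an event during shutdown:
// eventManager.removeAllListeners('proxyError');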
// Emit custom events
const crawler = new CheerioCrawler({
requestHandler: async ({ request, response }) => {
if (response.statusCode === 429) {
eventManager.emit('rateLimitHit', {
url: request.url,
retryAfter: response.headers['retry-after'],
});
}
// Process request...
},
});
// React to custom events
eventManager.on('rateLimitHit', async ({ url, retryAfter }) => {
console.log(`Rate limit hit on ${url}, backing off for ${retryAfter}s`);
// Implement backoff strategy
await sleep((parseInt(retryAfter, 10) || 60) * 1000); // fall back to 60s if the header is missing
});
Built-in monitoring for system resources and crawler performance.
interface MemoryInfo {
/** Total system memory in bytes */
totalBytes: number;
/** Free memory in bytes */
freeBytes: number;
/** Used memory in bytes */
usedBytes: number;
/** Available memory in bytes */
availableBytes: number;
/** Memory usage ratio (0-1) */
ratio: number;
/** Node.js heap information */
heapUsed: number;
heapTotal: number;
heapLimit: number;
/** External memory usage */
external: number;
/** Memory usage by category */
breakdown: {
rss: number;
heapUsed: number;
heapTotal: number;
external: number;
};
}
interface SystemInfo {
/** CPU usage information */
cpu: {
usage: number;
loadAverage: number[];
cores: number;
};
/** Memory information */
memory: MemoryInfo;
/** Operating system information */
os: {
platform: string;
arch: string;
release: string;
uptime: number;
};
/** Node.js process information */
process: {
pid: number;
uptime: number;
memoryUsage: NodeJSMemoryUsage;
cpuUsage: NodeJSCpuUsage;
};
/** Timestamp of measurement */
timestamp: Date;
}
Usage Examples:
import { Configuration, CheerioCrawler } from "crawlee";
const config = Configuration.getGlobalConfig();
// Monitor system resources
setInterval(() => {
const memInfo = config.getMemoryInfo();
const sysInfo = config.getSystemInfo();
console.log(`Memory usage: ${(memInfo.ratio * 100).toFixed(1)}%`);
console.log(`CPU usage: ${(sysInfo.cpu.usage * 100).toFixed(1)}%`);
console.log(`Heap used: ${(memInfo.heapUsed / 1024 / 1024).toFixed(0)}MB`);
// Trigger cleanup if memory usage is high
if (memInfo.ratio > 0.9) {
console.warn('High memory usage, triggering garbage collection');
if (global.gc) { // global.gc is available only when Node.js runs with --expose-gc
global.gc();
}
}
}, 10000);
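// One-off snapshot of process and OS details from SystemInfo (fields declared above)
const snapshot = config.getSystemInfo();
console.log(`PID ${snapshot.process.pid} on ${snapshot.os.platform}/${snapshot.os.arch}`);
console.log(`Load average: ${snapshot.cpu.loadAverage.join(', ')}`);
console.log(`Measured at: ${snapshot.timestamp.toISOString()}`);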
// Use system monitoring in crawler
const crawler = new CheerioCrawler({
requestHandler: async ({ request }) => {
const memInfo = config.getMemoryInfo();
// Adapt behavior based on memory usage
if (memInfo.ratio > 0.8) {
console.log('High memory usage, reducing processing');
// Skip heavy processing or reduce data collection
return;
}
// Normal processing...
},
// Configure based on system capabilities
maxConcurrency: (() => {
const sysInfo = config.getSystemInfo();
const cores = sysInfo.cpu.cores;
return Math.max(1, cores - 1); // Leave one core for system
})(),
});
// Set memory thresholds based on available memory
const totalMemoryGB = config.getMemoryInfo().totalBytes / (1024 ** 3);
config.set('memoryMbytes', Math.floor(totalMemoryGB * 0.8 * 1024)); // Use 80% of total memory
Configuration integration with storage clients for advanced storage operations.
interface StorageClient {
/** Dataset client for advanced dataset operations */
datasets(): DatasetClient;
/** Key-value store client */
keyValueStores(): KeyValueStoreClient;
/** Request queue client */
requestQueues(): RequestQueueClient;
/** Update client configuration */
setOptions(options: StorageClientOptions): void;
/** Get current configuration */
getOptions(): StorageClientOptions;
}
interface StorageClientOptions {
/** Storage API base URL */
baseUrl?: string;
/** Authentication token */
token?: string;
/** Request timeout in seconds */
timeoutSecs?: number;
/** Maximum retry attempts */
maxRetries?: number;
/** Local storage directory */
localDataDirectory?: string;
/** Whether to use cloud storage */
cloudStorage?: boolean;
}
Usage Examples:
import { Configuration } from "crawlee";
const config = Configuration.getGlobalConfig();
const storageClient = config.getStorageClient();
// Configure storage client
storageClient.setOptions({
baseUrl: 'https://api.apify.com/v2',
token: process.env.APIFY_TOKEN,
timeoutSecs: 30,
maxRetries: 3,
cloudStorage: true,
});
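// Read back the applied options to verify them (getOptions() is declared above)
const appliedOptions = storageClient.getOptions();
console.log(`Storage base URL: ${appliedOptions.baseUrl}, max retries: ${appliedOptions.maxRetries}`);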
// Use advanced dataset operations
const datasetClient = storageClient.datasets();
// Custom dataset operations
await datasetClient.pushData('my-dataset', [
{ url: 'example.com', title: 'Example' }
]);
const datasetInfo = await datasetClient.getDataset('my-dataset');
console.log(`Dataset has ${datasetInfo.itemCount} items`);
// Export data with advanced options
await datasetClient.exportDataset('my-dataset', {
format: 'csv',
fields: ['url', 'title'],
clean: true,
});
Supporting type definitions used by the interfaces above.
interface Dictionary<T = any> {
[key: string]: T;
}
interface EventEmitter {
on(event: string, listener: Function): this;
once(event: string, listener: Function): this;
emit(event: string, ...args: any[]): boolean;
off(event: string, listener: Function): this;
removeAllListeners(event?: string): this;
}
interface Request<UserData = Dictionary> {
url: string;
loadedUrl?: string;
uniqueKey: string;
userData?: UserData;
label?: string;
method?: string;
headers?: Dictionary<string>;
payload?: string;
}
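For illustration, a Request-shaped object matching the interface above (the uniqueKey is written out by hand here; Crawlee normally derives it from the URL):
const categoryRequest: Request<{ category: string }> = {
url: 'https://example.com/categories/shoes',
uniqueKey: 'https://example.com/categories/shoes',
label: 'CATEGORY',
userData: { category: 'shoes' },
method: 'GET',
};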
interface CrawlerOptions {
proxyConfiguration?: ProxyConfiguration;
sessionPoolOptions?: SessionPoolOptions;
maxConcurrency?: number;
maxRequestRetries?: number;
requestTimeoutSecs?: number;
}
interface NodeJSMemoryUsage {
rss: number;
heapTotal: number;
heapUsed: number;
external: number;
arrayBuffers: number;
}
interface NodeJSCpuUsage {
user: number;
system: number;
}
type LogLevel = 'DEBUG' | 'INFO' | 'WARNING' | 'ERROR' | 'OFF';
interface BrowserViewport {
width: number;
height: number;
deviceScaleFactor?: number;
isMobile?: boolean;
hasTouch?: boolean;
isLandscape?: boolean;
}
Install with Tessl CLI
npx tessl i tessl/npm-crawlee