Apply production-ready Firecrawl SDK patterns for TypeScript and Python. Use when implementing Firecrawl integrations, building reusable scraping services, or establishing team coding standards for Firecrawl. Trigger with phrases like "firecrawl SDK patterns", "firecrawl best practices", "firecrawl code patterns", "idiomatic firecrawl", "firecrawl wrapper".
80
77%
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Advisory
Suggest reviewing before use
Optimize this skill with Tessl
npx tessl skill review --optimize ./plugins/saas-packs/firecrawl-pack/skills/firecrawl-sdk-patterns/SKILL.md

Production-ready patterns for Firecrawl SDK (@mendable/firecrawl-js / firecrawl-py). Covers singleton client, typed wrappers, retry with backoff, response validation, and reusable scraping service patterns.
Requires @mendable/firecrawl-js to be installed.

// src/firecrawl/client.ts
import FirecrawlApp from "@mendable/firecrawl-js";
let instance: FirecrawlApp | null = null;

/**
 * Module-level Firecrawl client, constructed lazily on first use (singleton).
 * Reads FIRECRAWL_API_KEY (required) and FIRECRAWL_API_URL (optional host override).
 * @throws Error when FIRECRAWL_API_KEY is missing or empty.
 */
export function getFirecrawl(): FirecrawlApp {
  if (instance) return instance;

  const apiKey = process.env.FIRECRAWL_API_KEY;
  if (!apiKey) {
    throw new Error("FIRECRAWL_API_KEY environment variable is required");
  }

  const apiUrl = process.env.FIRECRAWL_API_URL;
  instance = new FirecrawlApp({
    apiKey,
    ...(apiUrl ? { apiUrl } : {}),
  });
  return instance;
}// src/firecrawl/scrape.ts
import { getFirecrawl } from "./client";
/**
 * Normalized result of a single page scrape.
 * Exported because service.ts imports it (`import { type ScrapeResult }`);
 * the original declaration was module-private, which breaks that import.
 */
export interface ScrapeResult {
  /** Canonical URL: Firecrawl's metadata.sourceURL when present, else the requested URL. */
  url: string;
  /** Page title from metadata; empty string when absent. */
  title: string;
  /** Main-content markdown; empty string when absent. */
  markdown: string;
  /** Outbound links; empty unless the "links" format was requested. */
  links: string[];
  /** ISO-8601 timestamp recorded when the scrape wrapper returned. */
  scrapedAt: string;
}
/**
 * Scrape one page via Firecrawl and normalize it into a ScrapeResult.
 * @param url Page to scrape.
 * @param options waitFor: ms to wait before capture (falsy values are omitted);
 *                includeLinks: additionally request the "links" format.
 * @throws Error when Firecrawl reports a failed scrape.
 */
export async function scrapePage(
  url: string,
  options?: { waitFor?: number; includeLinks?: boolean }
): Promise<ScrapeResult> {
  const firecrawl = getFirecrawl();

  const formats: string[] = options?.includeLinks
    ? ["markdown", "links"]
    : ["markdown"];

  const result = await firecrawl.scrapeUrl(url, {
    formats,
    onlyMainContent: true,
    ...(options?.waitFor ? { waitFor: options.waitFor } : {}),
  });

  if (!result.success) {
    throw new Error(`Scrape failed for ${url}: ${result.error}`);
  }

  return {
    url: result.metadata?.sourceURL || url,
    title: result.metadata?.title || "",
    markdown: result.markdown || "",
    links: result.links || [],
    scrapedAt: new Date().toISOString(),
  };
}// src/firecrawl/retry.ts
/**
 * Run an async operation, retrying on rate limits (429) and server errors (5xx)
 * with exponential backoff plus up to 500ms of random jitter.
 *
 * Config may now be partial; unspecified fields fall back to the defaults
 * (maxRetries: 3, baseDelayMs: 1000, maxDelayMs: 30000). The original default
 * object was replaced wholesale by any caller-supplied config, so a partial
 * config produced NaN delays.
 *
 * @param operation Async thunk to execute.
 * @param config Retry tuning; maxRetries counts *re*-attempts after the first try.
 * @returns The operation's resolved value.
 * @throws The last error when retries are exhausted, or immediately for
 *         non-retryable statuses (4xx other than 429).
 */
export async function withRetry<T>(
  operation: () => Promise<T>,
  config: { maxRetries?: number; baseDelayMs?: number; maxDelayMs?: number } = {}
): Promise<T> {
  const { maxRetries = 3, baseDelayMs = 1000, maxDelayMs = 30000 } = config;

  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      return await operation();
    } catch (error: unknown) {
      if (attempt === maxRetries) throw error;

      // Firecrawl SDK errors surface the HTTP status as statusCode or status;
      // narrow from unknown instead of using `any`.
      const status =
        typeof error === "object" && error !== null
          ? ((error as { statusCode?: number }).statusCode ??
            (error as { status?: number }).status)
          : undefined;

      // Only retry on rate limits (429) and server errors (5xx)
      if (status && status !== 429 && status < 500) throw error;

      const delay = Math.min(
        baseDelayMs * Math.pow(2, attempt) + Math.random() * 500,
        maxDelayMs
      );
      console.warn(`Firecrawl retry ${attempt + 1}/${maxRetries} in ${delay.toFixed(0)}ms`);
      await new Promise(r => setTimeout(r, delay));
    }
  }
  // Loop always returns or throws; this satisfies the compiler.
  throw new Error("Unreachable");
}
// Usage: await withRetry(() => scrapePage("https://example.com"))// src/firecrawl/service.ts
import PQueue from "p-queue";
import { scrapePage, type ScrapeResult } from "./scrape";
import { withRetry } from "./retry";
/**
 * Rate-limited scraping facade: at most `concurrency` scrapes in flight and
 * at most 5 requests started per second, each wrapped in retry/backoff.
 */
export class FirecrawlService {
  private queue: PQueue;

  constructor(concurrency = 3) {
    this.queue = new PQueue({
      concurrency,
      interval: 1000,
      intervalCap: 5, // max 5 requests per second
    });
  }

  /** Scrape one URL through the queue, retried via withRetry. */
  async scrape(url: string): Promise<ScrapeResult> {
    const task = () => withRetry(() => scrapePage(url));
    return this.queue.add(task);
  }

  /** Scrape many URLs concurrently; rejects if any single scrape fails. */
  async scrapeMany(urls: string[]): Promise<ScrapeResult[]> {
    const jobs = urls.map((url) => this.scrape(url));
    return Promise.all(jobs);
  }

  /** Number of queued tasks currently running. */
  get pending(): number {
    return this.queue.pending;
  }
}
import { z } from "zod";
// Runtime schema for a *successful* Firecrawl scrape response; catches
// upstream API shape changes that compile-time types alone would miss.
const FirecrawlScrapeResponse = z.object({
  success: z.literal(true), // only successful responses validate
  markdown: z.string().min(1), // reject empty scrapes
  metadata: z.object({
    title: z.string().optional(),
    sourceURL: z.string().url(),
    statusCode: z.number().optional(),
  }),
});
/**
 * Validate an unknown Firecrawl payload against the schema.
 * @returns The typed, parsed data, or null (after logging) when the shape is invalid.
 */
export function validateScrapeResponse(result: unknown) {
  const outcome = FirecrawlScrapeResponse.safeParse(result);
  if (outcome.success) {
    return outcome.data;
  }
  console.error("Invalid Firecrawl response:", outcome.error.issues);
  return null;
}# firecrawl_service.py
import os
from firecrawl import FirecrawlApp
from functools import lru_cache
import time
@lru_cache(maxsize=1)
def get_firecrawl() -> FirecrawlApp:
    """Singleton Firecrawl client.

    lru_cache(maxsize=1) ensures one FirecrawlApp is constructed per process.
    Raises KeyError if the FIRECRAWL_API_KEY environment variable is unset.
    """
    return FirecrawlApp(api_key=os.environ["FIRECRAWL_API_KEY"])
def scrape_with_retry(url: str, max_retries: int = 3) -> dict:
    """Scrape with exponential backoff.

    Makes up to ``max_retries`` attempts, sleeping ``2 ** attempt`` seconds
    plus a sub-second offset between tries, and re-raises the final error
    once attempts are exhausted.

    NOTE(review): the bare ``except Exception`` retries *every* failure,
    including non-retryable ones (e.g. auth/validation errors) — consider
    narrowing to rate-limit/server errors, as the TypeScript withRetry does.
    NOTE(review): ``time.time() % 1`` is the fractional wall-clock second,
    not true random jitter — confirm this is intentional.
    """
    for attempt in range(max_retries):
        try:
            return get_firecrawl().scrape_url(url, params={
                "formats": ["markdown"],
                "onlyMainContent": True,
            })
        except Exception as e:
            if attempt == max_retries - 1:
                raise
            delay = (2 ** attempt) + (time.time() % 1)
            print(f"Retry {attempt + 1}/{max_retries} in {delay:.1f}s: {e}")
            time.sleep(delay)
| Pattern | Use Case | Benefit |
|---|---|---|
| Singleton client | All SDK usage | One instance, consistent config |
| Typed wrapper | Business logic | Compile-time safety |
| Retry + backoff | 429 / 5xx errors | Automatic recovery |
| Queue | Multiple URLs | Respect rate limits |
| Zod validation | Any API response | Catch API changes early |
const clients = new Map<string, FirecrawlApp>();

/**
 * Lazily create and cache one Firecrawl client per tenant.
 * Credentials come from getTenantApiKey (defined elsewhere).
 */
export function getClientForTenant(tenantId: string): FirecrawlApp {
  const existing = clients.get(tenantId);
  if (existing) return existing;

  const client = new FirecrawlApp({ apiKey: getTenantApiKey(tenantId) });
  clients.set(tenantId, client);
  return client;
}
Apply patterns in firecrawl-core-workflow-a for real-world usage.
c8a915c
If you maintain this skill, you can claim it as your own. Once claimed, you can manage eval scenarios, bundle related skills, attach documentation or rules, and ensure cross-agent compatibility.