High performance Node.js image processing library for resizing JPEG, PNG, WebP, GIF, AVIF and TIFF images
80
Sharp provides comprehensive access to image metadata and pixel-level statistics without requiring full image decoding, enabling efficient analysis and processing workflows.
Retrieve comprehensive image metadata including format, dimensions, color information, and embedded data.
/**
 * Access image metadata without decoding pixel data.
 * Reported width/height are pre-EXIF-orientation; use the result's
 * `autoOrient` property for display dimensions.
 * @returns Promise resolving to metadata object
 */
metadata(): Promise<Metadata>;
/** Header-derived information about an image; optional fields depend on format. */
interface Metadata {
  /** Image format (jpeg, png, webp, gif, tiff, etc.) */
  format: string;
  /** Total file size in bytes (Buffer/Stream input only) */
  size?: number;
  /** Image width in pixels (before EXIF orientation is applied) */
  width: number;
  /** Image height in pixels (before EXIF orientation is applied) */
  height: number;
  /** Color space interpretation */
  space: string;
  /** Number of channels (1=grey, 2=grey+alpha, 3=RGB, 4=RGBA/CMYK) */
  channels: number;
  /** Pixel depth format (uchar, char, ushort, float, etc.) */
  depth: string;
  /** Image density in pixels per inch (if present) */
  density?: number;
  /** Chroma subsampling (JPEG: 4:2:0, 4:4:4, etc.) */
  chromaSubsampling?: string;
  /** Progressive/interlaced encoding */
  isProgressive: boolean;
  /** Palette-based encoding (GIF, PNG) */
  isPalette: boolean;
  /** Bits per sample for each channel */
  bitsPerSample?: number;
  /** EXIF orientation value (1-8; 1 = normal) */
  orientation?: number;
  /** Dimensions after applying EXIF orientation (values 5-8 swap width/height) */
  autoOrient: {
    width: number;
    height: number;
  };
  /** Embedded ICC color profile present */
  hasProfile: boolean;
  /** Alpha transparency channel present */
  hasAlpha: boolean;
  /** Number of pages/frames (multi-page formats) */
  pages?: number;
  /** Height of each page in a multi-page image */
  pageHeight?: number;
  /** Animation loop count (0 = infinite) */
  loop?: number;
  /** Per-frame delays in milliseconds */
  delay?: number[];
  /** Primary page number (HEIF) */
  pagePrimary?: number;
  /** Default background color */
  background?: { r: number; g: number; b: number } | { gray: number };
  /** Multi-level image details (OpenSlide) */
  levels?: LevelMetadata[];
  /** Sub Image File Directories count (OME-TIFF) */
  subifds?: number;
  /** Resolution unit */
  resolutionUnit?: 'inch' | 'cm';
  /** ImageMagick format identifier */
  formatMagick?: string;
  /** HEIF compression format */
  compression?: 'av1' | 'hevc';
  /** PNG text comments */
  comments?: CommentsMetadata[];
  /** Raw EXIF data, unparsed (use an EXIF library to decode) */
  exif?: Buffer;
  /** Raw ICC profile bytes */
  icc?: Buffer;
  /** Raw IPTC data, unparsed */
  iptc?: Buffer;
  /** Raw XMP data, unparsed */
  xmp?: Buffer;
  /** XMP data decoded as a string */
  xmpAsString?: string;
  /** Raw TIFFTAG_PHOTOSHOP data */
  tifftagPhotoshop?: Buffer;
}
/** Dimensions of one level in a multi-level (pyramidal) image. */
interface LevelMetadata {
  /** Level width in pixels */
  width: number;
  /** Level height in pixels */
  height: number;
}
/** A single PNG text comment entry. */
interface CommentsMetadata {
  /** Comment keyword */
  keyword: string;
  /** Comment text value */
  text: string;
}

Usage Examples:
// Basic metadata inspection (top-level await shown for brevity)
const metadata = await sharp('image.jpg').metadata();
console.log(`Format: ${metadata.format}`);
console.log(`Dimensions: ${metadata.width}x${metadata.height}`);
console.log(`Channels: ${metadata.channels}`);
console.log(`Has Alpha: ${metadata.hasAlpha}`);
console.log(`Color Space: ${metadata.space}`);
// Check for specific features
if (metadata.isProgressive) {
  console.log('Image uses progressive encoding');
}
// Orientation 1 means "normal"; any other value requests a rotation/flip
if (metadata.orientation && metadata.orientation !== 1) {
  console.log(`EXIF orientation: ${metadata.orientation}`);
  console.log(`Auto-oriented size: ${metadata.autoOrient.width}x${metadata.autoOrient.height}`);
}
// Multi-page image handling
if (metadata.pages && metadata.pages > 1) {
  console.log(`Multi-page image with ${metadata.pages} pages`);
  console.log(`Page height: ${metadata.pageHeight}`);
}
// Animation information (delay is only present for animated inputs)
if (metadata.delay) {
  console.log(`Animated with delays: ${metadata.delay.join(', ')}ms`);
  console.log(`Loop count: ${metadata.loop || 'infinite'}`);
}

Analyze pixel-level statistics for each channel without full image processing.
/**
 * Access pixel-derived statistics for every channel.
 * Unlike metadata(), the values here are computed from pixel data.
 * @returns Promise resolving to statistics object
 */
stats(): Promise<Stats>;
/** Pixel-derived statistics for a whole image. */
interface Stats {
  /** Per-channel statistics array (one entry per band) */
  channels: ChannelStats[];
  /** true when the image contains no transparent pixels */
  isOpaque: boolean;
  /** Histogram-based entropy (experimental) */
  entropy: number;
  /** Laplacian-based sharpness estimation (experimental) */
  sharpness: number;
  /** Dominant color in sRGB space (experimental) */
  dominant: { r: number; g: number; b: number };
}
/** Statistics for a single channel (band) of an image. */
interface ChannelStats {
  /** Minimum pixel value in channel */
  min: number;
  /** Maximum pixel value in channel */
  max: number;
  /** Sum of all pixel values */
  sum: number;
  /** Sum of squared pixel values */
  squaresSum: number;
  /** Mean pixel value */
  mean: number;
  /** Standard deviation */
  stdev: number;
  /** X coordinate of minimum pixel */
  minX: number;
  /** Y coordinate of minimum pixel */
  minY: number;
  /** X coordinate of maximum pixel */
  maxX: number;
  /** Y coordinate of maximum pixel */
  maxY: number;
}

Usage Examples:
// Comprehensive statistics analysis
const stats = await sharp('photo.jpg').stats();
console.log(`Image is ${stats.isOpaque ? 'opaque' : 'transparent'}`);
console.log(`Entropy: ${stats.entropy.toFixed(2)}`);
console.log(`Sharpness: ${stats.sharpness.toFixed(2)}`);
console.log(`Dominant color: RGB(${stats.dominant.r}, ${stats.dominant.g}, ${stats.dominant.b})`);
// Per-channel analysis (channel order follows the input color space)
stats.channels.forEach((channel, index) => {
  const channelName = ['Red', 'Green', 'Blue', 'Alpha'][index] || `Channel ${index}`;
  console.log(`${channelName} channel:`);
  console.log(` Range: ${channel.min} - ${channel.max}`);
  console.log(` Mean: ${channel.mean.toFixed(2)}`);
  console.log(` Std Dev: ${channel.stdev.toFixed(2)}`);
  console.log(` Min at: (${channel.minX}, ${channel.minY})`);
  console.log(` Max at: (${channel.maxX}, ${channel.maxY})`);
});
// Quality assessment: mean of RGB means ~ brightness, mean of RGB stdevs ~ contrast
const brightness = stats.channels.slice(0, 3).reduce((sum, ch) => sum + ch.mean, 0) / 3;
const contrast = stats.channels.slice(0, 3).reduce((sum, ch) => sum + ch.stdev, 0) / 3;
console.log(`Average brightness: ${brightness.toFixed(1)}`);
console.log(`Average contrast: ${contrast.toFixed(1)}`);
// Thresholds below are heuristic — tune for your content
if (stats.sharpness < 50) {
  console.log('Image appears to be blurred');
} else if (stats.sharpness > 200) {
  console.log('Image appears to be very sharp');
}

Batch Metadata Extraction:
const analyzeDirectory = async (directory) => {
const files = await fs.readdir(directory);
const imageFiles = files.filter(f => /\.(jpg|jpeg|png|webp|tiff)$/i.test(f));
const results = await Promise.all(
imageFiles.map(async (file) => {
const filePath = path.join(directory, file);
try {
const [metadata, stats] = await Promise.all([
sharp(filePath).metadata(),
sharp(filePath).stats()
]);
return {
file,
format: metadata.format,
dimensions: `${metadata.width}x${metadata.height}`,
size: metadata.size,
channels: metadata.channels,
hasAlpha: metadata.hasAlpha,
colorSpace: metadata.space,
sharpness: stats.sharpness,
brightness: stats.channels.slice(0, 3).reduce((sum, ch) => sum + ch.mean, 0) / 3
};
} catch (error) {
return { file, error: error.message };
}
})
);
return results;
};

Quality Assessment:
const assessImageQuality = async (imagePath) => {
const [metadata, stats] = await Promise.all([
sharp(imagePath).metadata(),
sharp(imagePath).stats()
]);
const assessment = {
file: imagePath,
format: metadata.format,
dimensions: { width: metadata.width, height: metadata.height },
quality: {
sharpness: stats.sharpness,
entropy: stats.entropy,
isProgressive: metadata.isProgressive,
compression: metadata.chromaSubsampling
},
issues: []
};
// Quality checks
if (stats.sharpness < 30) {
assessment.issues.push('Image appears blurred');
}
if (stats.entropy < 6) {
assessment.issues.push('Low image complexity');
}
if (metadata.width < 300 || metadata.height < 300) {
assessment.issues.push('Low resolution');
}
// Channel analysis for exposure issues
const rgbChannels = stats.channels.slice(0, 3);
const avgBrightness = rgbChannels.reduce((sum, ch) => sum + ch.mean, 0) / 3;
if (avgBrightness < 50) {
assessment.issues.push('Underexposed');
} else if (avgBrightness > 200) {
assessment.issues.push('Overexposed');
}
// Check for clipping
const hasClipping = rgbChannels.some(ch => ch.min === 0 || ch.max === 255);
if (hasClipping) {
assessment.issues.push('Clipped highlights/shadows');
}
return assessment;
};

Smart Processing Based on Metadata:
const smartProcess = async (input, output) => {
const metadata = await sharp(input).metadata();
let pipeline = sharp(input);
// Auto-orient if needed
if (metadata.orientation && metadata.orientation !== 1) {
pipeline = pipeline.autoOrient();
}
// Resize large images for web
if (metadata.width > 2000 || metadata.height > 2000) {
pipeline = pipeline.resize({
width: 1920,
height: 1920,
fit: 'inside',
withoutEnlargement: true
});
}
// Choose output format based on content
if (metadata.hasAlpha) {
// Preserve transparency
pipeline = pipeline.png({ quality: 90 });
} else if (metadata.format === 'jpeg' || metadata.channels === 3) {
// Photographic content
pipeline = pipeline.jpeg({ quality: 85, progressive: true });
} else {
// Graphics/other content
pipeline = pipeline.webp({ quality: 90 });
}
await pipeline.toFile(output);
};

Metadata-Based Validation:
/**
 * Validate an image file against optional requirements.
 * Hard failures (wrong format, below minimum dimensions) populate `errors`
 * and clear `valid`; soft limits (maximums, aspect ratio, color space,
 * missing profile) only produce `warnings`.
 * @param imagePath path to the image file
 * @param requirements optional: formats, minWidth, maxWidth, minHeight,
 *   maxHeight, aspectRatio, aspectTolerance, colorSpace, requireProfile
 * @returns { valid, errors, warnings }
 */
const validateImage = async (imagePath, requirements = {}) => {
  const metadata = await sharp(imagePath).metadata();
  const validation = {
    valid: true,
    errors: [],
    warnings: []
  };
  // Format validation
  if (requirements.formats && !requirements.formats.includes(metadata.format)) {
    validation.valid = false;
    validation.errors.push(`Invalid format: ${metadata.format}. Expected: ${requirements.formats.join(', ')}`);
  }
  // Dimension validation: minimums are errors, maximums only warn
  if (requirements.minWidth && metadata.width < requirements.minWidth) {
    validation.valid = false;
    validation.errors.push(`Width too small: ${metadata.width}px < ${requirements.minWidth}px`);
  }
  if (requirements.maxWidth && metadata.width > requirements.maxWidth) {
    validation.warnings.push(`Width large: ${metadata.width}px > ${requirements.maxWidth}px`);
  }
  if (requirements.minHeight && metadata.height < requirements.minHeight) {
    validation.valid = false;
    validation.errors.push(`Height too small: ${metadata.height}px < ${requirements.minHeight}px`);
  }
  // maxHeight check added for symmetry with maxWidth (missing previously)
  if (requirements.maxHeight && metadata.height > requirements.maxHeight) {
    validation.warnings.push(`Height large: ${metadata.height}px > ${requirements.maxHeight}px`);
  }
  // Aspect ratio validation (warning only)
  if (requirements.aspectRatio) {
    const ratio = metadata.width / metadata.height;
    const expected = requirements.aspectRatio;
    const tolerance = requirements.aspectTolerance || 0.1;
    if (Math.abs(ratio - expected) > tolerance) {
      validation.warnings.push(`Aspect ratio ${ratio.toFixed(2)} differs from expected ${expected}`);
    }
  }
  // Color space validation
  if (requirements.colorSpace && metadata.space !== requirements.colorSpace) {
    validation.warnings.push(`Color space ${metadata.space} differs from expected ${requirements.colorSpace}`);
  }
  // Profile validation
  if (requirements.requireProfile && !metadata.hasProfile) {
    validation.warnings.push('No embedded color profile found');
  }
  return validation;
};
// Usage: require web formats at 800x600 minimum, warn above 4000px wide,
// expect roughly 16:9 in sRGB with an embedded color profile
const result = await validateImage('upload.jpg', {
  formats: ['jpeg', 'png', 'webp'],
  minWidth: 800,
  minHeight: 600,
  maxWidth: 4000,
  aspectRatio: 16/9,
  aspectTolerance: 0.2,
  colorSpace: 'srgb',
  requireProfile: true
});

EXIF Data Extraction:
const extractExifData = async (imagePath) => {
const metadata = await sharp(imagePath).metadata();
if (metadata.exif) {
// EXIF data is available as a Buffer
// You would typically use an EXIF parsing library here
console.log(`EXIF data size: ${metadata.exif.length} bytes`);
}
if (metadata.orientation) {
const orientations = {
1: 'Normal',
2: 'Flipped horizontally',
3: 'Rotated 180°',
4: 'Flipped vertically',
5: 'Rotated 90° CCW and flipped',
6: 'Rotated 90° CW',
7: 'Rotated 90° CW and flipped',
8: 'Rotated 90° CCW'
};
console.log(`Orientation: ${orientations[metadata.orientation] || 'Unknown'}`);
}
return metadata;
};

Performance Considerations:
Install with Tessl CLI
npx tessl i tessl/npm-sharpdocs
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10