Advanced functionality including screenshot generation, video concatenation, metadata analysis, and specialized processing recipes.
Generate video thumbnails and screenshots at specific time points.
/**
* Generate video screenshots
* @param config - Screenshot configuration (number or object)
* @param folder - Optional output directory (legacy parameter)
* @returns FfmpegCommand instance
* @emits 'filenames' event with generated filenames
*/
screenshots(config, folder?) // → FfmpegCommand
// Aliases: takeScreenshots, thumbnail, thumbnails, screenshot

Configuration Object:
interface ScreenshotConfig {
count?: number; // Number of screenshots
folder?: string; // Output directory
filename?: string; // Filename pattern ('%b', '%r', '%s', '%w', '%h', '%i')
timemarks?: string[]; // Specific time points ['25%', '50%', '01:30:00']
timestamps?: string[]; // Alias for timemarks
fastSeek?: boolean; // Use fast but less accurate seeking
size?: string; // Screenshot size ('320x240', '50%', etc.)
}

Usage Examples:
// Simple screenshot count
ffmpeg('video.mp4')
.screenshots({
count: 5,
folder: './thumbnails',
filename: 'thumb_%i.jpg'
})
.on('filenames', (filenames) => {
console.log('Screenshots saved:', filenames);
});
// Screenshots at specific times
ffmpeg('movie.mp4')
.screenshots({
timemarks: ['25%', '50%', '75%'],
folder: './screenshots',
filename: 'scene_%s.png',
size: '640x360'
});
// Precise timestamp screenshots
ffmpeg('presentation.mp4')
.screenshots({
timestamps: ['00:01:30', '00:05:45', '00:12:00'],
folder: './slides',
filename: 'slide_%i.jpg',
fastSeek: false // Accurate seeking
});
// Custom filename patterns
ffmpeg('video.mp4')
.screenshots({
count: 3,
folder: './thumbs',
filename: '%b_thumb_%i_%wx%h.jpg', // basename_thumb_index_widthxheight
size: '300x200'
});
// Legacy syntax (still supported)
ffmpeg('video.mp4')
.screenshots(5, './thumbnails');

Filename Pattern Variables:
%b - Input basename (without extension)
%r - Resolution (WxH)
%s - Timestamp in seconds
%w - Width in pixels
%h - Height in pixels
%i - Screenshot index (1-based)
%000i - Zero-padded screenshot index (any number of zeros for padding)
%f - Input filename (full filename with extension)

Concatenate multiple input files into a single output.
/**
* Concatenate inputs into single file
* @param target - Output file path or writable stream
* @param options - Optional pipe options for streams
* @returns FfmpegCommand instance
*/
concat(target, options?) // → FfmpegCommand
// Aliases: concatenate, mergeToFile

Usage Examples:
// Basic concatenation
ffmpeg()
.input('part1.mp4')
.input('part2.mp4')
.input('part3.mp4')
.concat('complete_movie.mp4')
.on('end', () => {
console.log('Concatenation finished');
});
// Concatenate different formats (re-encodes)
ffmpeg()
.input('intro.avi')
.input('main_content.mov')
.input('outro.mp4')
.videoCodec('libx264') // Ensure consistent encoding
.audioCodec('aac')
.concat('unified_format.mp4');
// Stream concatenation
const fs = require('fs');
const outputStream = fs.createWriteStream('concatenated.mp4');
ffmpeg()
.input('video1.mp4')
.input('video2.mp4')
.concat(outputStream, { end: true });
// Concatenate with filters (for format consistency)
ffmpeg()
.input('720p_video.mp4')
.input('1080p_video.mp4')
.complexFilter([
'[0:v]scale=1920:1080[v0]',
'[1:v]scale=1920:1080[v1]',
'[v0][0:a][v1][1:a]concat=n=2:v=1:a=1[outv][outa]'
])
.outputOptions(['-map', '[outv]', '-map', '[outa]'])
.save('uniform_concat.mp4');

Analyze input files using FFprobe for detailed media information.
/**
* Analyze input with ffprobe
* @param index - Optional input index (default: current/last input)
* @param options - Optional ffprobe options array
* @param callback - Callback function (err, data) => void
*/
ffprobe(index?, options?, callback) // → void
/**
* Static method for direct file analysis
* @param file - Input file path
* @param callback - Callback function (err, data) => void
*/
FfmpegCommand.ffprobe(file, callback) // → void
// Also supports: FfmpegCommand.ffprobe(file, options, callback)

Usage Examples:
// Basic metadata analysis
ffmpeg.ffprobe('video.mp4', (err, metadata) => {
if (err) {
console.error('Probe error:', err);
return;
}
console.log('Duration:', metadata.format.duration);
console.log('Bitrate:', metadata.format.bit_rate);
console.log('Format:', metadata.format.format_name);
metadata.streams.forEach((stream, index) => {
console.log(`Stream ${index}:`, {
type: stream.codec_type,
codec: stream.codec_name,
duration: stream.duration
});
});
});
// Probe specific input in command
ffmpeg('input1.mp4')
.input('input2.avi')
.ffprobe(1, (err, metadata) => { // Probe second input
if (!err) {
console.log('Second input metadata:', metadata);
}
});
// Probe with custom options
ffmpeg.ffprobe('video.mkv', ['-show_chapters', '-show_format'], (err, data) => {
if (!err) {
console.log('Chapters:', data.chapters);
console.log('Format info:', data.format);
}
});
// Promise wrapper for ffprobe
/**
 * Promise-based wrapper around the callback-style ffmpeg.ffprobe().
 * @param {string} file - Path of the media file to probe
 * @returns {Promise<Object>} Resolves with ffprobe metadata, rejects on probe error
 */
const probeAsync = (file) =>
  new Promise((resolve, reject) => {
    ffmpeg.ffprobe(file, (err, metadata) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(metadata);
    });
  });
// Use with async/await
const analyzeVideo = async (filename) => {
try {
const metadata = await probeAsync(filename);
const videoStream = metadata.streams.find(s => s.codec_type === 'video');
const audioStream = metadata.streams.find(s => s.codec_type === 'audio');
return {
duration: parseFloat(metadata.format.duration),
size: parseInt(metadata.format.size),
video: videoStream ? {
codec: videoStream.codec_name,
width: videoStream.width,
height: videoStream.height,
fps: eval(videoStream.r_frame_rate) // "30000/1001" format
} : null,
audio: audioStream ? {
codec: audioStream.codec_name,
sampleRate: audioStream.sample_rate,
channels: audioStream.channels
} : null
};
} catch (error) {
console.error('Analysis failed:', error);
return null;
}
};Advanced filter configurations for complex processing.
/**
* Set complex filtergraph for advanced processing
* @param spec - Filter specification (string or object array)
* @param map - Optional output stream mappings
* @returns FfmpegCommand instance
*/
complexFilter(spec, map?) // → FfmpegCommand
// Aliases: filterGraph

Usage Examples:
// Picture-in-picture with fade transition
ffmpeg()
.input('background.mp4')
.input('overlay.mp4')
.complexFilter([
'[1:v]scale=320:240[pip]',
'[0:v][pip]overlay=main_w-overlay_w-10:10:enable=\'between(t,5,25)\'[out]'
])
.outputOptions(['-map', '[out]', '-map', '0:a'])
.save('pip_video.mp4');
// Multi-input audio mixing
ffmpeg()
.input('music.mp3')
.input('narration.wav')
.input('effects.wav')
.complexFilter([
'[0:a]volume=0.3[music]',
'[1:a]volume=1.0[voice]',
'[2:a]volume=0.5[fx]',
'[music][voice][fx]amix=inputs=3[out]'
])
.outputOptions(['-map', '[out]'])
.save('mixed_audio.mp3');
// Split screen effect
ffmpeg()
.input('left_video.mp4')
.input('right_video.mp4')
.complexFilter([
'[0:v]crop=iw/2:ih:0:0[left]',
'[1:v]crop=iw/2:ih:iw/2:0[right]',
'[left][right]hstack[out]'
])
.outputOptions(['-map', '[out]'])
.save('split_screen.mp4');

Process multiple files with consistent settings.
const processVideoFiles = async (inputFiles, outputDir) => {
const baseCommand = ffmpeg()
.videoCodec('libx264')
.audioCodec('aac')
.videoBitrate('2000k')
.size('1920x1080');
const promises = inputFiles.map(inputFile => {
const outputFile = `${outputDir}/${path.basename(inputFile, path.extname(inputFile))}.mp4`;
return new Promise((resolve, reject) => {
baseCommand.clone()
.input(inputFile)
.save(outputFile)
.on('end', resolve)
.on('error', reject);
});
});
await Promise.all(promises);
console.log('Batch processing complete');
};Apply different processing based on input characteristics.
const smartProcess = async (inputFile, outputFile) => {
// Analyze input first
const metadata = await probeAsync(inputFile);
const videoStream = metadata.streams.find(s => s.codec_type === 'video');
let command = ffmpeg(inputFile);
// Adjust processing based on input
if (videoStream.width > 1920) {
command = command.size('1920x?'); // Downscale if too large
}
if (videoStream.codec_name !== 'h264') {
command = command.videoCodec('libx264'); // Re-encode if not H.264
}
// Set bitrate based on resolution
const pixels = videoStream.width * videoStream.height;
const bitrate = Math.max(1000, Math.min(5000, pixels / 500));
command
.videoBitrate(`${bitrate}k`)
.save(outputFile);
};/**
* Screenshot configuration interface
*/
interface ScreenshotConfig {
  count?: number; // Number of screenshots to generate
  folder?: string; // Output directory path
  filename?: string; // Filename pattern with variables (%b, %r, %s, %w, %h, %i, %f)
  timemarks?: string[]; // Time positions: percentages (['25%', '50%']) or timestamps (['00:01:30'])
  timestamps?: string[]; // Alias for timemarks
  fastSeek?: boolean; // Fast seeking (less accurate, faster)
  size?: string; // Output size ('WxH', 'W%', etc.)
}
/**
 * FFprobe metadata structure (simplified).
 * Top-level object passed to ffprobe() callbacks.
 */
interface ProbeData {
  streams: StreamInfo[]; // Array of stream information
  format: FormatInfo; // Container format information
  chapters?: ChapterInfo[]; // Chapter information (populated when probing with '-show_chapters')
}
/**
 * Stream information from FFprobe.
 * Fields marked (video streams) / (audio streams) are absent on other stream types.
 */
interface StreamInfo {
  index: number; // Stream index
  codec_name: string; // Codec name
  codec_type: string; // 'video', 'audio', 'subtitle', 'data'
  duration?: string; // Stream duration in seconds (string-encoded)
  width?: number; // Video width (video streams)
  height?: number; // Video height (video streams)
  sample_rate?: string; // Audio sample rate (audio streams)
  channels?: number; // Audio channel count (audio streams)
  r_frame_rate?: string; // Video frame rate as a ratio string, e.g. "30000/1001" (video streams)
}
/**
 * Format (container-level) information from FFprobe.
 * Note: numeric fields are string-encoded by ffprobe; convert with
 * parseFloat/parseInt before doing arithmetic.
 */
interface FormatInfo {
  filename: string; // Input filename
  format_name: string; // Format name
  duration: string; // Duration in seconds (string-encoded)
  size: string; // File size in bytes (string-encoded)
  bit_rate: string; // Overall bitrate (string-encoded)
  nb_streams: number; // Number of streams
  tags?: { [key: string]: string }; // Metadata tags
}
/**
 * Time specification for screenshots (timemarks/timestamps entries).
 */
type TimeMarkSpec =
  | string // Percentage ('25%') or timestamp ('01:30:00.500')
  | number; // Seconds as number
/**
 * Complex filter specification accepted by complexFilter().
 */
type ComplexFilterSpec =
  | string // Single filtergraph string
  | FilterObject[]; // Array of filter objects
/**
 * Single filter entry for complex filtergraphs.
 */
interface FilterObject {
  filter: string; // Filter name
  options?: string | Object; // Filter parameters
  inputs?: string[]; // Input stream labels (e.g. '0:v', 'pip')
  outputs?: string[]; // Output stream labels referenced by -map or later filters
}