Lightweight utilities for inspecting and manipulating video container formats
Review status:
- Does it follow best practices? — Pending
- Impact — Pending (no eval scenarios have been run)
- Risk profile of this skill — Pending
Complete MP4 container format support including generation, parsing, transmuxing, and inspection. The MP4 module is the core component for creating MSE-compatible fragmented MP4 segments from transport streams.
Main class for converting MPEG2-TS segments to fragmented MP4 segments compatible with Media Source Extensions.
/**
 * Converts MPEG2-TS data into fragmented MP4 segments suitable for
 * Media Source Extensions playback.
 *
 * Feed raw transport-stream bytes via {@link push}, then call
 * {@link flush} to force buffered output through the 'data' event.
 */
class Transmuxer {
  /** @param options - Optional transmuxer configuration. */
  constructor(options?: TransmuxerOptions);

  /** Queue a chunk of transport-stream bytes for processing. */
  push(data: Uint8Array): void;

  /** Flush any buffered data and finalize the current segment. */
  flush(): void;

  /** Fires once per completed output segment. */
  on(event: 'data', callback: (segment: TransmuxedSegment) => void): void;

  /** Fires when processing is finished (presumably after a flush — confirm against mux.js docs). */
  on(event: 'done', callback: () => void): void;

  /** Remove listeners for `event` (presumably all listeners when omitted — confirm). */
  off(event?: string): void;
}
/** Configuration accepted by the Transmuxer constructor. */
interface TransmuxerOptions {
  /** Decode time assigned to the first output segment; defaults to 0. */
  baseMediaDecodeTime?: number;
  /** When true, keep source timestamps rather than rebasing them to start at 0 (default: false). */
  keepOriginalTimestamps?: boolean;
  /** When true (the default), mux audio and video into a single MP4 segment. */
  remux?: boolean;
}
/** Output payload delivered by the Transmuxer 'data' event. */
interface TransmuxedSegment {
  /** Initialization segment (ftyp + moov boxes). */
  initSegment: Uint8Array;
  /** Media segment (moof + mdat boxes). */
  data: Uint8Array;
  /** ID3 metadata frames extracted from the stream. */
  metadata: {
    frames: ID3Frame[];
  };
  /** Parsed caption data. */
  captions: CaptionSet[];
}

Usage Examples:
const muxjs = require("mux.js");
// Basic transmuxing setup
const transmuxer = new muxjs.mp4.Transmuxer({
keepOriginalTimestamps: false,
remux: true
});
// Handle processed segments
transmuxer.on('data', (segment) => {
if (isFirstSegment) {
// First segment needs both init and data
const combined = new Uint8Array(
segment.initSegment.byteLength + segment.data.byteLength
);
combined.set(segment.initSegment, 0);
combined.set(segment.data, segment.initSegment.byteLength);
sourceBuffer.appendBuffer(combined);
} else {
// Subsequent segments only need data
sourceBuffer.appendBuffer(segment.data);
}
// Process captions
segment.captions.forEach(captionSet => {
captionSet.content.forEach(cue => {
const vttCue = new VTTCue(cue.startTime, cue.endTime, cue.text);
captionTrack.addCue(vttCue);
});
});
});
// Process transport stream data
transmuxer.push(transportStreamData);
transmuxer.flush();Specialized stream for processing audio segments in the transmuxing pipeline.
/**
 * Pipeline stage that assembles audio PES payloads into MP4 segments.
 */
class AudioSegmentStream {
  constructor(track: Track, options?: SegmentOptions);
  /** Queue a parsed PES packet for the audio track. */
  push(data: PESData): void;
  /** Emit any buffered audio as a finished segment. */
  flush(): void;
}

Specialized stream for processing video segments in the transmuxing pipeline.
/**
 * Pipeline stage that assembles video PES payloads into MP4 segments.
 */
class VideoSegmentStream {
  constructor(track: Track, options?: SegmentOptions);
  /** Queue a parsed PES packet for the video track. */
  push(data: PESData): void;
  /** Emit any buffered video as a finished segment. */
  flush(): void;
}

Utilities for generating MP4 box structures and segments.
/** Builders for individual MP4 (ISO BMFF) boxes and complete segments. */
const generator: {
  /** Generate file type box (ftyp). */
  ftyp(): Uint8Array;
  /** Generate media data box (mdat) containing media samples. */
  mdat(data: Uint8Array): Uint8Array;
  /** Generate movie fragment box (moof) for fragmented MP4. */
  moof(sequenceNumber: number, tracks: Track[]): Uint8Array;
  /** Generate movie box (moov) containing track metadata. */
  moov(tracks: Track[], duration: number): Uint8Array;
  /** Generate complete initialization segment (ftyp + moov). */
  initSegment(tracks: Track[]): Uint8Array;
  /** Generate track fragment header box (tfhd). */
  tfhd(track: Track): Uint8Array;
  /** Generate track run box (trun) with sample data. */
  trun(track: Track, samples: Sample[]): Uint8Array;
  /** Generate media header box (mdhd). */
  mdhd(timescale: number, duration: number): Uint8Array;
  /** Generate track header box (tkhd). */
  tkhd(track: Track): Uint8Array;
};

Usage Examples:
const muxjs = require("mux.js");
// Generate initialization segment
const tracks = [{
id: 1,
codec: 'avc',
type: 'video',
timelineStartInfo: { baseMediaDecodeTime: 0 }
}];
const initSegment = muxjs.mp4.generator.initSegment(tracks);
// Generate media data box
const samples = new Uint8Array([/* sample data */]);
const mdatBox = muxjs.mp4.generator.mdat(samples);Utilities for analyzing and extracting metadata from MP4 files.
/** Read-only inspection helpers for parsed or raw MP4 data. */
const probe: {
  /** Find specific box type in MP4 data; null when absent. */
  findBox(data: Uint8Array, boxType: string): Uint8Array | null;
  /** Parse box type from binary data. */
  parseType(data: Uint8Array): string;
  /** Extract timescale from MP4. */
  timescale(data: Uint8Array): number;
  /** Extract start time from MP4, in seconds. */
  startTime(data: Uint8Array): number;
  /** Extract composition start time. */
  compositionStartTime(data: Uint8Array): number;
  /** Get array of video track IDs. */
  videoTrackIds(data: Uint8Array): number[];
  /** Get track information for all tracks. */
  tracks(data: Uint8Array): Track[];
  /** Extract timescale from media header box. */
  getTimescaleFromMediaHeader(data: Uint8Array): number;
  /** Extract ID3 data from emsg boxes. */
  getEmsgID3(data: Uint8Array): ID3Frame[];
};

Usage Examples:
const muxjs = require("mux.js");
// Analyze MP4 file
const mp4Data = new Uint8Array([/* MP4 file data */]);
// Get basic information
const timescale = muxjs.mp4.probe.timescale(mp4Data);
const startTime = muxjs.mp4.probe.startTime(mp4Data);
const tracks = muxjs.mp4.probe.tracks(mp4Data);
console.log(`Timescale: ${timescale}`);
console.log(`Start time: ${startTime}s`);
console.log(`Tracks: ${tracks.length}`);
// Find specific boxes
const moovBox = muxjs.mp4.probe.findBox(mp4Data, 'moov');
if (moovBox) {
console.log('Found moov box');
}Parser for extracting CEA-608 caption data from MP4 streams.
/**
 * Extracts CEA-608 captions embedded in MP4 streams.
 */
class CaptionParser {
  constructor();
  /** Parse caption data from MP4 stream. */
  parse(data: Uint8Array): Caption[];
  /** Clear parser state. */
  clearParsedCaptions(): void;
  /** Get parsed captions. */
  getParsedCaptions(): Caption[];
}

Parser for extracting WebVTT caption data from MP4 streams.
/**
 * Extracts WebVTT cues carried inside MP4 streams.
 */
class WebVttParser {
  constructor();
  /** Parse WebVTT cue payloads out of the given MP4 bytes. */
  parse(data: Uint8Array): WebVttCue[];
}
/** A single WebVTT cue parsed from an MP4 stream. */
interface WebVttCue {
  /** Cue start time in seconds. */
  startTime: number;
  /** Cue end time in seconds. */
  endTime: number;
  /** Cue payload text. */
  text: string;
  /** Optional cue identifier. */
  id?: string;
  /** Optional cue settings string (positioning, alignment, etc.). */
  settings?: string;
}

/** Metadata describing a single audio or video track. */
interface Track {
  id: number;
  codec: 'avc' | 'adts' | string;
  type: 'video' | 'audio';
  timelineStartInfo: {
    baseMediaDecodeTime: number;
  };
  /** Audio-only fields. */
  samplerate?: number;
  channelcount?: number;
  /** Video-only fields. */
  width?: number;
  height?: number;
}
/** Per-sample metadata consumed by generator.trun. */
interface Sample {
  /** Sample duration in timescale units. */
  duration: number;
  /** Sample size in bytes. */
  size: number;
  /**
   * Dependency flags; fields appear to mirror the ISO/IEC 14496-12
   * trun sample-flag bits — confirm against the spec.
   * NOTE(review): isNonSync is a boolean while the sibling fields are
   * numbers; verify this matches the producing code.
   */
  flags: {
    isLeading: number;
    dependsOn: number;
    isDependedOn: number;
    hasRedundancy: number;
    degradPrio: number;
    isNonSync: boolean;
  };
  /** Offset between decode time and presentation time. */
  compositionTimeOffset: number;
}
/** Payload handed to the segment streams by the demux pipeline. */
interface PESData {
  /** Raw PES payload bytes. */
  data: Uint8Array;
  /** Presentation timestamp, when present in the PES header. */
  pts?: number;
  /** Decode timestamp, when present in the PES header. */
  dts?: number;
  /** MPEG-TS stream type identifier. */
  streamType: number;
}
/** Options shared by AudioSegmentStream and VideoSegmentStream. */
interface SegmentOptions {
  /** Decode time assigned to the first emitted segment. */
  baseMediaDecodeTime?: number;
  /** Keep source timestamps instead of rebasing them. */
  keepOriginalTimestamps?: boolean;
}
/** A single extracted ID3 metadata frame. */
interface ID3Frame {
  /** Frame identifier (presumably the ID3 frame ID, e.g. "TXXX" — confirm). */
  key: string;
  /** Raw frame payload bytes. */
  data: Uint8Array;
}
/** A group of caption cues covering one time range of a segment. */
interface CaptionSet {
  /** Range start, in seconds. */
  startTime: number;
  /** Range end, in seconds. */
  endTime: number;
  /** Individual cues that fall within this range. */
  content: Caption[];
}
/** One parsed caption cue. */
interface Caption {
  /** Cue start time in seconds. */
  startTime: number;
  /** Cue end time in seconds. */
  endTime: number;
  /** Caption text content. */
  text: string;
  /** Vertical placement (presumably the CEA-608 row — confirm units). */
  line: number;
  /** Horizontal placement — confirm units. */
  position: number;
}