Comprehensive debugging utilities for MP4, FLV, and transport stream inspection. These tools are essential for troubleshooting streaming issues, understanding media structure, and debugging transmuxing problems.
Complete MP4 debugging and analysis utilities for understanding MP4 box structure and content.
/**
* MP4 inspection and debugging tools
*/
const mp4Tools: {
/** Inspect MP4 structure and parse all boxes */
inspect(data: Uint8Array): MP4Structure;
/** Convert MP4 structure to human-readable text representation */
textify(structure: MP4Structure): string;
/** Parse MP4 box type from binary data */
parseType(data: Uint8Array): string;
/** Find specific box type in MP4 data */
findBox(data: Uint8Array, boxType: string): Uint8Array | null;
/** Parse track fragment (traf) box */
parseTraf(data: Uint8Array): TrafBox;
/** Parse track fragment decode time (tfdt) box */
parseTfdt(data: Uint8Array): TfdtBox;
/** Parse handler (hdlr) box */
parseHdlr(data: Uint8Array): HdlrBox;
/** Parse track fragment header (tfhd) box */
parseTfhd(data: Uint8Array): TfhdBox;
/** Parse track run (trun) box */
parseTrun(data: Uint8Array): TrunBox;
/** Parse segment index (sidx) box */
parseSidx(data: Uint8Array): SidxBox;
/** Parse sample flags from trun box */
parseSampleFlags(flags: number): SampleFlags;
};
interface MP4Structure {
boxes: MP4Box[];
totalSize: number;
}
interface MP4Box {
type: string;
size: number;
offset: number;
data: Uint8Array;
children?: MP4Box[];
}
Usage Examples:
const muxjs = require("mux.js");
// Inspect MP4 file structure
const mp4Data = new Uint8Array([/* MP4 file data */]);
const structure = muxjs.mp4.tools.inspect(mp4Data);
console.log('MP4 Analysis:');
console.log(`Total size: ${structure.totalSize} bytes`);
console.log(`Root boxes: ${structure.boxes.length}`);
// List all boxes
structure.boxes.forEach(box => {
console.log(`Box: ${box.type}, size: ${box.size} bytes`);
if (box.children) {
box.children.forEach(child => {
console.log(` Child: ${child.type}, size: ${child.size} bytes`);
});
}
});
// Get human-readable representation
const textOutput = muxjs.mp4.tools.textify(structure);
console.log('MP4 Structure:');
console.log(textOutput);
// Find specific boxes
const moovBox = muxjs.mp4.tools.findBox(mp4Data, 'moov');
if (moovBox) {
console.log('Found movie box');
// Parse handler box to identify track type
const hdlrBox = muxjs.mp4.tools.findBox(moovBox, 'hdlr');
if (hdlrBox) {
const handler = muxjs.mp4.tools.parseHdlr(hdlrBox);
console.log(`Track handler: ${handler.handlerType}`);
}
}
// Analyze fragmented MP4
const moofBox = muxjs.mp4.tools.findBox(mp4Data, 'moof');
if (moofBox) {
console.log('Found movie fragment');
// Parse track fragment
const trafBox = muxjs.mp4.tools.findBox(moofBox, 'traf');
if (trafBox) {
const traf = muxjs.mp4.tools.parseTraf(trafBox);
console.log(`Track fragment for track ${traf.trackId}`);
// Parse decode time
const tfdtData = muxjs.mp4.tools.findBox(trafBox, 'tfdt');
if (tfdtData) {
const tfdt = muxjs.mp4.tools.parseTfdt(tfdtData);
console.log(`Base media decode time: ${tfdt.baseMediaDecodeTime}`);
}
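// Hypothetical extension of this example (not from the mux.js docs): parse the
// track run and decode per-sample flags, assuming the parseTrun and
// parseSampleFlags shapes documented above.
const trunData = muxjs.mp4.tools.findBox(trafBox, 'trun');
if (trunData) {
const trun = muxjs.mp4.tools.parseTrun(trunData);
console.log(`Samples in fragment: ${trun.sampleCount}`);
trun.samples.slice(0, 5).forEach((sample, index) => {
if (sample.flags !== undefined) {
const flags = muxjs.mp4.tools.parseSampleFlags(sample.flags);
console.log(`Sample ${index}: duration=${sample.duration}, size=${sample.size}, isNonSync=${flags.isNonSync}`);
} else {
console.log(`Sample ${index}: duration=${sample.duration}, size=${sample.size}`);
}
});
}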
}
}
Tools for analyzing MPEG-2 transport stream structure and content.
/**
* Transport stream inspection and debugging tools
*/
const mp2tTools: {
/** Inspect transport stream structure and packets */
inspect(data: Uint8Array): TransportStreamStructure;
};
interface TransportStreamStructure {
packets: TransportPacketInfo[];
programs: ProgramInfo[];
streams: StreamInfo[];
totalPackets: number;
}
interface TransportPacketInfo {
pid: number;
payloadUnitStartIndicator: boolean;
adaptationFieldControl: number;
continuityCounter: number;
payload: Uint8Array;
}
interface ProgramInfo {
programNumber: number;
pmtPid: number;
streams: StreamInfo[];
}
interface StreamInfo {
pid: number;
streamType: number;
streamTypeDescription: string;
elementaryPidCount: number;
}
Usage Examples:
const muxjs = require("mux.js");
// Inspect transport stream
const tsData = new Uint8Array([/* Transport stream data */]);
const tsStructure = muxjs.mp2t.tools.inspect(tsData);
console.log('Transport Stream Analysis:');
console.log(`Total packets: ${tsStructure.totalPackets}`);
console.log(`Programs: ${tsStructure.programs.length}`);
console.log(`Streams: ${tsStructure.streams.length}`);
// Analyze programs
tsStructure.programs.forEach(program => {
console.log(`Program ${program.programNumber}:`);
console.log(` PMT PID: 0x${program.pmtPid.toString(16)}`);
program.streams.forEach(stream => {
console.log(` Stream PID: 0x${stream.pid.toString(16)}`);
console.log(` Type: ${stream.streamTypeDescription} (0x${stream.streamType.toString(16)})`);
console.log(` Packet count: ${stream.elementaryPidCount}`);
});
});
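// Hypothetical addition (not part of the mux.js API): check continuity counters
// per PID to spot dropped packets. The counter is 4 bits and increments only when
// a packet carries a payload (adaptationFieldControl 1 or 3); note that a repeated
// value can also just be a legitimately duplicated packet.
const lastCounter = {};
tsStructure.packets.forEach(packet => {
const hasPayload = packet.adaptationFieldControl === 1 || packet.adaptationFieldControl === 3;
if (!hasPayload) {
return;
}
if (lastCounter[packet.pid] !== undefined) {
const expected = (lastCounter[packet.pid] + 1) % 16;
if (packet.continuityCounter !== expected) {
console.log(`Continuity gap on PID 0x${packet.pid.toString(16)}: expected ${expected}, got ${packet.continuityCounter}`);
}
}
lastCounter[packet.pid] = packet.continuityCounter;
});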
// Analyze packet distribution
const pidCounts = {};
tsStructure.packets.forEach(packet => {
pidCounts[packet.pid] = (pidCounts[packet.pid] || 0) + 1;
});
console.log('Packet distribution by PID:');
Object.entries(pidCounts).forEach(([pid, count]) => {
console.log(` PID 0x${parseInt(pid).toString(16)}: ${count} packets`);
});
Tools for analyzing FLV file structure and tags.
/**
* FLV inspection and debugging tools
*/
const flvTools: {
/** Inspect individual FLV tag structure */
inspectTag(data: Uint8Array): FlvTagInfo;
/** Inspect complete FLV file structure */
inspect(data: Uint8Array): FlvStructure;
/** Convert FLV structure to human-readable text */
textify(structure: FlvStructure): string;
};
interface FlvTagInfo {
tagType: number;
tagTypeDescription: string;
dataSize: number;
timestamp: number;
timestampExtended: number;
streamId: number;
data: Uint8Array;
}
interface FlvStructure {
header: FlvHeader;
tags: FlvTagInfo[];
totalSize: number;
duration: number;
}
interface FlvHeader {
signature: string;
version: number;
flags: {
audio: boolean;
video: boolean;
};
headerSize: number;
}
Usage Examples:
const muxjs = require("mux.js");
// Inspect FLV file
const flvData = new Uint8Array([/* FLV file data */]);
const flvStructure = muxjs.flv.tools.inspect(flvData);
console.log('FLV Analysis:');
console.log(`Version: ${flvStructure.header.version}`);
console.log(`Has audio: ${flvStructure.header.flags.audio}`);
console.log(`Has video: ${flvStructure.header.flags.video}`);
console.log(`Duration: ${flvStructure.duration}ms`);
console.log(`Total tags: ${flvStructure.tags.length}`);
// Analyze tag types
const tagTypeCounts = {};
flvStructure.tags.forEach(tag => {
const type = tag.tagTypeDescription;
tagTypeCounts[type] = (tagTypeCounts[type] || 0) + 1;
});
console.log('Tag distribution:');
Object.entries(tagTypeCounts).forEach(([type, count]) => {
console.log(` ${type}: ${count} tags`);
});
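// Hypothetical sanity check (not from the mux.js docs): FLV tag timestamps
// should normally be non-decreasing; a backwards jump is worth investigating.
let previousTimestamp = -1;
flvStructure.tags.forEach((tag, index) => {
if (tag.timestamp < previousTimestamp) {
console.log(`Timestamp went backwards at tag ${index}: ${previousTimestamp}ms -> ${tag.timestamp}ms`);
}
previousTimestamp = tag.timestamp;
});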
// Get readable representation
const textOutput = muxjs.flv.tools.textify(flvStructure);
console.log('FLV Structure:');
console.log(textOutput);
// Inspect specific tag
const firstVideoTag = flvStructure.tags.find(tag => tag.tagType === 9);
if (firstVideoTag) {
const tagInfo = muxjs.flv.tools.inspectTag(firstVideoTag.data);
console.log('First video tag:', tagInfo);
}
Common workflow for debugging transmuxing issues:
const muxjs = require("mux.js");
function debugTransmuxing(transportStreamData) {
console.log('=== Transport Stream Analysis ===');
// 1. Inspect input transport stream
const tsStructure = muxjs.mp2t.tools.inspect(transportStreamData);
console.log(`Input: ${tsStructure.totalPackets} packets`);
console.log(`Programs: ${tsStructure.programs.length}`);
// 2. Set up transmuxer with debugging
const transmuxer = new muxjs.mp4.Transmuxer();
transmuxer.on('data', (segment) => {
console.log('=== MP4 Output Analysis ===');
// Inspect initialization segment
if (segment.initSegment.length > 0) {
const initStructure = muxjs.mp4.tools.inspect(segment.initSegment);
console.log('Init segment boxes:');
initStructure.boxes.forEach(box => {
console.log(` ${box.type}: ${box.size} bytes`);
});
}
// Inspect media segment
const mediaStructure = muxjs.mp4.tools.inspect(segment.data);
console.log('Media segment boxes:');
mediaStructure.boxes.forEach(box => {
console.log(` ${box.type}: ${box.size} bytes`);
});
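// Hypothetical sanity check: a playable fMP4 media segment should contain
// both a moof and an mdat box.
const mediaBoxTypes = mediaStructure.boxes.map(box => box.type);
['moof', 'mdat'].forEach(required => {
if (!mediaBoxTypes.includes(required)) {
console.log(`Warning: media segment is missing a ${required} box`);
}
});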
// Check for captions
console.log(`Captions: ${segment.captions.length}`);
segment.captions.forEach(captionSet => {
console.log(` ${captionSet.content.length} captions from ${captionSet.startTime}s to ${captionSet.endTime}s`);
});
// Check metadata, guarding against segments that carry no timed ID3 data
if (segment.metadata && segment.metadata.frames) {
console.log(`Metadata frames: ${segment.metadata.frames.length}`);
}
});
// 3. Process data
transmuxer.push(transportStreamData);
transmuxer.flush();
}
// Usage
debugTransmuxing(transportStreamData);
Detailed MP4 structure analysis for troubleshooting:
function analyzeMP4Structure(mp4Data) {
const structure = muxjs.mp4.tools.inspect(mp4Data);
console.log('=== MP4 Structure Analysis ===');
// Check for required boxes
const requiredBoxes = ['ftyp', 'moov'];
const foundBoxes = structure.boxes.map(box => box.type);
requiredBoxes.forEach(required => {
if (foundBoxes.includes(required)) {
console.log(`✓ Found required box: ${required}`);
} else {
console.log(`✗ Missing required box: ${required}`);
}
});
// Analyze tracks
const moovBox = muxjs.mp4.tools.findBox(mp4Data, 'moov');
if (moovBox) {
const trakBoxes = [];
let offset = 8; // Skip the moov box header
while (offset < moovBox.length) {
// Each child box starts with a 4-byte size followed by a 4-byte type
const size = new DataView(moovBox.buffer, moovBox.byteOffset + offset).getUint32(0);
const boxType = muxjs.mp4.tools.parseType(moovBox.subarray(offset + 4, offset + 8));
if (boxType === 'trak') {
trakBoxes.push(moovBox.subarray(offset, offset + size));
}
if (size === 0) {
break; // Malformed box size; avoid an infinite loop
}
offset += size;
}
console.log(`Tracks found: ${trakBoxes.length}`);
trakBoxes.forEach((trakBox, index) => {
const hdlrBox = muxjs.mp4.tools.findBox(trakBox, 'hdlr');
if (hdlrBox) {
const handler = muxjs.mp4.tools.parseHdlr(hdlrBox);
console.log(` Track ${index + 1}: ${handler.handlerType}`);
}
});
}
// Check for fragmentation
const hasFragments = foundBoxes.includes('moof');
console.log(`Fragmented MP4: ${hasFragments ? 'Yes' : 'No'}`);
return structure;
}
// Detailed box analysis
function analyzeBox(boxData, boxType) {
console.log(`=== ${boxType.toUpperCase()} Box Analysis ===`);
switch (boxType) {
case 'tfhd': {
const tfhd = muxjs.mp4.tools.parseTfhd(boxData);
console.log(`Track ID: ${tfhd.trackId}`);
console.log(`Base data offset: ${tfhd.baseDataOffset}`);
break;
}
case 'trun': {
const trun = muxjs.mp4.tools.parseTrun(boxData);
console.log(`Sample count: ${trun.sampleCount}`);
console.log(`Data offset: ${trun.dataOffset}`);
break;
}
case 'sidx': {
const sidx = muxjs.mp4.tools.parseSidx(boxData);
console.log(`Reference ID: ${sidx.referenceId}`);
console.log(`Timescale: ${sidx.timescale}`);
break;
}
default:
console.log(`No dedicated analysis for box type: ${boxType}`);
}
}
// Performance monitoring during inspection
function monitoredInspect(data, format) {
const startTime = performance.now();
let result;
switch (format) {
case 'mp4':
result = muxjs.mp4.tools.inspect(data);
break;
case 'flv':
result = muxjs.flv.tools.inspect(data);
break;
case 'ts':
result = muxjs.mp2t.tools.inspect(data);
break;
default:
throw new Error(`Unsupported format: ${format}`);
}
const endTime = performance.now();
console.log(`${format.toUpperCase()} inspection took ${endTime - startTime}ms`);
return result;
}
interface TrafBox {
trackId: number;
baseDataOffset?: number;
sampleDescriptionIndex?: number;
defaultSampleDuration?: number;
defaultSampleSize?: number;
defaultSampleFlags?: number;
}
interface TfdtBox {
version: number;
baseMediaDecodeTime: number;
}
interface HdlrBox {
handlerType: string;
name: string;
}
interface TfhdBox {
trackId: number;
baseDataOffset?: number;
sampleDescriptionIndex?: number;
defaultSampleDuration?: number;
defaultSampleSize?: number;
defaultSampleFlags?: number;
}
interface TrunBox {
sampleCount: number;
dataOffset?: number;
firstSampleFlags?: number;
samples: TrunSample[];
}
interface TrunSample {
duration?: number;
size?: number;
flags?: number;
compositionTimeOffset?: number;
}
interface SidxBox {
referenceId: number;
timescale: number;
earliestPresentationTime: number;
firstOffset: number;
references: SidxReference[];
}
interface SidxReference {
referenceType: number;
referencedSize: number;
subsegmentDuration: number;
startsWithSap: boolean;
sapType: number;
sapDeltaTime: number;
}
interface SampleFlags {
isLeading: number;
dependsOn: number;
isDependedOn: number;
hasRedundancy: number;
degradPrio: number;
isNonSync: boolean;
}