CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/npm-wavesurfer-js

Interactive audio waveform rendering and playback library for web applications

Pending
Overview
Eval results
Files

docs/audio-recording.md

Audio Recording

Real-time audio recording with waveform visualization, MediaRecorder integration, and microphone access for creating audio recording applications with live visual feedback.

Capabilities

Record Plugin Class

Create and manage audio recording with real-time waveform visualization.

/**
 * Record plugin for real-time audio recording with waveform visualization.
 * Built on the browser MediaRecorder / getUserMedia APIs.
 */
class RecordPlugin extends BasePlugin<RecordPluginEvents, RecordPluginOptions> {
  /**
   * Create a record plugin instance (attach it with wavesurfer.registerPlugin)
   * @param options - Recording configuration options
   * @returns New RecordPlugin instance
   */
  static create(options?: RecordPluginOptions): RecordPlugin;
  
  /**
   * Start recording audio from microphone
   * @param options - Optional recording start options (specific device or custom stream)
   * @returns Promise that resolves when recording starts; presumably rejects
   *   when microphone access is denied (see the "device-error" event) — confirm
   */
  startRecording(options?: StartRecordingOptions): Promise<void>;
  
  /**
   * Stop recording and return recorded audio blob
   * @returns Blob containing recorded audio data
   */
  stopRecording(): Blob;
  
  /**
   * Pause the current recording (continue later with resumeRecording)
   */
  pauseRecording(): void;
  
  /**
   * Resume a paused recording
   */
  resumeRecording(): void;
  
  /**
   * Get the currently recorded audio data
   * @returns Blob of recorded audio or null if not recording
   */
  getRecordedData(): Blob | null;
  
  /**
   * Check if currently recording
   * @returns True if actively recording
   */
  isRecording(): boolean;
  
  /**
   * Check if recording is paused
   * @returns True if recording is paused
   */
  isPaused(): boolean;
}

Usage Examples:

// Record plugin entry point (ESM build shipped in the package's dist folder).
import Record from "wavesurfer.js/dist/plugins/record.esm.js";

// Create record plugin with options
const record = Record.create({
  renderRecordedAudio: true,
  scrollingWaveform: true,
  audioBitsPerSecond: 128000,
  mimeType: "audio/webm",
});

// Attach the plugin to an existing wavesurfer instance (created elsewhere).
wavesurfer.registerPlugin(record);

// Start recording
document.getElementById("record-btn").addEventListener("click", async () => {
  try {
    // startRecording is async — it resolves once recording has begun
    // (presumably after the microphone permission prompt; confirm).
    await record.startRecording();
    console.log("Recording started");
  } catch (error) {
    console.error("Failed to start recording:", error);
  }
});

// Stop and save recording
document.getElementById("stop-btn").addEventListener("click", () => {
  const recordedBlob = record.stopRecording();
  
  // Save or play back the recording
  const url = URL.createObjectURL(recordedBlob);
  const audio = new Audio(url);
  audio.play();
  
  // Or save to file (saveRecording is an app-provided helper, not part of the plugin)
  saveRecording(recordedBlob, "recording.webm");
});

// Pause/resume controls: one button toggles between pause and resume.
document.getElementById("pause-btn").addEventListener("click", () => {
  if (record.isPaused()) {
    record.resumeRecording();
  } else {
    record.pauseRecording();
  }
});

// Check recording state
console.log(`Recording: ${record.isRecording()}, Paused: ${record.isPaused()}`);

Recording Configuration

Configure recording quality, format, and behavior options.

/** Configuration accepted by RecordPlugin.create(). */
interface RecordPluginOptions {
  /** Whether to render recorded audio as waveform, defaults to false */
  renderRecordedAudio?: boolean;
  
  /** Enable scrolling waveform during recording, defaults to false */
  scrollingWaveform?: boolean;
  
  /** Audio bitrate (bits/second) passed to MediaRecorder, defaults to 128000 */
  audioBitsPerSecond?: number;
  
  /** MIME type for recorded audio (must be supported by the browser's MediaRecorder), defaults to "audio/webm" */
  mimeType?: string;
  
  /** Start recording automatically when plugin loads, defaults to false */
  autoRecord?: boolean;
  
  /** MediaRecorder timeslice in milliseconds between data chunks, defaults to undefined */
  mediaRecorderTimeslice?: number;
  
  /** Continue recording after page reload, defaults to false
   *  NOTE(review): exact semantics unclear from this doc — verify against the implementation */
  continuousRecording?: boolean;
}

/** Per-call options for RecordPlugin.startRecording(). */
interface StartRecordingOptions {
  /** ID of the audio input device to record from (from enumerateDevices()) */
  deviceId?: string;
  
  /** Custom MediaStream to record from (e.g. screen capture) instead of the microphone */
  stream?: MediaStream;
}

Usage Examples:

// High-quality recording setup
// High-quality recording setup
const highQualityRecord = Record.create({
  renderRecordedAudio: true,
  scrollingWaveform: true,
  audioBitsPerSecond: 320000, // High bitrate
  mimeType: "audio/webm;codecs=opus",
  mediaRecorderTimeslice: 100, // 100ms chunks
});

// Mobile-optimized recording
const mobileRecord = Record.create({
  renderRecordedAudio: false, // Save CPU
  scrollingWaveform: false,
  audioBitsPerSecond: 64000, // Lower bitrate for mobile
  mimeType: "audio/mp4", // Better mobile support
});

// Auto-recording setup: starts capturing as soon as the plugin loads.
const autoRecord = Record.create({
  autoRecord: true,
  renderRecordedAudio: true,
  continuousRecording: true,
});

// Device-specific recording
// `record` is the plugin instance created earlier; `deviceId` comes from
// navigator.mediaDevices.enumerateDevices().
async function startRecordingFromDevice(deviceId) {
  await record.startRecording({ deviceId });
}

// Custom stream recording (e.g., from screen capture)
// Bypasses microphone capture and records the provided MediaStream instead.
async function recordFromStream(mediaStream) {
  await record.startRecording({ stream: mediaStream });
}

Device Access and Management

Handle microphone permissions and device selection for recording.

/**
 * Constraints applied to the microphone track; extends the standard
 * MediaTrackConstraints from the Media Capture and Streams spec.
 */
interface RecordPluginDeviceOptions extends MediaTrackConstraints {
  /** Audio device ID to use for recording (from enumerateDevices()) */
  deviceId?: string;
  
  /** Audio sample rate in Hz (e.g. 44100) */
  sampleRate?: number;
  
  /** Number of audio channels (1 = mono, 2 = stereo) */
  channelCount?: number;
  
  /** Echo cancellation (browser-side DSP) */
  echoCancellation?: boolean;
  
  /** Noise suppression (browser-side DSP) */
  noiseSuppression?: boolean;
  
  /** Auto gain control (browser-side DSP) */
  autoGainControl?: boolean;
}

Usage Examples:

// Request microphone permission
/**
 * Request microphone access with fixed quality constraints.
 * Resolves with the granted MediaStream; logs and rethrows on denial so
 * callers can surface the error themselves.
 * @returns {Promise<MediaStream>}
 */
async function requestMicrophoneAccess() {
  const constraints = {
    audio: {
      sampleRate: 44100,
      channelCount: 2,
      echoCancellation: true,
      noiseSuppression: true,
      autoGainControl: true,
    },
  };
  try {
    const micStream = await navigator.mediaDevices.getUserMedia(constraints);
    console.log("Microphone access granted");
    return micStream;
  } catch (error) {
    console.error("Microphone access denied:", error);
    throw error;
  }
}

// Enumerate audio devices
/**
 * Enumerate available audio input devices.
 * @returns {Promise<Array<{deviceId: string, label: string}>>} one entry per
 *   microphone, with a synthetic label when the browser withholds the real one.
 */
async function getAudioDevices() {
  const allDevices = await navigator.mediaDevices.enumerateDevices();
  const inputs = [];
  for (const { kind, deviceId, label } of allDevices) {
    if (kind !== 'audioinput') continue;
    inputs.push({
      deviceId,
      label: label || `Microphone ${deviceId.slice(0, 8)}`,
    });
  }
  return inputs;
}

// Device selection UI
/**
 * Populate the #device-select dropdown with available microphones and
 * restart recording on the newly chosen device when the selection changes.
 */
async function setupDeviceSelector() {
  const devices = await getAudioDevices();
  const select = document.getElementById("device-select");
  
  for (const { deviceId, label } of devices) {
    const option = document.createElement("option");
    option.value = deviceId;
    option.textContent = label;
    select.appendChild(option);
  }
  
  select.addEventListener("change", async (event) => {
    const deviceId = event.target.value;
    // Stop any in-progress recording before switching input devices.
    if (record.isRecording()) {
      record.stopRecording();
    }
    await record.startRecording({ deviceId });
  });
}

Recording Events

Events for tracking recording state, progress, and data availability.

/**
 * Events emitted by the Record plugin. Each value is the tuple of
 * arguments passed to the registered listener.
 */
interface RecordPluginEvents extends BasePluginEvents {
  /** When recording starts (no payload) */
  'record-start': [];
  
  /** When recording stops; payload is the complete recorded audio */
  'record-end': [blob: Blob];
  
  /** When recording is paused */
  'record-pause': [];
  
  /** When recording is resumed */
  'record-resume': [];
  
  /** When a chunk of recorded audio data is available (per timeslice) */
  'record-data-available': [blob: Blob];
  
  /** When recording progress updates; units of `duration` are not stated here — confirm (seconds vs ms) */
  'record-progress': [duration: number];
  
  /** When microphone permission is granted and the input stream is live */
  'device-ready': [stream: MediaStream];
  
  /** When device/permission error occurs (e.g. NotAllowedError, NotFoundError) */
  'device-error': [error: Error];
}

Usage Examples:

// Recording state management
// Recording state management: wire plugin lifecycle events to the demo UI.
record.on("record-start", () => {
  console.log("Recording started");
  document.getElementById("record-status").textContent = "Recording...";
  document.getElementById("record-btn").disabled = true;
  document.getElementById("stop-btn").disabled = false;
});

record.on("record-end", (blob) => {
  console.log("Recording stopped, size:", blob.size);
  document.getElementById("record-status").textContent = "Recording complete";
  document.getElementById("record-btn").disabled = false;
  document.getElementById("stop-btn").disabled = true;
  
  // Auto-save recording (saveRecording is an app-provided helper)
  saveRecording(blob);
});

record.on("record-pause", () => {
  console.log("Recording paused");
  document.getElementById("pause-btn").textContent = "Resume";
});

record.on("record-resume", () => {
  console.log("Recording resumed");
  document.getElementById("pause-btn").textContent = "Pause";
});

// Progress tracking
// NOTE(review): this registers a second "record-start" listener in addition
// to the UI one above; both fire on every start.
let recordingStartTime;
record.on("record-start", () => {
  recordingStartTime = Date.now();
});

record.on("record-progress", (duration) => {
  // NOTE(review): the `duration` payload is ignored; elapsed time is
  // recomputed from Date.now(). Confirm whether `duration` could be used
  // directly instead.
  const elapsed = (Date.now() - recordingStartTime) / 1000;
  document.getElementById("record-time").textContent = formatTime(elapsed);
  document.getElementById("progress-bar").style.width = `${(elapsed / 300) * 100}%`; // 5 min max
});

// Data handling
record.on("record-data-available", (blob) => {
  console.log(`Recorded chunk: ${blob.size} bytes`);
  // Stream recording data for real-time processing
  processAudioChunk(blob);
});

// Device management
record.on("device-ready", (stream) => {
  console.log("Microphone ready:", stream.getAudioTracks()[0].label);
  showRecordingIndicator();
});

record.on("device-error", (error) => {
  console.error("Device error:", error);
  
  // Map common getUserMedia error names to user-facing messages.
  if (error.name === "NotAllowedError") {
    showError("Microphone permission denied. Please allow microphone access.");
  } else if (error.name === "NotFoundError") {
    showError("No microphone found. Please connect a microphone.");
  } else {
    showError(`Recording error: ${error.message}`);
  }
});

Advanced Recording Features

Additional functionality for professional recording applications.

Usage Examples:

// Real-time audio analysis during recording
// Module-level handles: populated once the mic stream is ready and read by
// visualizeAudio() below.
let audioContext;
let analyser;
let dataArray;

record.on("device-ready", (stream) => {
  // Set up real-time analysis on the live input stream.
  audioContext = new AudioContext();
  const source = audioContext.createMediaStreamSource(stream);
  analyser = audioContext.createAnalyser();
  
  // 256-point FFT -> frequencyBinCount (128) byte-valued bins per frame.
  analyser.fftSize = 256;
  dataArray = new Uint8Array(analyser.frequencyBinCount);
  
  source.connect(analyser);
  
  // Start real-time visualization
  visualizeAudio();
});

/**
 * Animation-frame loop that reads the analyser's frequency bins and drives
 * the volume meter. Exits quietly when the analyser has not been set up.
 */
function visualizeAudio() {
  if (!analyser) return;
  
  analyser.getByteFrequencyData(dataArray);
  
  // Mean bin magnitude, normalised from 0..255 down to 0..1 for the meter.
  let total = 0;
  for (const bin of dataArray) {
    total += bin;
  }
  const average = total / dataArray.length;
  updateVolumeMeter(average / 255);
  
  // Schedule the next frame of the loop.
  requestAnimationFrame(visualizeAudio);
}

// Automatic recording triggers
let silenceTimeout;
let voiceActivationArmed = false; // prevents duplicate listener registration
const SILENCE_THRESHOLD = 10; // Volume threshold
const SILENCE_DURATION = 2000; // 2 seconds of silence

/**
 * Auto-stop recording after a sustained period of silence.
 * Safe to call repeatedly: the "record-progress" listener is registered once.
 *
 * Fixes vs. the original: (1) every call previously added another
 * "record-progress" listener, stacking duplicate handlers and timers;
 * (2) before the analyser was set up, `dataArray?.reduce(...)` evaluated
 * to NaN, making the volume comparison meaningless.
 */
function startVoiceActivatedRecording() {
  if (voiceActivationArmed) return;
  voiceActivationArmed = true;

  record.on("record-progress", () => {
    // Skip until real-time analysis has been initialised (device-ready).
    if (!analyser || !dataArray) return;

    analyser.getByteFrequencyData(dataArray);
    const volume = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;

    if (volume < SILENCE_THRESHOLD) {
      // Start silence timer
      if (!silenceTimeout) {
        silenceTimeout = setTimeout(() => {
          record.stopRecording();
          console.log("Auto-stopped due to silence");
        }, SILENCE_DURATION);
      }
    } else {
      // Cancel silence timer on voice activity
      if (silenceTimeout) {
        clearTimeout(silenceTimeout);
        silenceTimeout = null;
      }
    }
  });
}

// Recording quality monitoring
record.on("record-data-available", (blob) => {
  // Check recording quality
  // Heuristic: a chunk under ~1 KB usually means little or no audio was
  // captured in the timeslice (muted or failing input) — tune per app.
  if (blob.size < 1000) { // Very small chunk
    console.warn("Poor recording quality detected");
    showQualityWarning();
  }
});

// Multi-session recording management
/**
 * Groups individual recording blobs into named sessions and lets a whole
 * session be exported as a single combined blob.
 */
class RecordingSession {
  constructor() {
    // Completed sessions: { name, startTime, recordings: [...] }
    this.recordings = [];
    // Session currently collecting recordings, or null when none is open.
    this.currentSession = null;
  }
  
  /** Begin a new named session; subsequent recordings belong to it. */
  startSession(name) {
    this.currentSession = {
      name,
      startTime: Date.now(),
      recordings: [],
    };
  }
  
  /** Attach a recorded blob to the active session (no-op when none is open). */
  addRecording(blob) {
    if (this.currentSession) {
      this.currentSession.recordings.push({
        blob,
        timestamp: Date.now(),
        duration: blob.size / (128000 / 8), // Rough duration estimate at 128 kbps
      });
    }
  }
  
  /** Close the active session and move it into the archive. */
  endSession() {
    if (this.currentSession) {
      this.recordings.push(this.currentSession);
      this.currentSession = null;
    }
  }
  
  /**
   * Export all recordings of a finished session as one blob.
   * @param {string} sessionName - Name passed to startSession.
   * @returns {Blob|undefined} Combined audio, or undefined if no such session.
   */
  exportSession(sessionName) {
    const session = this.recordings.find(s => s.name === sessionName);
    if (session) {
      // Combine all recordings in session
      return this.combineBlobs(session.recordings.map(r => r.blob));
    }
  }
  
  /**
   * Concatenate blobs in order, keeping the MIME type of the first chunk.
   * Bug fix: exportSession called this method but it was never defined, so
   * exporting any session previously threw a TypeError.
   * @param {Blob[]} blobs
   * @returns {Blob}
   */
  combineBlobs(blobs) {
    return new Blob(blobs, { type: blobs[0]?.type || "" });
  }
}

// Recording backup and recovery
/**
 * Persist recording chunks as they arrive so an interrupted recording can be
 * recovered after a crash or page reload.
 */
function setupRecordingBackup() {
  // Mirror every chunk into IndexedDB while recording is in progress.
  record.on("record-data-available", (chunk) => {
    storeRecordingChunk(chunk);
  });
  
  // A clean finish means the temporary backup is no longer needed.
  record.on("record-end", () => {
    clearRecordingChunks();
  });
  
  // On startup, offer to restore any chunks left over from an interruption.
  window.addEventListener("load", async () => {
    const leftoverChunks = await getIncompleteRecordingChunks();
    if (leftoverChunks.length === 0) return;
    const recovered = combineBlobs(leftoverChunks);
    showRecoveryDialog(recovered);
  });
}

// Format conversion and export
/**
 * Download a recorded blob, optionally converting it to WAV first.
 *
 * Fixes vs. the original: the object URL created for a throwaway Audio
 * element was never revoked (memory leak), and the export was gated on a
 * "loadeddata" event that never fires for undecodable formats, silently
 * dropping the download. The Audio element served no purpose for the
 * conversion, so both it and the URL are removed.
 *
 * @param {Blob} blob - Recorded audio data.
 * @param {string} [format="wav"] - Target file extension.
 * @returns {Promise<void>|undefined} Pending conversion for WAV exports.
 */
function exportRecording(blob, format = "wav") {
  if (format === "wav") {
    return convertToWav(blob).then(wavBlob => {
      downloadBlob(wavBlob, "recording.wav");
    });
  }
  downloadBlob(blob, `recording.${format}`);
}

/**
 * Trigger a browser download of a blob under the given filename by clicking
 * a temporary anchor element.
 * @param {Blob} blob - Data to download.
 * @param {string} filename - Suggested name for the saved file.
 */
function downloadBlob(blob, filename) {
  const objectUrl = URL.createObjectURL(blob);
  const anchor = document.createElement("a");
  anchor.href = objectUrl;
  anchor.download = filename;
  anchor.click();
  // Release the object URL once the download has been handed off.
  URL.revokeObjectURL(objectUrl);
}

Install with Tessl CLI

npx tessl i tessl/npm-wavesurfer-js

docs

audio-processing.md

audio-recording.md

core-waveform-control.md

event-system.md

index.md

plugin-system.md

regions-plugin.md

timeline-navigation.md

visual-customization.md

tile.json