<!-- Source: docs/audio-recording.md -->
# Audio Recording

Real-time audio recording with waveform visualization, MediaRecorder integration, and microphone access for creating audio recording applications with live visual feedback.

## Capabilities

### Record Plugin Class

Create and manage audio recording with real-time waveform visualization.

```typescript { .api }
/**
 * Record plugin for real-time audio recording with waveform visualization
 */
class RecordPlugin extends BasePlugin<RecordPluginEvents, RecordPluginOptions> {
  /**
   * Create a record plugin instance
   * @param options - Recording configuration options
   * @returns New RecordPlugin instance
   */
  static create(options?: RecordPluginOptions): RecordPlugin;

  /**
   * Start recording audio from microphone
   * @param options - Optional recording start options
   * @returns Promise that resolves when recording starts
   */
  startRecording(options?: StartRecordingOptions): Promise<void>;

  /**
   * Stop recording and return recorded audio blob
   * @returns Blob containing recorded audio data
   */
  stopRecording(): Blob;

  /**
   * Pause the current recording
   */
  pauseRecording(): void;

  /**
   * Resume a paused recording
   */
  resumeRecording(): void;

  /**
   * Get the currently recorded audio data
   * @returns Blob of recorded audio or null if not recording
   */
  getRecordedData(): Blob | null;

  /**
   * Check if currently recording
   * @returns True if actively recording
   */
  isRecording(): boolean;

  /**
   * Check if recording is paused
   * @returns True if recording is paused
   */
  isPaused(): boolean;
}
```
**Usage Examples:**

```typescript
import Record from "wavesurfer.js/dist/plugins/record.esm.js";

// Create record plugin with options
const record = Record.create({
  renderRecordedAudio: true,
  scrollingWaveform: true,
  audioBitsPerSecond: 128000,
  mimeType: "audio/webm",
});

wavesurfer.registerPlugin(record);

// Start recording
document.getElementById("record-btn").addEventListener("click", async () => {
  try {
    await record.startRecording();
    console.log("Recording started");
  } catch (error) {
    console.error("Failed to start recording:", error);
  }
});

// Stop and save recording
document.getElementById("stop-btn").addEventListener("click", () => {
  const recordedBlob = record.stopRecording();

  // Save or play back the recording
  const url = URL.createObjectURL(recordedBlob);
  const audio = new Audio(url);
  audio.play();

  // Or save to file
  saveRecording(recordedBlob, "recording.webm");
});

// Pause/resume controls
document.getElementById("pause-btn").addEventListener("click", () => {
  if (record.isPaused()) {
    record.resumeRecording();
  } else {
    record.pauseRecording();
  }
});

// Check recording state
console.log(`Recording: ${record.isRecording()}, Paused: ${record.isPaused()}`);
```
### Recording Configuration

Configure recording quality, format, and behavior options.

```typescript { .api }
interface RecordPluginOptions {
  /** Whether to render recorded audio as waveform, defaults to false */
  renderRecordedAudio?: boolean;

  /** Enable scrolling waveform during recording, defaults to false */
  scrollingWaveform?: boolean;

  /** Audio bitrate for recording quality, defaults to 128000 */
  audioBitsPerSecond?: number;

  /** MIME type for recorded audio, defaults to "audio/webm" */
  mimeType?: string;

  /** Start recording automatically when plugin loads, defaults to false */
  autoRecord?: boolean;

  /** MediaRecorder timeslice for data chunks, defaults to undefined */
  mediaRecorderTimeslice?: number;

  /** Continue recording after page reload, defaults to false */
  continuousRecording?: boolean;
}

interface StartRecordingOptions {
  /** Audio device constraints */
  deviceId?: string;

  /** Custom MediaStream to record from */
  stream?: MediaStream;
}
```

**Usage Examples:**

```typescript
// High-quality recording setup
const highQualityRecord = Record.create({
  renderRecordedAudio: true,
  scrollingWaveform: true,
  audioBitsPerSecond: 320000, // High bitrate
  mimeType: "audio/webm;codecs=opus",
  mediaRecorderTimeslice: 100, // 100ms chunks
});

// Mobile-optimized recording
const mobileRecord = Record.create({
  renderRecordedAudio: false, // Save CPU
  scrollingWaveform: false,
  audioBitsPerSecond: 64000, // Lower bitrate for mobile
  mimeType: "audio/mp4", // Better mobile support
});

// Auto-recording setup
const autoRecord = Record.create({
  autoRecord: true,
  renderRecordedAudio: true,
  continuousRecording: true,
});

// Device-specific recording
async function startRecordingFromDevice(deviceId) {
  await record.startRecording({ deviceId });
}

// Custom stream recording (e.g., from screen capture)
async function recordFromStream(mediaStream) {
  await record.startRecording({ stream: mediaStream });
}
```
### Device Access and Management

Handle microphone permissions and device selection for recording.

```typescript { .api }
interface RecordPluginDeviceOptions extends MediaTrackConstraints {
  /** Audio device ID to use for recording */
  deviceId?: string;

  /** Audio sample rate */
  sampleRate?: number;

  /** Number of audio channels */
  channelCount?: number;

  /** Echo cancellation */
  echoCancellation?: boolean;

  /** Noise suppression */
  noiseSuppression?: boolean;

  /** Auto gain control */
  autoGainControl?: boolean;
}
```

**Usage Examples:**

```typescript
// Request microphone permission
async function requestMicrophoneAccess() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: {
        sampleRate: 44100,
        channelCount: 2,
        echoCancellation: true,
        noiseSuppression: true,
        autoGainControl: true,
      }
    });

    console.log("Microphone access granted");
    return stream;
  } catch (error) {
    console.error("Microphone access denied:", error);
    throw error;
  }
}

// Enumerate audio devices
async function getAudioDevices() {
  const devices = await navigator.mediaDevices.enumerateDevices();
  const audioInputs = devices.filter(device => device.kind === 'audioinput');

  return audioInputs.map(device => ({
    deviceId: device.deviceId,
    label: device.label || `Microphone ${device.deviceId.slice(0, 8)}`,
  }));
}

// Device selection UI
async function setupDeviceSelector() {
  const devices = await getAudioDevices();
  const select = document.getElementById("device-select");

  devices.forEach(device => {
    const option = document.createElement("option");
    option.value = device.deviceId;
    option.textContent = device.label;
    select.appendChild(option);
  });

  select.addEventListener("change", async (event) => {
    const deviceId = event.target.value;
    if (record.isRecording()) {
      record.stopRecording();
    }
    await record.startRecording({ deviceId });
  });
}
```
### Recording Events

Events for tracking recording state, progress, and data availability.

```typescript { .api }
interface RecordPluginEvents extends BasePluginEvents {
  /** When recording starts */
  'record-start': [];

  /** When recording stops */
  'record-end': [blob: Blob];

  /** When recording is paused */
  'record-pause': [];

  /** When recording is resumed */
  'record-resume': [];

  /** When recorded audio data is available */
  'record-data-available': [blob: Blob];

  /** When recording progress updates */
  'record-progress': [duration: number];

  /** When microphone permission is granted */
  'device-ready': [stream: MediaStream];

  /** When device/permission error occurs */
  'device-error': [error: Error];
}
```

**Usage Examples:**

```typescript
// Recording state management
record.on("record-start", () => {
  console.log("Recording started");
  document.getElementById("record-status").textContent = "Recording...";
  document.getElementById("record-btn").disabled = true;
  document.getElementById("stop-btn").disabled = false;
});

record.on("record-end", (blob) => {
  console.log("Recording stopped, size:", blob.size);
  document.getElementById("record-status").textContent = "Recording complete";
  document.getElementById("record-btn").disabled = false;
  document.getElementById("stop-btn").disabled = true;

  // Auto-save recording
  saveRecording(blob);
});

record.on("record-pause", () => {
  console.log("Recording paused");
  document.getElementById("pause-btn").textContent = "Resume";
});

record.on("record-resume", () => {
  console.log("Recording resumed");
  document.getElementById("pause-btn").textContent = "Pause";
});

// Progress tracking
let recordingStartTime;
record.on("record-start", () => {
  recordingStartTime = Date.now();
});

record.on("record-progress", (duration) => {
  const elapsed = (Date.now() - recordingStartTime) / 1000;
  document.getElementById("record-time").textContent = formatTime(elapsed);
  document.getElementById("progress-bar").style.width = `${(elapsed / 300) * 100}%`; // 5 min max
});

// Data handling
record.on("record-data-available", (blob) => {
  console.log(`Recorded chunk: ${blob.size} bytes`);
  // Stream recording data for real-time processing
  processAudioChunk(blob);
});

// Device management
record.on("device-ready", (stream) => {
  console.log("Microphone ready:", stream.getAudioTracks()[0].label);
  showRecordingIndicator();
});

record.on("device-error", (error) => {
  console.error("Device error:", error);

  if (error.name === "NotAllowedError") {
    showError("Microphone permission denied. Please allow microphone access.");
  } else if (error.name === "NotFoundError") {
    showError("No microphone found. Please connect a microphone.");
  } else {
    showError(`Recording error: ${error.message}`);
  }
});
```
### Advanced Recording Features

Additional functionality for professional recording applications.

**Usage Examples:**

```typescript
// Real-time audio analysis during recording
let audioContext;
let analyser;
let dataArray;

record.on("device-ready", (stream) => {
  // Set up real-time analysis
  audioContext = new AudioContext();
  const source = audioContext.createMediaStreamSource(stream);
  analyser = audioContext.createAnalyser();

  analyser.fftSize = 256;
  dataArray = new Uint8Array(analyser.frequencyBinCount);

  source.connect(analyser);

  // Start real-time visualization
  visualizeAudio();
});

function visualizeAudio() {
  if (!analyser) return;

  analyser.getByteFrequencyData(dataArray);

  // Update volume meter
  const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
  updateVolumeMeter(average / 255);

  // Continue visualization
  requestAnimationFrame(visualizeAudio);
}

// Automatic recording triggers
let silenceTimeout;
const SILENCE_THRESHOLD = 10; // Volume threshold
const SILENCE_DURATION = 2000; // 2 seconds of silence

function startVoiceActivatedRecording() {
  record.on("record-progress", () => {
    analyser?.getByteFrequencyData(dataArray);
    const volume = dataArray?.reduce((sum, value) => sum + value, 0) / (dataArray?.length || 1);

    if (volume < SILENCE_THRESHOLD) {
      // Start silence timer
      if (!silenceTimeout) {
        silenceTimeout = setTimeout(() => {
          record.stopRecording();
          console.log("Auto-stopped due to silence");
        }, SILENCE_DURATION);
      }
    } else {
      // Cancel silence timer on voice activity
      if (silenceTimeout) {
        clearTimeout(silenceTimeout);
        silenceTimeout = null;
      }
    }
  });
}

// Recording quality monitoring
record.on("record-data-available", (blob) => {
  // Check recording quality
  if (blob.size < 1000) { // Very small chunk
    console.warn("Poor recording quality detected");
    showQualityWarning();
  }
});

// Multi-session recording management
class RecordingSession {
  constructor() {
    this.recordings = [];
    this.currentSession = null;
  }

  startSession(name) {
    this.currentSession = {
      name,
      startTime: Date.now(),
      recordings: [],
    };
  }

  addRecording(blob) {
    if (this.currentSession) {
      this.currentSession.recordings.push({
        blob,
        timestamp: Date.now(),
        duration: blob.size / (128000 / 8), // Rough duration estimate
      });
    }
  }

  endSession() {
    if (this.currentSession) {
      this.recordings.push(this.currentSession);
      this.currentSession = null;
    }
  }

  exportSession(sessionName) {
    const session = this.recordings.find(s => s.name === sessionName);
    if (session) {
      // Combine all recordings in session
      return this.combineBlobs(session.recordings.map(r => r.blob));
    }
  }
}

// Recording backup and recovery
function setupRecordingBackup() {
  record.on("record-data-available", (blob) => {
    // Store chunks in IndexedDB for recovery
    storeRecordingChunk(blob);
  });

  record.on("record-end", () => {
    // Clear temporary chunks on successful completion
    clearRecordingChunks();
  });

  // Recovery on page load
  window.addEventListener("load", async () => {
    const incompleteChunks = await getIncompleteRecordingChunks();
    if (incompleteChunks.length > 0) {
      const recovered = combineBlobs(incompleteChunks);
      showRecoveryDialog(recovered);
    }
  });
}

// Format conversion and export
function exportRecording(blob, format = "wav") {
  const audioUrl = URL.createObjectURL(blob);
  const audio = new Audio(audioUrl);

  audio.addEventListener("loadeddata", () => {
    if (format === "wav") {
      convertToWav(blob).then(wavBlob => {
        downloadBlob(wavBlob, "recording.wav");
      });
    } else {
      downloadBlob(blob, `recording.${format}`);
    }
  });
}

function downloadBlob(blob, filename) {
  const url = URL.createObjectURL(blob);
  const a = document.createElement("a");
  a.href = url;
  a.download = filename;
  a.click();
  URL.revokeObjectURL(url);
}
```