or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

audio.mdcore-management.mdentities.mdevents.mdindex.mdinteractions.mdmessaging.mdrestactions.mdsharding.md

audio.mddocs/

0

# Audio System

1

2

Voice channel connections with audio sending/receiving capabilities, including native audio processing and WebRTC support for Discord voice functionality.

3

4

## Capabilities

5

6

### Audio Manager

7

8

Core audio management interface for establishing and controlling voice connections.

9

10

```java { .api }

11

/**

12

* The AudioManager represents the connection to Discord voice servers.

13

*/

14

interface AudioManager {

15

/** Open audio connection to voice channel */

16

void openAudioConnection(VoiceChannel channel);

17

18

/** Close current audio connection */

19

void closeAudioConnection();

20

21

/** Check if connected to voice */

22

boolean isConnected();

23

24

/** Check if attempting to connect */

25

boolean isAttemptingToConnect();

26

27

/** Get connected voice channel */

28

VoiceChannel getConnectedChannel();

29

30

/** Get queued voice channel (while connecting) */

31

VoiceChannel getQueuedAudioConnection();

32

33

/** Set audio sending handler */

34

void setSendingHandler(AudioSendHandler sendHandler);

35

36

/** Set audio receiving handler */

37

void setReceivingHandler(AudioReceiveHandler receiveHandler);

38

39

/** Get current sending handler */

40

AudioSendHandler getSendingHandler();

41

42

/** Get current receiving handler */

43

AudioReceiveHandler getReceivingHandler();

44

45

/** Get connection status */

46

ConnectionStatus getConnectionStatus();

47

48

/** Set self-mute status */

49

void setSelfMuted(boolean muted);

50

51

/** Set self-deafen status */

52

void setSelfDeafened(boolean deafened);

53

54

/** Check if self-muted */

55

boolean isSelfMuted();

56

57

/** Check if self-deafened */

58

boolean isSelfDeafened();

59

60

/** Get guild this AudioManager belongs to */

61

Guild getGuild();

62

63

/** Get JDA instance */

64

JDA getJDA();

65

66

/** Set connection listener */

67

void setConnectionListener(ConnectionListener listener);

68

69

/** Get connection listener */

70

ConnectionListener getConnectionListener();

71

}

72

```

73

74

**Usage Examples:**

75

76

```java

77

import net.dv8tion.jda.api.managers.AudioManager;

78

import net.dv8tion.jda.api.entities.channel.concrete.VoiceChannel;

79

80

// Basic voice connection

81

Guild guild = event.getGuild();

82

AudioManager audioManager = guild.getAudioManager();

83

VoiceChannel voiceChannel = guild.getVoiceChannelById("123456789");

84

85

// Connect to voice channel

86

audioManager.openAudioConnection(voiceChannel);

87

88

// Check connection status

89

if (audioManager.isConnected()) {

90

System.out.println("Connected to: " + audioManager.getConnectedChannel().getName());

91

}

92

93

// Set bot as muted when joining

94

audioManager.setSelfMuted(true);

95

audioManager.openAudioConnection(voiceChannel);

96

97

// Disconnect from voice

98

audioManager.closeAudioConnection();

99

100

// Move to different voice channel

101

VoiceChannel newChannel = guild.getVoiceChannelById("987654321");

102

audioManager.openAudioConnection(newChannel); // Automatically disconnects from current

103

```

104

105

### Audio Send Handler

106

107

Interface for sending audio data to Discord voice channels.

108

109

```java { .api }

110

/**

111

* Interface for providing audio data to send to Discord.

112

*/

113

interface AudioSendHandler {

114

/** Check if audio data is available to send */

115

boolean canProvide();

116

117

/** Provide 20ms of audio data */

118

ByteBuffer provide20MsAudio();

119

120

/** Check if audio data is Opus encoded */

121

boolean isOpus();

122

}

123

```

124

125

**Usage Examples:**

126

127

```java

128

import net.dv8tion.jda.api.audio.AudioSendHandler;

129

import java.nio.ByteBuffer;

130

import java.util.Queue;

131

import java.util.concurrent.ConcurrentLinkedQueue;

132

133

public class AudioPlayer implements AudioSendHandler {

134

private final Queue<byte[]> audioQueue = new ConcurrentLinkedQueue<>();

135

private boolean isPlaying = false;

136

137

// Add audio data to queue (from file, stream, etc.)

138

public void queueAudio(byte[] audioData) {

139

audioQueue.offer(audioData);

140

isPlaying = true;

141

}

142

143

public void stop() {

144

isPlaying = false;

145

audioQueue.clear();

146

}

147

148

@Override

149

public boolean canProvide() {

150

return isPlaying && !audioQueue.isEmpty();

151

}

152

153

@Override

154

public ByteBuffer provide20MsAudio() {

155

byte[] audioData = audioQueue.poll();

156

if (audioData == null) {

157

isPlaying = false;

158

return null;

159

}

160

return ByteBuffer.wrap(audioData);

161

}

162

163

@Override

164

public boolean isOpus() {

165

return true; // Return true if audio is pre-encoded in Opus

166

}

167

}

168

169

// Usage with AudioManager

170

AudioPlayer player = new AudioPlayer();

171

audioManager.setSendingHandler(player);

172

173

// Load and play audio file

174

try {

175

byte[] audioData = loadAudioFile("music.opus");

176

player.queueAudio(audioData);

177

} catch (IOException e) {

178

System.err.println("Failed to load audio: " + e.getMessage());

179

}

180

181

// Stop playback

182

player.stop();

183

audioManager.setSendingHandler(null);

184

```

185

186

### Audio Receive Handler

187

188

Interface for receiving audio data from Discord voice channels.

189

190

```java { .api }

191

/**

192

* Interface for receiving audio data from Discord.

193

*/

194

interface AudioReceiveHandler {

195

/** Check if handler wants to receive user audio */

196

boolean canReceiveUser();

197

198

/** Check if handler wants to receive combined audio */

199

boolean canReceiveCombined();

200

201

/** Handle audio from specific user */

202

void handleUserAudio(UserAudio userAudio);

203

204

/** Handle combined audio from all users */

205

void handleCombinedAudio(CombinedAudio combinedAudio);

206

207

/** Whether to include the given user's audio in the combined audio stream */

208

boolean includeUserInCombinedAudio(User user);

209

}

210

211

/**

212

* Represents audio data from a specific user.

213

*/

214

interface UserAudio {

215

/** Get user who sent the audio */

216

User getUser();

217

218

/** Get audio data */

219

short[] getAudioData(double volume);

220

byte[] getAudioData(AudioFormat format);

221

222

/** Get audio format */

223

AudioFormat getFormat();

224

}

225

226

/**

227

* Represents combined audio from multiple users.

228

*/

229

interface CombinedAudio {

230

/** Get list of users included in combined audio */

231

List<User> getUsers();

232

233

/** Get combined audio data */

234

short[] getAudioData(double volume);

235

byte[] getAudioData(AudioFormat format);

236

237

/** Get audio format */

238

AudioFormat getFormat();

239

}

240

```

241

242

**Usage Examples:**

243

244

```java

245

import net.dv8tion.jda.api.audio.AudioReceiveHandler;

246

import net.dv8tion.jda.api.audio.UserAudio;

247

import net.dv8tion.jda.api.audio.CombinedAudio;

248

import javax.sound.sampled.AudioFormat;

249

import java.io.FileOutputStream;

250

import java.io.IOException;

251

252

public class AudioRecorder implements AudioReceiveHandler {

253

private final Map<User, FileOutputStream> userRecordings = new HashMap<>();

254

private FileOutputStream combinedRecording;

255

private boolean recording = false;

256

257

public void startRecording() {

258

recording = true;

259

try {

260

combinedRecording = new FileOutputStream("combined_recording.wav");

261

} catch (IOException e) {

262

System.err.println("Failed to start recording: " + e.getMessage());

263

}

264

}

265

266

public void stopRecording() {

267

recording = false;

268

269

// Close all user recordings

270

for (FileOutputStream stream : userRecordings.values()) {

271

try {

272

stream.close();

273

} catch (IOException e) {

274

System.err.println("Error closing user recording: " + e.getMessage());

275

}

276

}

277

userRecordings.clear();

278

279

// Close combined recording

280

if (combinedRecording != null) {

281

try {

282

combinedRecording.close();

283

} catch (IOException e) {

284

System.err.println("Error closing combined recording: " + e.getMessage());

285

}

286

}

287

}

288

289

@Override

290

public boolean canReceiveUser() {

291

return recording;

292

}

293

294

@Override

295

public boolean canReceiveCombined() {

296

return recording;

297

}

298

299

@Override

300

public void handleUserAudio(UserAudio userAudio) {

301

if (!recording) return;

302

303

User user = userAudio.getUser();

304

305

// Create individual recording file for each user

306

if (!userRecordings.containsKey(user)) {

307

try {

308

String filename = "recording_" + user.getName() + ".wav";

309

userRecordings.put(user, new FileOutputStream(filename));

310

} catch (IOException e) {

311

System.err.println("Failed to create recording for " + user.getName());

312

return;

313

}

314

}

315

316

// Write audio data

317

try {

318

FileOutputStream stream = userRecordings.get(user);

319

byte[] audioData = userAudio.getAudioData(userAudio.getFormat());

320

stream.write(audioData);

321

} catch (IOException e) {

322

System.err.println("Error writing audio for " + user.getName());

323

}

324

}

325

326

@Override

327

public void handleCombinedAudio(CombinedAudio combinedAudio) {

328

if (!recording || combinedRecording == null) return;

329

330

try {

331

byte[] audioData = combinedAudio.getAudioData(combinedAudio.getFormat());

332

combinedRecording.write(audioData);

333

} catch (IOException e) {

334

System.err.println("Error writing combined audio");

335

}

336

}

337

338

@Override

339

public boolean includeUserInCombinedAudio(User user) {

340

// Include all users except bots

341

return !user.isBot();

342

}

343

}

344

345

// Usage

346

AudioRecorder recorder = new AudioRecorder();

347

audioManager.setReceivingHandler(recorder);

348

349

// Start recording

350

recorder.startRecording();

351

352

// Stop recording after some time

353

Timer timer = new Timer();

354

timer.schedule(new TimerTask() {

355

@Override

356

public void run() {

357

recorder.stopRecording();

358

audioManager.setReceivingHandler(null);

359

}

360

}, 60000); // Record for 1 minute

361

```

362

363

### Audio Connection Listener

364

365

Interface for monitoring voice connection events and status changes.

366

367

```java { .api }

368

/**

369

* Listener for audio connection events.

370

*/

371

interface ConnectionListener {

372

/** Called whenever the connection status changes */

373

void onStatusChange(ConnectionStatus status);

374

375

/** Called when ping is received */

376

void onPing(long ping);

377

378

/** Called when speaking state changes */

379

void onUserSpeaking(User user, boolean speaking);

380

}

381

382

/**

383

* Represents connection status.

384

*/

385

enum ConnectionStatus {

386

NOT_CONNECTED,

387

CONNECTING_ATTEMPTING_UDP_DISCOVERY,

388

CONNECTING_AWAITING_WEBSOCKET_CONNECT,

389

CONNECTING_AWAITING_AUTHENTICATION,

390

CONNECTING_AWAITING_ENDPOINT,

391

CONNECTING_ATTEMPTING_ENDPOINT_CONNECTION,

392

CONNECTED,

393

DISCONNECTED_LOST_CONNECTION,

394

DISCONNECTED_KICKED_FROM_CHANNEL,

395

DISCONNECTED_CHANNEL_DELETED,

396

DISCONNECTED_LEFT_CHANNEL,

397

AUDIO_REGION_CHANGE,

398

ERROR_LOST_CONNECTION,

399

ERROR_CANNOT_RESUME,

400

ERROR_WEBSOCKET_UNABLE_TO_CONNECT,

401

ERROR_UDP_UNABLE_TO_CONNECT,

402

ERROR_CONNECTION_TIMEOUT,

403

ERROR_CONNECTION_FAILED,

404

SHUTDOWN

405

}

406

```

407

408

**Usage Examples:**

409

410

```java

411

import net.dv8tion.jda.api.audio.hooks.ConnectionListener;

412

import net.dv8tion.jda.api.audio.hooks.ConnectionStatus;

413

414

public class VoiceConnectionMonitor implements ConnectionListener {

415

private final TextChannel logChannel;

416

417

public VoiceConnectionMonitor(TextChannel logChannel) {

418

this.logChannel = logChannel;

419

}

420

421

@Override

422

public void onStatusChange(ConnectionStatus status) {

423

switch (status) {

424

case CONNECTED -> {

425

logChannel.sendMessage("✅ Connected to voice channel successfully!").queue();

426

System.out.println("Voice connection established");

427

}

428

case CONNECTING_ATTEMPTING_UDP_DISCOVERY -> {

429

System.out.println("Attempting UDP discovery...");

430

}

431

case DISCONNECTED_LOST_CONNECTION -> {

432

logChannel.sendMessage("⚠️ Lost connection to voice channel").queue();

433

System.out.println("Lost voice connection");

434

}

435

case DISCONNECTED_KICKED_FROM_CHANNEL -> {

436

logChannel.sendMessage("❌ Bot was kicked from voice channel").queue();

437

}

438

case ERROR_CONNECTION_FAILED -> {

439

logChannel.sendMessage("❌ Failed to connect to voice channel").queue();

440

System.err.println("Voice connection failed");

441

}

442

default -> {

443

System.out.println("Voice connection status: " + status);

444

}

445

}

446

}

447

448

@Override

449

public void onPing(long ping) {

450

System.out.println("Voice ping: " + ping + "ms");

451

452

// Alert if ping is too high

453

if (ping > 200) {

454

logChannel.sendMessage("⚠️ High voice latency: " + ping + "ms").queue();

455

}

456

}

457

458

@Override

459

public void onUserSpeaking(User user, boolean speaking) {

460

if (speaking) {

461

System.out.println(user.getName() + " started speaking");

462

} else {

463

System.out.println(user.getName() + " stopped speaking");

464

}

465

}

466

}

467

468

// Usage

469

VoiceConnectionMonitor monitor = new VoiceConnectionMonitor(logChannel);

470

audioManager.setConnectionListener(monitor);

471

472

// Connect with monitoring

473

audioManager.openAudioConnection(voiceChannel);

474

```

475

476

### Advanced Audio Features

477

478

Additional audio system features for enhanced voice functionality.

479

480

```java { .api }

481

/**

482

* Audio format configuration.

483

*/

484

class AudioFormat {

485

/** Standard Discord audio format (48kHz, 16-bit, stereo) */

486

static AudioFormat DISCORD_PCM_FORMAT = new AudioFormat(48000.0f, 16, 2, true, true);

487

488

/** Get sample rate */

489

float getSampleRate();

490

491

/** Get sample size in bits */

492

int getSampleSizeInBits();

493

494

/** Get number of channels */

495

int getChannels();

496

497

/** Check if signed */

498

boolean isSigned();

499

500

/** Check if big endian */

501

boolean isBigEndian();

502

}

503

504

/**

505

* Audio utilities for common operations.

506

*/

507

class AudioUtils {

508

/** Convert PCM to Opus */

509

static byte[] convertPcmToOpus(byte[] pcmData, AudioFormat format);

510

511

/** Convert Opus to PCM */

512

static byte[] convertOpusToPcm(byte[] opusData);

513

514

/** Adjust audio volume */

515

static short[] adjustVolume(short[] audioData, double volume);

516

517

/** Mix multiple audio streams */

518

static short[] mixAudio(List<short[]> audioStreams);

519

520

/** Normalize audio levels */

521

static short[] normalizeAudio(short[] audioData);

522

}

523

```

524

525

**Usage Examples:**

526

527

```java

528

// Custom audio processing

529

public class AdvancedAudioPlayer implements AudioSendHandler {

530

private Queue<short[]> audioQueue = new ConcurrentLinkedQueue<>();

531

private double volume = 1.0;

532

533

public void setVolume(double volume) {

534

this.volume = Math.max(0.0, Math.min(2.0, volume)); // Clamp between 0-200%

535

}

536

537

public void queuePcmAudio(short[] pcmData) {

538

// Apply volume adjustment

539

short[] adjustedAudio = AudioUtils.adjustVolume(pcmData, volume);

540

audioQueue.offer(adjustedAudio);

541

}

542

543

@Override

544

public boolean canProvide() {

545

return !audioQueue.isEmpty();

546

}

547

548

@Override

549

public ByteBuffer provide20MsAudio() {

550

short[] audioData = audioQueue.poll();

551

if (audioData == null) return null;

552

553

// Convert to byte buffer

554

ByteBuffer buffer = ByteBuffer.allocate(audioData.length * 2);

555

for (short sample : audioData) {

556

buffer.putShort(sample);

557

}

558

buffer.flip();

559

return buffer;

560

}

561

562

@Override

563

public boolean isOpus() {

564

return false; // We're providing PCM data

565

}

566

}

567

568

// Audio mixing example

569

public class AudioMixer implements AudioReceiveHandler {

570

private final Map<User, Queue<short[]>> userBuffers = new HashMap<>();

571

private final AudioSendHandler outputHandler;

572

573

public AudioMixer(AudioSendHandler outputHandler) {

574

this.outputHandler = outputHandler;

575

}

576

577

@Override

578

public boolean canReceiveUser() {

579

return true;

580

}

581

582

@Override

583

public boolean canReceiveCombined() {

584

return false; // We'll handle mixing ourselves

585

}

586

587

@Override

588

public void handleUserAudio(UserAudio userAudio) {

589

User user = userAudio.getUser();

590

short[] audioData = userAudio.getAudioData(1.0);

591

592

// Buffer audio for mixing

593

userBuffers.computeIfAbsent(user, k -> new ConcurrentLinkedQueue<>())

594

.offer(audioData);

595

596

// Mix when we have enough data

597

mixAndOutput();

598

}

599

600

private void mixAndOutput() {

601

List<short[]> audioToMix = new ArrayList<>();

602

603

// Collect audio from all users

604

for (Queue<short[]> buffer : userBuffers.values()) {

605

short[] audio = buffer.poll();

606

if (audio != null) {

607

audioToMix.add(audio);

608

}

609

}

610

611

if (!audioToMix.isEmpty()) {

612

// Mix all audio streams

613

short[] mixedAudio = AudioUtils.mixAudio(audioToMix);

614

615

// Send mixed audio back

616

if (outputHandler instanceof AdvancedAudioPlayer) {

617

((AdvancedAudioPlayer) outputHandler).queuePcmAudio(mixedAudio);

618

}

619

}

620

}

621

622

@Override

623

public void handleCombinedAudio(CombinedAudio combinedAudio) {

624

// Not used in this implementation

625

}

626

627

@Override

628

public boolean includeUserInCombinedAudio(User user) {

629

return false; // We handle mixing manually

630

}

631

}

632

```

633

634

## Types

635

636

```java { .api }

637

// Audio connection status enumeration

638

enum ConnectionStatus {

639

NOT_CONNECTED,

640

CONNECTING_ATTEMPTING_UDP_DISCOVERY,

641

CONNECTING_AWAITING_WEBSOCKET_CONNECT,

642

CONNECTING_AWAITING_AUTHENTICATION,

643

CONNECTING_AWAITING_ENDPOINT,

644

CONNECTING_ATTEMPTING_ENDPOINT_CONNECTION,

645

CONNECTED,

646

DISCONNECTED_LOST_CONNECTION,

647

DISCONNECTED_KICKED_FROM_CHANNEL,

648

DISCONNECTED_CHANNEL_DELETED,

649

DISCONNECTED_LEFT_CHANNEL,

650

AUDIO_REGION_CHANGE,

651

ERROR_LOST_CONNECTION,

652

ERROR_CANNOT_RESUME,

653

ERROR_WEBSOCKET_UNABLE_TO_CONNECT,

654

ERROR_UDP_UNABLE_TO_CONNECT,

655

ERROR_CONNECTION_TIMEOUT,

656

ERROR_CONNECTION_FAILED,

657

SHUTDOWN

658

}

659

660

// Audio format specifications

661

class AudioFormat {

662

public static final AudioFormat DISCORD_PCM_FORMAT = new AudioFormat(

663

48000.0f, // 48kHz sample rate

664

16, // 16-bit samples

665

2, // Stereo (2 channels)

666

true, // Signed

667

true // Big endian

668

);

669

}

670

671

// Voice region information

672

enum Region {

673

AMSTERDAM("amsterdam", "Amsterdam", "🇳🇱", false),

674

BRAZIL("brazil", "Brazil", "🇧🇷", false),

675

DUBAI("dubai", "Dubai", "🇦🇪", false),

676

EU_CENTRAL("eu-central", "Central Europe", "🇪🇺", false),

677

EU_WEST("eu-west", "Western Europe", "🇪🇺", false),

678

EUROPE("europe", "Europe", "🇪🇺", false),

679

FRANKFURT("frankfurt", "Frankfurt", "🇩🇪", false),

680

HONGKONG("hongkong", "Hong Kong", "🇭🇰", false),

681

INDIA("india", "India", "🇮🇳", false),

682

JAPAN("japan", "Japan", "🇯🇵", false),

683

LONDON("london", "London", "🇬🇧", false),

684

RUSSIA("russia", "Russia", "🇷🇺", false),

685

SINGAPORE("singapore", "Singapore", "🇸🇬", false),

686

SOUTH_KOREA("south-korea", "South Korea", "🇰🇷", false),

687

SYDNEY("sydney", "Sydney", "🇦🇺", false),

688

US_CENTRAL("us-central", "US Central", "🇺🇸", false),

689

US_EAST("us-east", "US East", "🇺🇸", false),

690

US_SOUTH("us-south", "US South", "🇺🇸", false),

691

US_WEST("us-west", "US West", "🇺🇸", false),

692

VIP_AMSTERDAM("vip-amsterdam", "Amsterdam (VIP)", "🇳🇱", true),

693

VIP_US_EAST("vip-us-east", "US East (VIP)", "🇺🇸", true),

694

VIP_US_WEST("vip-us-west", "US West (VIP)", "🇺🇸", true);

695

696

/** Get region key */

697

String getKey();

698

699

/** Get region name */

700

String getName();

701

702

/** Get region emoji */

703

String getEmoji();

704

705

/** Check if VIP region */

706

boolean isVip();

707

}

708

```