CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/maven-net-dv8tion--jda

Java Discord API - A comprehensive Java library for building Discord bots and applications

Pending
Overview
Eval results
Files

docs/audio.md

Audio System

Voice channel connections with audio sending/receiving capabilities, including native audio processing and WebRTC support for Discord voice functionality.

Capabilities

Audio Manager

Core audio management interface for establishing and controlling voice connections.

/**
 * The AudioManager represents the connection to Discord voice servers.
 * Obtained per guild (see getGuild()); used to open/close voice connections
 * and to install the handlers that send and receive audio data.
 */
interface AudioManager {
    /** Open an audio connection to the given voice channel (moves the connection if one is already open) */
    void openAudioConnection(VoiceChannel channel);
    
    /** Close the current audio connection and disconnect from voice */
    void closeAudioConnection();
    
    /** Check if currently connected to a voice channel */
    boolean isConnected();
    
    /** Check if a connection attempt is currently in progress */
    boolean isAttemptingToConnect();
    
    /** Get the currently connected voice channel */
    VoiceChannel getConnectedChannel();
    
    /** Get the voice channel queued for connection (while still connecting) */
    VoiceChannel getQueuedAudioConnection();
    
    /** Set the handler that supplies outgoing audio (pass null to remove it) */
    void setSendingHandler(AudioSendHandler sendHandler);
    
    /** Set the handler that consumes incoming audio (pass null to remove it) */
    void setReceivingHandler(AudioReceiveHandler receiveHandler);
    
    /** Get the currently installed sending handler */
    AudioSendHandler getSendingHandler();
    
    /** Get the currently installed receiving handler */
    AudioReceiveHandler getReceivingHandler();
    
    /** Get the current connection status (see ConnectionStatus) */
    ConnectionStatus getConnectionStatus();
    
    /** Set whether the bot appears muted in the voice channel */
    void setSelfMuted(boolean muted);
    
    /** Set whether the bot appears deafened in the voice channel */
    void setSelfDeafened(boolean deafened);
    
    /** Check if the bot is self-muted */
    boolean isSelfMuted();
    
    /** Check if the bot is self-deafened */
    boolean isSelfDeafened();
    
    /** Get the guild this AudioManager belongs to */
    Guild getGuild();
    
    /** Get the JDA instance */
    JDA getJDA();
    
    /** Set the listener notified of connection events (status, ping, speaking) */
    void setConnectionListener(ConnectionListener listener);
    
    /** Get the currently installed connection listener */
    ConnectionListener getConnectionListener();
}

Usage Examples:

import net.dv8tion.jda.api.managers.AudioManager;
import net.dv8tion.jda.api.entities.channel.concrete.VoiceChannel;

// Basic voice connection
Guild guild = event.getGuild();
AudioManager audioManager = guild.getAudioManager();
VoiceChannel voiceChannel = guild.getVoiceChannelById("123456789");

// Connect to voice channel
audioManager.openAudioConnection(voiceChannel);

// Check connection status
if (audioManager.isConnected()) {
    System.out.println("Connected to: " + audioManager.getConnectedChannel().getName());
}

// Set bot as muted when joining
audioManager.setSelfMuted(true);
audioManager.openAudioConnection(voiceChannel);

// Disconnect from voice
audioManager.closeAudioConnection();

// Move to different voice channel
VoiceChannel newChannel = guild.getVoiceChannelById("987654321");
audioManager.openAudioConnection(newChannel); // Automatically disconnects from current

Audio Send Handler

Interface for sending audio data to Discord voice channels.

/**
 * Interface for providing audio data to send to Discord.
 * Polled by JDA for one audio frame roughly every 20ms while a
 * voice connection is open and a sending handler is installed.
 */
interface AudioSendHandler {
    /** Check if a frame of audio data is available to send right now */
    boolean canProvide();
    
    /** Provide the next 20ms frame of audio data */
    ByteBuffer provide20MsAudio();
    
    /** Whether the provided audio is already Opus-encoded (true) or raw PCM (false) */
    boolean isOpus();
}

Usage Examples:

import net.dv8tion.jda.api.audio.AudioSendHandler;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * Queue-backed AudioSendHandler that plays pre-encoded Opus frames.
 *
 * <p>Each queued byte[] is handed to JDA as one 20ms audio frame.
 * queueAudio/stop are called from control threads while canProvide and
 * provide20MsAudio run on JDA's audio send thread, hence the concurrent
 * queue and the volatile flag.
 */
public class AudioPlayer implements AudioSendHandler {
    private final Queue<byte[]> audioQueue = new ConcurrentLinkedQueue<>();
    // volatile: written by control threads (queueAudio/stop), read by the
    // audio send thread — without it the flag update may never become visible.
    private volatile boolean isPlaying = false;
    
    /** Queue one frame of audio data (from a file, stream, etc.) and start playback. */
    public void queueAudio(byte[] audioData) {
        audioQueue.offer(audioData);
        isPlaying = true;
    }
    
    /** Stop playback and discard any queued audio. */
    public void stop() {
        isPlaying = false;
        audioQueue.clear();
    }
    
    @Override
    public boolean canProvide() {
        return isPlaying && !audioQueue.isEmpty();
    }
    
    @Override
    public ByteBuffer provide20MsAudio() {
        byte[] audioData = audioQueue.poll();
        if (audioData == null) {
            // Queue drained between canProvide() and this call — stop playing.
            isPlaying = false;
            return null;
        }
        return ByteBuffer.wrap(audioData);
    }
    
    @Override
    public boolean isOpus() {
        return true; // Frames are pre-encoded Opus, so JDA skips re-encoding.
    }
}

// Usage with AudioManager
AudioPlayer player = new AudioPlayer();
audioManager.setSendingHandler(player);

// Load and play audio file
try {
    byte[] audioData = loadAudioFile("music.opus");
    player.queueAudio(audioData);
} catch (IOException e) {
    System.err.println("Failed to load audio: " + e.getMessage());
}

// Stop playback
player.stop();
audioManager.setSendingHandler(null);

Audio Receive Handler

Interface for receiving audio data from Discord voice channels.

/**
 * Interface for receiving audio data from Discord.
 */
interface AudioReceiveHandler {
    /** Whether this handler wants per-user audio delivered to handleUserAudio */
    boolean canReceiveUser();
    
    /** Whether this handler wants combined audio delivered to handleCombinedAudio */
    boolean canReceiveCombined();
    
    /** Handle audio from a specific user */
    void handleUserAudio(UserAudio userAudio);
    
    /** Handle combined audio mixed from all users */
    void handleCombinedAudio(CombinedAudio combinedAudio);
    
    /** Whether the given user's audio should be included in the combined mix */
    boolean includeUserInCombinedAudio(User user);
}

/**
 * Represents audio data received from a specific user.
 *
 * <p>NOTE(review): the short[] return of getAudioData(double) shown here
 * conflicts with the AudioRecorder usage example in this document
 * (which assigns the result to byte[]) and with the actual JDA API, where
 * UserAudio#getAudioData(double) returns byte[] of 48kHz 16-bit stereo
 * big-endian PCM — verify against the JDA Javadoc.
 */
interface UserAudio {
    /** Get the user who sent the audio */
    User getUser();
    
    /** Get audio data with the given volume multiplier applied */
    short[] getAudioData(double volume);
    byte[] getAudioData(AudioFormat format);
    
    /** Get the audio format */
    AudioFormat getFormat();
}

/**
 * Represents combined (mixed) audio from multiple users.
 *
 * <p>NOTE(review): the short[] return of getAudioData(double) shown here
 * conflicts with the AudioRecorder usage example in this document
 * (which assigns the result to byte[]) and with the actual JDA API, where
 * CombinedAudio#getAudioData(double) returns byte[] — verify against the
 * JDA Javadoc.
 */
interface CombinedAudio {
    /** Get the list of users whose audio is included in this mix */
    List<User> getUsers();
    
    /** Get the combined audio data with the given volume multiplier applied */
    short[] getAudioData(double volume);
    byte[] getAudioData(AudioFormat format);
    
    /** Get the audio format */
    AudioFormat getFormat();
}

Usage Examples:

import net.dv8tion.jda.api.audio.AudioReceiveHandler;
import net.dv8tion.jda.api.audio.UserAudio;
import net.dv8tion.jda.api.audio.CombinedAudio;
import javax.sound.sampled.AudioFormat;
import java.io.FileOutputStream;
import java.io.IOException;

/**
 * Records incoming voice audio to files: one file per speaking user plus a
 * combined mix of all users.
 *
 * <p>NOTE(review): the streams receive raw decoded audio frames with no
 * WAV/RIFF header written, so despite the ".wav" filenames the output is
 * headerless PCM — prepend a proper header (or use a .pcm extension) before
 * treating the files as playable WAVs.
 */
public class AudioRecorder implements AudioReceiveHandler {
    // ConcurrentHashMap: handleUserAudio runs on JDA's audio receive thread
    // while startRecording/stopRecording are called from other threads.
    private final Map<User, FileOutputStream> userRecordings = new ConcurrentHashMap<>();
    private FileOutputStream combinedRecording;
    // volatile: read by the audio thread, written by the control thread.
    private volatile boolean recording = false;
    
    /** Open the combined output file and start accepting audio. */
    public void startRecording() {
        recording = true;
        try {
            combinedRecording = new FileOutputStream("combined_recording.wav");
        } catch (IOException e) {
            System.err.println("Failed to start recording: " + e.getMessage());
        }
    }
    
    /** Stop accepting audio and close all open output files. */
    public void stopRecording() {
        recording = false;
        
        // Close all per-user recordings
        for (FileOutputStream stream : userRecordings.values()) {
            try {
                stream.close();
            } catch (IOException e) {
                System.err.println("Error closing user recording: " + e.getMessage());
            }
        }
        userRecordings.clear();
        
        // Close the combined recording and drop the stale reference so a late
        // handleCombinedAudio call cannot write to an already-closed stream.
        if (combinedRecording != null) {
            try {
                combinedRecording.close();
            } catch (IOException e) {
                System.err.println("Error closing combined recording: " + e.getMessage());
            } finally {
                combinedRecording = null;
            }
        }
    }
    
    @Override
    public boolean canReceiveUser() {
        return recording;
    }
    
    @Override
    public boolean canReceiveCombined() {
        return recording;
    }
    
    @Override
    public void handleUserAudio(UserAudio userAudio) {
        if (!recording) return;
        
        User user = userAudio.getUser();
        
        // Lazily open one output file per user on first audio frame.
        // NOTE(review): user names may contain characters that are invalid in
        // filenames — consider using the user's ID instead.
        if (!userRecordings.containsKey(user)) {
            try {
                String filename = "recording_" + user.getName() + ".wav";
                userRecordings.put(user, new FileOutputStream(filename));
            } catch (IOException e) {
                System.err.println("Failed to create recording for " + user.getName());
                return;
            }
        }
        
        // Append this frame to the user's file.
        try {
            FileOutputStream stream = userRecordings.get(user);
            byte[] audioData = userAudio.getAudioData(1.0);
            stream.write(audioData);
        } catch (IOException e) {
            System.err.println("Error writing audio for " + user.getName());
        }
    }
    
    @Override
    public void handleCombinedAudio(CombinedAudio combinedAudio) {
        if (!recording || combinedRecording == null) return;
        
        try {
            byte[] audioData = combinedAudio.getAudioData(1.0);
            combinedRecording.write(audioData);
        } catch (IOException e) {
            System.err.println("Error writing combined audio");
        }
    }
    
    @Override
    public boolean includeUserInCombinedAudio(User user) {
        // Include all users except bots
        return !user.isBot();
    }
}

// Usage
AudioRecorder recorder = new AudioRecorder();
audioManager.setReceivingHandler(recorder);

// Start recording
recorder.startRecording();

// Stop recording after some time
Timer timer = new Timer();
timer.schedule(new TimerTask() {
    @Override
    public void run() {
        recorder.stopRecording();
        audioManager.setReceivingHandler(null);
    }
}, 60000); // Record for 1 minute

Audio Connection Listener

Interface for monitoring voice connection events and status changes.

/**
 * Listener for audio connection events.
 */
interface ConnectionListener {
    /** Called whenever the connection status changes (connecting, connected, disconnected, error states) */
    void onStatusChange(ConnectionStatus status);
    
    /** Called when a ping measurement for the voice connection is received, in milliseconds */
    void onPing(long ping);
    
    /** Called when a user's speaking state changes */
    void onUserSpeaking(User user, boolean speaking);
}

/**
 * Represents the status of a voice connection.
 * Constant order is load-bearing for ordinal() — do not reorder.
 */
enum ConnectionStatus {
    // No connection open or queued
    NOT_CONNECTED,
    // Intermediate states while a connection is being established
    CONNECTING_ATTEMPTING_UDP_DISCOVERY,
    CONNECTING_AWAITING_WEBSOCKET_CONNECT,
    CONNECTING_AWAITING_AUTHENTICATION,
    CONNECTING_AWAITING_ENDPOINT,
    CONNECTING_ATTEMPTING_ENDPOINT_CONNECTION,
    // Connection fully established
    CONNECTED,
    // Disconnect reasons
    DISCONNECTED_LOST_CONNECTION,
    DISCONNECTED_KICKED_FROM_CHANNEL,
    DISCONNECTED_CHANNEL_DELETED,
    DISCONNECTED_LEFT_CHANNEL,
    // Voice region change — presumably transitional; verify against JDA Javadoc
    AUDIO_REGION_CHANGE,
    // Error states
    ERROR_LOST_CONNECTION,
    ERROR_CANNOT_RESUME,
    ERROR_WEBSOCKET_UNABLE_TO_CONNECT,
    ERROR_UDP_UNABLE_TO_CONNECT,
    ERROR_CONNECTION_TIMEOUT,
    ERROR_CONNECTION_FAILED,
    // JDA is shutting down
    SHUTDOWN
}

Usage Examples:

import net.dv8tion.jda.api.audio.hooks.ConnectionListener;
import net.dv8tion.jda.api.audio.hooks.ConnectionStatus;

/**
 * ConnectionListener implementation that logs voice connection lifecycle
 * events both to stdout and to a Discord text channel.
 *
 * <p>Fix: implements ConnectionListener — the interface imported above —
 * rather than the nonexistent "AudioConnectionListener".
 */
public class VoiceConnectionMonitor implements ConnectionListener {
    private final TextChannel logChannel;
    
    /** @param logChannel text channel that receives human-readable status messages */
    public VoiceConnectionMonitor(TextChannel logChannel) {
        this.logChannel = logChannel;
    }
    
    @Override
    public void onStatusChange(ConnectionStatus status) {
        switch (status) {
            case CONNECTED -> {
                logChannel.sendMessage("✅ Connected to voice channel successfully!").queue();
                System.out.println("Voice connection established");
            }
            case CONNECTING_ATTEMPTING_UDP_DISCOVERY -> {
                System.out.println("Attempting UDP discovery...");
            }
            case DISCONNECTED_LOST_CONNECTION -> {
                logChannel.sendMessage("⚠️ Lost connection to voice channel").queue();
                System.out.println("Lost voice connection");
            }
            case DISCONNECTED_KICKED_FROM_CHANNEL -> {
                logChannel.sendMessage("❌ Bot was kicked from voice channel").queue();
            }
            case ERROR_CONNECTION_FAILED -> {
                logChannel.sendMessage("❌ Failed to connect to voice channel").queue();
                System.err.println("Voice connection failed");
            }
            default -> {
                System.out.println("Voice connection status: " + status);
            }
        }
    }
    
    @Override
    public void onPing(long ping) {
        System.out.println("Voice ping: " + ping + "ms");
        
        // Alert if ping is too high
        if (ping > 200) {
            logChannel.sendMessage("⚠️ High voice latency: " + ping + "ms").queue();
        }
    }
    
    @Override
    public void onUserSpeaking(User user, boolean speaking) {
        if (speaking) {
            System.out.println(user.getName() + " started speaking");
        } else {
            System.out.println(user.getName() + " stopped speaking");
        }
    }
}

// Usage
VoiceConnectionMonitor monitor = new VoiceConnectionMonitor(logChannel);
audioManager.setConnectionListener(monitor);

// Connect with monitoring
audioManager.openAudioConnection(voiceChannel);

Advanced Audio Features

Additional audio system features for enhanced voice functionality.

/**
 * Audio format configuration (mirrors javax.sound.sampled.AudioFormat,
 * which the usage examples in this document import).
 *
 * <p>NOTE(review): declaration-style listing — constructor and method bodies
 * are omitted, so this is documentation rather than compilable Java.
 */
class AudioFormat {
    /** Standard Discord audio format: 48kHz sample rate, 16-bit signed samples, 2 channels (stereo), big-endian */
    static AudioFormat DISCORD_PCM_FORMAT = new AudioFormat(48000.0f, 16, 2, true, true);
    
    /** Get the sample rate in Hz */
    float getSampleRate();
    
    /** Get the size of each sample in bits */
    int getSampleSizeInBits();
    
    /** Get the number of channels (1 = mono, 2 = stereo) */
    int getChannels();
    
    /** Check if samples are signed */
    boolean isSigned();
    
    /** Check if samples are stored big-endian */
    boolean isBigEndian();
}

/**
 * Audio utilities for common operations.
 *
 * <p>NOTE(review): no AudioUtils class exists in JDA's public API —
 * presumably an illustrative helper for these examples; verify before
 * relying on it in real code.
 */
class AudioUtils {
    /** Encode raw PCM data to Opus */
    static byte[] convertPcmToOpus(byte[] pcmData, AudioFormat format);
    
    /** Decode Opus data to raw PCM */
    static byte[] convertOpusToPcm(byte[] opusData);
    
    /** Scale PCM sample amplitudes by the given volume multiplier */
    static short[] adjustVolume(short[] audioData, double volume);
    
    /** Mix multiple PCM streams into a single stream */
    static short[] mixAudio(List<short[]> audioStreams);
    
    /** Normalize PCM audio levels */
    static short[] normalizeAudio(short[] audioData);
}

Usage Examples:

// Custom audio processing
/**
 * AudioSendHandler that plays raw PCM frames with adjustable volume.
 *
 * <p>Frames queued via queuePcmAudio are pre-scaled by the current volume
 * and drained one per 20ms by JDA's audio send thread.
 */
public class AdvancedAudioPlayer implements AudioSendHandler {
    private final Queue<short[]> audioQueue = new ConcurrentLinkedQueue<>();
    // volatile: setVolume is called from control threads while queuePcmAudio
    // reads the value, possibly on a different thread.
    private volatile double volume = 1.0;
    
    /** Set playback volume, clamped to [0.0, 2.0] (0–200%). */
    public void setVolume(double volume) {
        this.volume = Math.max(0.0, Math.min(2.0, volume));
    }
    
    /** Queue one frame of PCM samples, scaled by the current volume. */
    public void queuePcmAudio(short[] pcmData) {
        short[] adjustedAudio = AudioUtils.adjustVolume(pcmData, volume);
        audioQueue.offer(adjustedAudio);
    }
    
    @Override
    public boolean canProvide() {
        return !audioQueue.isEmpty();
    }
    
    @Override
    public ByteBuffer provide20MsAudio() {
        short[] audioData = audioQueue.poll();
        if (audioData == null) return null;
        
        // ByteBuffer defaults to big-endian byte order, matching the
        // big-endian signed PCM format used throughout this document
        // (see DISCORD_PCM_FORMAT: 48kHz, 16-bit, stereo, big-endian).
        ByteBuffer buffer = ByteBuffer.allocate(audioData.length * 2);
        for (short sample : audioData) {
            buffer.putShort(sample);
        }
        buffer.flip();
        return buffer;
    }
    
    @Override
    public boolean isOpus() {
        return false; // Raw PCM — JDA performs the Opus encoding itself.
    }
}

// Audio mixing example
/**
 * Receives per-user audio, buffers one frame per user, mixes the buffered
 * frames together and feeds the mix back into an AudioSendHandler.
 */
public class AudioMixer implements AudioReceiveHandler {
    // ConcurrentHashMap: handleUserAudio runs on JDA's audio receive thread,
    // and a plain HashMap is unsafe if any other thread touches this map
    // (the per-user queues are already ConcurrentLinkedQueue for the same reason).
    private final Map<User, Queue<short[]>> userBuffers = new ConcurrentHashMap<>();
    private final AudioSendHandler outputHandler;
    
    /** @param outputHandler handler that receives the mixed PCM for playback */
    public AudioMixer(AudioSendHandler outputHandler) {
        this.outputHandler = outputHandler;
    }
    
    @Override
    public boolean canReceiveUser() {
        return true;
    }
    
    @Override
    public boolean canReceiveCombined() {
        return false; // We perform the mixing ourselves in mixAndOutput().
    }
    
    @Override
    public void handleUserAudio(UserAudio userAudio) {
        User user = userAudio.getUser();
        short[] audioData = userAudio.getAudioData(1.0);
        
        // Buffer this user's frame for mixing.
        userBuffers.computeIfAbsent(user, k -> new ConcurrentLinkedQueue<>())
                  .offer(audioData);
        
        // Attempt a mix on every incoming frame.
        mixAndOutput();
    }
    
    /** Drain one buffered frame per user, mix them, and forward the result. */
    private void mixAndOutput() {
        List<short[]> audioToMix = new ArrayList<>();
        
        // Collect at most one pending frame from each user's buffer.
        for (Queue<short[]> buffer : userBuffers.values()) {
            short[] audio = buffer.poll();
            if (audio != null) {
                audioToMix.add(audio);
            }
        }
        
        if (!audioToMix.isEmpty()) {
            // Mix all collected streams into a single frame.
            short[] mixedAudio = AudioUtils.mixAudio(audioToMix);
            
            // Forward the mix to the output handler if it can accept PCM.
            if (outputHandler instanceof AdvancedAudioPlayer) {
                ((AdvancedAudioPlayer) outputHandler).queuePcmAudio(mixedAudio);
            }
        }
    }
    
    @Override
    public void handleCombinedAudio(CombinedAudio combinedAudio) {
        // Not used — canReceiveCombined() returns false.
    }
    
    @Override
    public boolean includeUserInCombinedAudio(User user) {
        return false; // We handle mixing manually.
    }
}

Types

// Audio connection status enumeration
// (duplicates the ConnectionStatus definition shown in the
// "Audio Connection Listener" section above; order is load-bearing for ordinal())
enum ConnectionStatus {
    // No connection open or queued
    NOT_CONNECTED,
    // Intermediate states while a connection is being established
    CONNECTING_ATTEMPTING_UDP_DISCOVERY,
    CONNECTING_AWAITING_WEBSOCKET_CONNECT, 
    CONNECTING_AWAITING_AUTHENTICATION,
    CONNECTING_AWAITING_ENDPOINT,
    CONNECTING_ATTEMPTING_ENDPOINT_CONNECTION,
    // Connection fully established
    CONNECTED,
    // Disconnect reasons
    DISCONNECTED_LOST_CONNECTION,
    DISCONNECTED_KICKED_FROM_CHANNEL,
    DISCONNECTED_CHANNEL_DELETED,
    DISCONNECTED_LEFT_CHANNEL,
    // Voice region change — presumably transitional; verify against JDA Javadoc
    AUDIO_REGION_CHANGE,
    // Error states
    ERROR_LOST_CONNECTION,
    ERROR_CANNOT_RESUME,
    ERROR_WEBSOCKET_UNABLE_TO_CONNECT,
    ERROR_UDP_UNABLE_TO_CONNECT,
    ERROR_CONNECTION_TIMEOUT,
    ERROR_CONNECTION_FAILED,
    // JDA is shutting down
    SHUTDOWN
}

// Audio format specifications
// (expanded view of the DISCORD_PCM_FORMAT constant from the AudioFormat
// class shown in the "Advanced Audio Features" section above)
class AudioFormat {
    public static final AudioFormat DISCORD_PCM_FORMAT = new AudioFormat(
        48000.0f,  // 48kHz sample rate
        16,        // 16-bit samples
        2,         // Stereo (2 channels)
        true,      // Signed
        true       // Big endian
    );
}

// Voice region information: API key, display name, flag emoji, and VIP flag.
// NOTE(review): declaration-style listing — the constructor and backing fields
// for these constants are omitted, so this is documentation rather than
// compilable Java.
enum Region {
    AMSTERDAM("amsterdam", "Amsterdam", "🇳🇱", false),
    BRAZIL("brazil", "Brazil", "🇧🇷", false),
    DUBAI("dubai", "Dubai", "🇦🇪", false),
    EU_CENTRAL("eu-central", "Central Europe", "🇪🇺", false),
    EU_WEST("eu-west", "Western Europe", "🇪🇺", false),
    EUROPE("europe", "Europe", "🇪🇺", false),
    FRANKFURT("frankfurt", "Frankfurt", "🇩🇪", false),
    HONGKONG("hongkong", "Hong Kong", "🇭🇰", false),
    INDIA("india", "India", "🇮🇳", false),
    JAPAN("japan", "Japan", "🇯🇵", false),
    LONDON("london", "London", "🇬🇧", false),
    RUSSIA("russia", "Russia", "🇷🇺", false),
    SINGAPORE("singapore", "Singapore", "🇸🇬", false),
    SOUTH_KOREA("south-korea", "South Korea", "🇰🇷", false),
    SYDNEY("sydney", "Sydney", "🇦🇺", false),
    US_CENTRAL("us-central", "US Central", "🇺🇸", false),
    US_EAST("us-east", "US East", "🇺🇸", false),
    US_SOUTH("us-south", "US South", "🇺🇸", false),
    US_WEST("us-west", "US West", "🇺🇸", false),
    VIP_AMSTERDAM("vip-amsterdam", "Amsterdam (VIP)", "🇳🇱", true),
    VIP_US_EAST("vip-us-east", "US East (VIP)", "🇺🇸", true),
    VIP_US_WEST("vip-us-west", "US West (VIP)", "🇺🇸", true);
    
    /** Get the region key used by the Discord API */
    String getKey();
    
    /** Get the human-readable region name */
    String getName();
    
    /** Get the region's flag emoji */
    String getEmoji();
    
    /** Check whether this is a VIP-only region */
    boolean isVip();
}

Install with Tessl CLI

npx tessl i tessl/maven-net-dv8tion--jda

docs

audio.md

core-management.md

entities.md

events.md

index.md

interactions.md

messaging.md

restactions.md

sharding.md

tile.json