A Python interface to the mpv media player.

OpenGL render context management for custom rendering scenarios and integration with graphics frameworks. Enables advanced video output control, custom rendering pipelines, and integration with OpenGL applications.

Create and manage OpenGL render contexts for custom video output.
class MpvRenderContext:
    """OpenGL render context for advanced rendering scenarios.

    Wraps libmpv's render API so a host application can drive video
    rendering into its own OpenGL context instead of a window owned by mpv.
    """

    def __init__(self, mpv: 'MPV', api_type: str, **kwargs):
        """
        Initialize render context.

        Parameters:
        - mpv: MPV player instance
        - api_type: Render API type ('opengl')
        - kwargs: API-specific parameters

        For OpenGL API:
        - get_proc_address: Function to retrieve OpenGL function pointers
        - gl_params: MpvOpenGLInitParams object
        """

    def render(self, **kwargs) -> int:
        """
        Render current frame to OpenGL context.

        Parameters:
        - kwargs: Render parameters (fbo, viewport, etc.)

        Returns:
        Render result code (0 for success)
        """

    def update(self) -> bool:
        """
        Update render context and check for new frames.

        Returns:
        True if new frame is available for rendering
        """

    def report_swap(self):
        """Report that frame buffer swap has occurred."""

    def free(self):
        """Free render context resources."""

Send commands to audio and video filter chains for dynamic filter control.
def vf_command(self, label: str, command: str, argument):
    """
    Send command to a video filter.

    Parameters:
    - label: Filter label/name in the filter chain
    - command: Command name to send to the filter
    - argument: Command argument/parameter
    """

def af_command(self, label: str, command: str, argument):
    """
    Send command to an audio filter.

    Parameters:
    - label: Filter label/name in the filter chain
    - command: Command name to send to the filter
    - argument: Command argument/parameter
    """

Parameter classes for configuring OpenGL rendering.
class MpvOpenGLInitParams:
    """OpenGL initialization parameters passed when creating a render context."""

    def __init__(self, get_proc_address):
        """
        Initialize OpenGL parameters.

        Parameters:
        - get_proc_address: Function pointer to get OpenGL function addresses
        """
class MpvOpenGLFBO:
    """OpenGL framebuffer object parameters describing the render target."""

    def __init__(self, w: int, h: int, fbo: int = 0, internal_format: int = 0):
        """
        Initialize FBO parameters.

        Parameters:
        - w, h: Framebuffer dimensions
        - fbo: Framebuffer object ID (0 for default framebuffer)
        - internal_format: Internal format (0 for auto)
        """
class MpvRenderFrameInfo:
    """Information about rendered frame."""

    def as_dict(self) -> dict:
        """
        Get frame information as dictionary.

        Returns:
        Dictionary with frame metadata
        """
class MpvRenderParam:
    """Generic render parameter container."""

    def __init__(self, name: str, value=None):
        """
        Initialize render parameter.

        Parameters:
        - name: Parameter name (presumably a key of TYPES below — verify)
        - value: Parameter value
        """

    # Parameter type constants. The numeric values presumably mirror
    # libmpv's mpv_render_param_type enum — TODO confirm against render.h.
    TYPES = {
        'invalid': 0,
        'api_type': 1,
        'opengl_init_params': 2,
        'opengl_fbo': 3,
        'flip_y': 4,
        'depth': 5,
        'icc_profile': 6,
        'ambient_light': 7,
        'x11_display': 8,
        'wl_display': 9,
        'advanced_control': 10,
        'next_frame_info': 11,
        'block_for_target_time': 12,
        'skip_rendering': 13,
        'opengl_drm_params': 14,
        'opengl_drm_draw_surface_size': 15,
        'opengl_drm_params_v2': 16,
        'sw_size': 17,
        'sw_format': 18,
        'sw_stride': 19,
        'sw_pointer': 20
    }

Classes for hardware-accelerated rendering on Linux.
class MpvOpenGLDRMParams:
    """DRM parameters for direct hardware rendering."""
class MpvOpenGLDRMDrawSurfaceSize:
    """DRM draw surface size specification."""
class MpvOpenGLDRMParamsV2:
    """Enhanced DRM parameters with additional features."""

    def __init__(self, crtc_id: int, connector_id: int, atomic_request_ptr, fd: int = -1, render_fd: int = -1):
        """
        Initialize DRM parameters.

        Parameters:
        - crtc_id: CRTC (display controller) ID
        - connector_id: Display connector ID
        - atomic_request_ptr: Atomic mode setting request
        - fd: DRM file descriptor
        - render_fd: Render node file descriptor
        """

import mpv
import OpenGL.GL as gl
from OpenGL.GL import *
import ctypes

# OpenGL context setup (varies by platform/framework)
def setup_opengl_context():
    """Setup OpenGL context - implementation depends on your framework."""
    # This is pseudocode - actual implementation depends on your OpenGL framework
    # (pygame, GLFW, Qt, tkinter, etc.)
    pass

def get_proc_address(name):
    """Get OpenGL function pointer for mpv's loader callback."""
    # Implementation depends on your OpenGL loading library
    # For example, with GLFW:
    # return glfw.get_proc_address(name.decode('utf-8'))
    pass
# Initialize player and render context
player = mpv.MPV(vo='libmpv')  # Use libmpv video output so we control rendering

# OpenGL init parameters wrap the host's proc-address loader
gl_init_params = mpv.MpvOpenGLInitParams(get_proc_address)

# Create render context bound to the player
render_ctx = mpv.MpvRenderContext(
    player,
    'opengl',
    opengl_init_params=gl_init_params
)

def render_frame():
    """Render the current mpv frame into the active OpenGL context.

    Returns True when a new frame was available and rendered successfully.
    """
    # Nothing to do unless mpv has produced a new frame
    if not render_ctx.update():
        return False
    # Size the target to the current viewport on the default framebuffer
    vp = gl.glGetIntegerv(gl.GL_VIEWPORT)
    target = mpv.MpvOpenGLFBO(vp[2], vp[3])
    status = render_ctx.render(opengl_fbo=target)
    # Let mpv know the swap happened so it can pace frame timing
    render_ctx.report_swap()
    return status == 0

# Usage in main loop
player.play('/path/to/video.mp4')
while True:  # Your main loop
    if render_frame():
        # Frame was rendered
        pass
    # Handle other events, swap buffers, etc.
    # This depends on your OpenGL framework

import numpy as np
class OffscreenRenderer:
    """Render mpv frames into an offscreen FBO and read the pixels back."""

    def __init__(self, player, width, height):
        # player: presumably an mpv.MPV created with vo='libmpv' — TODO confirm
        self.player = player
        self.width = width
        self.height = height
        # Create OpenGL resources (requires a current GL context)
        self.setup_fbo()
        self.setup_render_context()

    def setup_fbo(self):
        """Create framebuffer object for offscreen rendering."""
        # Generate framebuffer
        self.fbo = gl.glGenFramebuffers(1)
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, self.fbo)
        # Create color texture sized to the requested output
        self.color_texture = gl.glGenTextures(1)
        gl.glBindTexture(gl.GL_TEXTURE_2D, self.color_texture)
        gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA8,
                        self.width, self.height, 0,
                        gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, None)
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR)
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR)
        # Attach texture to framebuffer as the color target
        gl.glFramebufferTexture2D(gl.GL_FRAMEBUFFER, gl.GL_COLOR_ATTACHMENT0,
                                  gl.GL_TEXTURE_2D, self.color_texture, 0)
        # Fail fast if the FBO is unusable
        if gl.glCheckFramebufferStatus(gl.GL_FRAMEBUFFER) != gl.GL_FRAMEBUFFER_COMPLETE:
            raise RuntimeError("Framebuffer not complete")
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0)

    def setup_render_context(self):
        """Setup mpv render context."""
        # get_proc_address must be provided by the host application
        gl_init_params = mpv.MpvOpenGLInitParams(get_proc_address)
        self.render_ctx = mpv.MpvRenderContext(
            self.player,
            'opengl',
            opengl_init_params=gl_init_params
        )

    def render_to_texture(self):
        """Render current frame to the offscreen texture.

        Returns True when a new frame was available and rendered successfully.
        """
        if not self.render_ctx.update():
            return False
        # Bind our framebuffer
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, self.fbo)
        gl.glViewport(0, 0, self.width, self.height)
        # Clear framebuffer
        gl.glClear(gl.GL_COLOR_BUFFER_BIT)
        # Render mpv frame into our FBO
        fbo_params = mpv.MpvOpenGLFBO(self.width, self.height, self.fbo)
        result = self.render_ctx.render(opengl_fbo=fbo_params)
        # Report swap so mpv can pace frame timing
        self.render_ctx.report_swap()
        # Restore default framebuffer
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0)
        return result == 0

    def read_pixels(self):
        """Read rendered pixels back from the FBO.

        Returns a (height, width, 4) uint8 numpy array, top row first.
        """
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, self.fbo)
        # Read pixels
        pixels = gl.glReadPixels(0, 0, self.width, self.height,
                                 gl.GL_RGBA, gl.GL_UNSIGNED_BYTE)
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0)
        # Convert to numpy array
        pixel_array = np.frombuffer(pixels, dtype=np.uint8)
        pixel_array = pixel_array.reshape((self.height, self.width, 4))
        # Flip vertically (OpenGL stores rows bottom-up)
        pixel_array = np.flip(pixel_array, axis=0)
        return pixel_array

    def cleanup(self):
        """Clean up OpenGL resources and free the render context."""
        gl.glDeleteFramebuffers(1, [self.fbo])
        gl.glDeleteTextures(1, [self.color_texture])
        self.render_ctx.free()
# Usage
offscreen = OffscreenRenderer(player, 1920, 1080)
player.play('/path/to/video.mp4')
player.wait_until_playing()
# Render frames and capture
for i in range(100): # Capture 100 frames
if offscreen.render_to_texture():
pixels = offscreen.read_pixels()
# Save frame as image
from PIL import Image
img = Image.fromarray(pixels[:, :, :3]) # Remove alpha channel
img.save(f'frame_{i:04d}.png')
# Advance to next frame
player.frame_step()
offscreen.cleanup()class MultiContextRenderer:
def __init__(self, player):
    """Track one mpv player and a registry of named render targets."""
    self.contexts = {}
    self.player = player
def add_render_target(self, name, width, height, context_setup_func):
    """Register a new named render target with its own render context.

    Parameters:
    - name: Registry key identifying the target
    - width, height: Target dimensions in pixels
    - context_setup_func: Callable that activates the target's GL context
    """
    # Activate the target's OpenGL context before creating mpv's context
    context_setup_func()
    init_params = mpv.MpvOpenGLInitParams(get_proc_address)
    ctx = mpv.MpvRenderContext(
        self.player,
        'opengl',
        opengl_init_params=init_params
    )
    # Remember everything needed to re-activate and render this target later
    self.contexts[name] = {
        'render_ctx': ctx,
        'width': width,
        'height': height,
        'setup_func': context_setup_func,
    }
def render_to_target(self, target_name, fbo_id=0):
    """Render the current frame to the named target.

    Returns True when the target exists, a new frame was available, and
    rendering succeeded; False otherwise.
    """
    ctx_info = self.contexts.get(target_name)
    if ctx_info is None:
        return False
    # Re-activate this target's OpenGL context first
    ctx_info['setup_func']()
    render_ctx = ctx_info['render_ctx']
    if not render_ctx.update():
        return False
    target = mpv.MpvOpenGLFBO(
        ctx_info['width'],
        ctx_info['height'],
        fbo_id
    )
    status = render_ctx.render(opengl_fbo=target)
    render_ctx.report_swap()
    return status == 0
def cleanup(self):
    """Free every registered render context."""
    for name in list(self.contexts):
        self.contexts[name]['render_ctx'].free()
# Usage with multiple windows/contexts
multi_renderer = MultiContextRenderer(player)

# Add different render targets; each setup function must make the
# corresponding window's OpenGL context current.
def setup_main_window():
    # Setup main window OpenGL context
    pass

def setup_preview_window():
    # Setup preview window OpenGL context
    pass

multi_renderer.add_render_target('main', 1920, 1080, setup_main_window)
multi_renderer.add_render_target('preview', 320, 240, setup_preview_window)

# Render to different targets
player.play('/path/to/video.mp4')
while True:  # Main loop
    # Render to main window
    multi_renderer.render_to_target('main')
    # Render to preview window
    multi_renderer.render_to_target('preview')
    # Handle events, etc.

class CustomRenderPipeline:
def __init__(self, player):
    """Hold the player plus pipeline state (render context, shader list)."""
    self.player = player
    self.post_process_shaders = []
    self.render_ctx = None
def setup_pipeline(self):
    """Setup the custom rendering pipeline: mpv render context plus FBOs."""
    init_params = mpv.MpvOpenGLInitParams(get_proc_address)
    self.render_ctx = mpv.MpvRenderContext(
        self.player,
        'opengl',
        opengl_init_params=init_params
    )
    # Intermediate framebuffers for the post-processing passes
    self.setup_framebuffers()
def setup_framebuffers(self):
    """Create framebuffers for pipeline stages.

    Fix: the original generated FBO and texture names but never allocated
    storage for the main texture and never attached ANY texture to ANY
    framebuffer, so every FBO was incomplete and rendering into it was
    undefined. Each target now gets an RGBA8 texture attached as
    GL_COLOR_ATTACHMENT0 and is verified complete (matching the check in
    OffscreenRenderer.setup_fbo).
    """
    def _make_target(width, height):
        # Allocate an RGBA8 color texture and attach it to a fresh FBO.
        fbo = gl.glGenFramebuffers(1)
        texture = gl.glGenTextures(1)
        gl.glBindTexture(gl.GL_TEXTURE_2D, texture)
        gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA8,
                        width, height, 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, None)
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR)
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR)
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, fbo)
        gl.glFramebufferTexture2D(gl.GL_FRAMEBUFFER, gl.GL_COLOR_ATTACHMENT0,
                                  gl.GL_TEXTURE_2D, texture, 0)
        if gl.glCheckFramebufferStatus(gl.GL_FRAMEBUFFER) != gl.GL_FRAMEBUFFER_COMPLETE:
            raise RuntimeError("Framebuffer not complete")
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0)
        return fbo, texture

    # Main rendering FBO (1920x1080 matches render_with_pipeline's target size)
    self.main_fbo, self.main_texture = _make_target(1920, 1080)
    # Post-processing FBOs
    self.post_fbos = []
    self.post_textures = []
    for _ in range(2):  # Ping-pong buffers
        fbo, texture = _make_target(1920, 1080)
        self.post_fbos.append(fbo)
        self.post_textures.append(texture)
def add_post_process_shader(self, vertex_shader, fragment_shader):
    """Compile the given GLSL sources and append the program to the pipeline."""
    self.post_process_shaders.append(
        self.compile_shader_program(vertex_shader, fragment_shader)
    )
def compile_shader_program(self, vertex_src, fragment_src):
    """Compile and link an OpenGL shader program.

    Parameters:
    - vertex_src: GLSL vertex shader source
    - fragment_src: GLSL fragment shader source

    Returns:
    Linked program object ID.

    Raises:
    RuntimeError: if compilation or linking fails. The original never
    checked GL_COMPILE_STATUS / GL_LINK_STATUS, so a broken shader
    silently yielded an unusable program.
    """
    def _compile(kind, src):
        # Compile one shader stage, surfacing the driver's info log on failure.
        shader = gl.glCreateShader(kind)
        gl.glShaderSource(shader, src)
        gl.glCompileShader(shader)
        if not gl.glGetShaderiv(shader, gl.GL_COMPILE_STATUS):
            log = gl.glGetShaderInfoLog(shader)
            gl.glDeleteShader(shader)
            raise RuntimeError(f"Shader compile failed: {log}")
        return shader

    vertex_shader = _compile(gl.GL_VERTEX_SHADER, vertex_src)
    fragment_shader = _compile(gl.GL_FRAGMENT_SHADER, fragment_src)
    program = gl.glCreateProgram()
    gl.glAttachShader(program, vertex_shader)
    gl.glAttachShader(program, fragment_shader)
    gl.glLinkProgram(program)
    # Shader objects can be flagged for deletion once linked; the program
    # keeps them alive as long as needed.
    gl.glDeleteShader(vertex_shader)
    gl.glDeleteShader(fragment_shader)
    if not gl.glGetProgramiv(program, gl.GL_LINK_STATUS):
        log = gl.glGetProgramInfoLog(program)
        gl.glDeleteProgram(program)
        raise RuntimeError(f"Program link failed: {log}")
    return program
def render_with_pipeline(self):
    """Render frame through the custom pipeline.

    Renders the mpv frame into main_fbo, runs each post-process shader
    across ping-pong buffers, then draws the final texture to the default
    framebuffer. Returns True on success, False if no new frame was
    available or mpv's render call failed.
    """
    # No new frame -> nothing to draw
    if not self.render_ctx.update():
        return False
    # Render mpv frame to main FBO
    gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, self.main_fbo)
    fbo_params = mpv.MpvOpenGLFBO(1920, 1080, self.main_fbo)
    result = self.render_ctx.render(opengl_fbo=fbo_params)
    if result != 0:
        return False
    # Apply post-processing pipeline
    current_input = self.main_texture
    for i, shader in enumerate(self.post_process_shaders):
        # Ping-pong: pass i writes buffer i%2; the next pass reads it back
        output_idx = i % 2
        output_fbo = self.post_fbos[output_idx]
        output_texture = self.post_textures[output_idx]
        # Bind output framebuffer
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, output_fbo)
        gl.glViewport(0, 0, 1920, 1080)
        # Use shader
        gl.glUseProgram(shader)
        # Bind input texture to unit 0 for the shader's "inputTexture" sampler
        gl.glActiveTexture(gl.GL_TEXTURE0)
        gl.glBindTexture(gl.GL_TEXTURE_2D, current_input)
        gl.glUniform1i(gl.glGetUniformLocation(shader, "inputTexture"), 0)
        # Render fullscreen quad
        self.render_fullscreen_quad()
        current_input = output_texture
    # Final output to screen
    gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0)
    gl.glViewport(0, 0, 1920, 1080)
    # NOTE(review): this final blit uses fixed-function texturing
    # (glUseProgram(0) + glEnable(GL_TEXTURE_2D)), which only works on a
    # compatibility profile; the shaders above target #version 330 core —
    # confirm the context profile or use a passthrough shader here.
    gl.glUseProgram(0)
    gl.glActiveTexture(gl.GL_TEXTURE0)
    gl.glBindTexture(gl.GL_TEXTURE_2D, current_input)
    gl.glEnable(gl.GL_TEXTURE_2D)
    # Render final quad
    self.render_fullscreen_quad()
    self.render_ctx.report_swap()
    return True
def render_fullscreen_quad(self):
    """Render a fullscreen quad for post-processing.

    NOTE(review): uses immediate mode (glBegin/glEnd), which requires a
    compatibility OpenGL profile and is invalid in the 3.3 core profile
    the shaders target — confirm the context profile.
    """
    gl.glBegin(gl.GL_QUADS)
    gl.glTexCoord2f(0, 0); gl.glVertex2f(-1, -1)
    gl.glTexCoord2f(1, 0); gl.glVertex2f(1, -1)
    gl.glTexCoord2f(1, 1); gl.glVertex2f(1, 1)
    gl.glTexCoord2f(0, 1); gl.glVertex2f(-1, 1)
    gl.glEnd()
# Usage: build a pipeline with a single bloom post-process pass.
pipeline = CustomRenderPipeline(player)
pipeline.setup_pipeline()

# Add bloom effect shader (GLSL sources handed to compile_shader_program)
bloom_fragment = """
#version 330 core
uniform sampler2D inputTexture;
in vec2 texCoord;
out vec4 fragColor;
void main() {
vec3 color = texture(inputTexture, texCoord).rgb;
// Simple bloom effect
float brightness = dot(color, vec3(0.2126, 0.7152, 0.0722));
if (brightness > 0.8) {
color *= 1.5;
}
fragColor = vec4(color, 1.0);
}
"""
bloom_vertex = """
#version 330 core
layout(location = 0) in vec2 position;
out vec2 texCoord;
void main() {
texCoord = position * 0.5 + 0.5;
gl_Position = vec4(position, 0.0, 1.0);
}
"""
pipeline.add_post_process_shader(bloom_vertex, bloom_fragment)

# Render with custom pipeline
player.play('/path/to/video.mp4')
while True:  # Main loop
    pipeline.render_with_pipeline()
    # Swap buffers, handle events, etc.

Install with Tessl CLI
`npx tessl i tessl/pypi-mpv@1.0.2`