Creates Metal shaders, builds instanced 3D rendering pipelines, optimizes GPU performance, and implements spatial computing interactions for macOS and visionOS/Vision Pro. Handles Compositor Services stereo frame streaming, RemoteImmersiveSpace setup, gaze and pinch gesture recognition, and GPU-based physics for graph layout. Use when the user asks about Metal shaders, .metal files, 3D rendering on macOS, GPU programming, visionOS or Vision Pro development, spatial computing, RealityKit, SceneKit, AR, augmented reality, instanced rendering, render pipeline optimization, or stereoscopic output.
90
88%
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Passed
No known issues
Native Swift and Metal specialist for building high-performance 3D rendering systems and spatial computing experiences targeting macOS and visionOS/Vision Pro.
// Core Metal rendering — instanced node + edge rendering
class MetalGraphRenderer {
    private let device: MTLDevice
    private let commandQueue: MTLCommandQueue
    private var nodePipelineState: MTLRenderPipelineState
    private var edgePipelineState: MTLRenderPipelineState
    private var depthState: MTLDepthStencilState

    /// Per-instance data uploaded to the GPU.
    /// NOTE(review): field order/alignment must match the vertex shader's
    /// instance struct — confirm against the .metal source.
    struct NodeInstance {
        var position: SIMD3<Float>   // World-space node position
        var color: SIMD4<Float>      // RGBA
        var scale: Float             // Uniform scale of the node quad
        var symbolId: UInt32         // Presumably an atlas/symbol index — confirm against shader
    }

    // GPU buffers (triple-buffered for smooth updates).
    // NOTE(review): triple buffering normally pairs with a DispatchSemaphore
    // signaled from a command-buffer completion handler; confirm init sets one up.
    private var nodeBuffers: [MTLBuffer]   // Per-instance node data, one buffer per in-flight frame
    private var edgeBuffer: MTLBuffer      // Edge connections (2 vertices per edge)
    private var uniformBuffer: MTLBuffer   // View/projection matrices.
                                           // NOTE(review): single-buffered while node data is
                                           // triple-buffered — a per-in-flight-frame slice would
                                           // avoid a potential CPU/GPU write race.
    private var currentBufferIndex = 0

    /// Encodes and presents one frame: uniform upload, instanced node quads,
    /// then edge line segments.
    /// - Parameters:
    ///   - nodes: Graph nodes; only `nodes.count` is read here (per-instance
    ///     data is expected to already live in `nodeBuffers`).
    ///   - edges: Graph edges; only `edges.count` is read here (vertex data is
    ///     expected to already live in `edgeBuffer`).
    ///   - camera: Supplies the view/projection matrices written to `uniformBuffer`.
    func render(nodes: [GraphNode], edges: [GraphEdge], camera: Camera) {
        // Fix: acquire the drawable up front — the original presented an
        // undefined `drawable`. Bail out early if nothing is available.
        guard let drawable = view.currentDrawable,
              let descriptor = view.currentRenderPassDescriptor,
              let commandBuffer = commandQueue.makeCommandBuffer(),
              let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: descriptor) else { return }

        // Upload this frame's uniforms.
        var uniforms = Uniforms(
            viewMatrix: camera.viewMatrix,
            projectionMatrix: camera.projectionMatrix,
            time: CACurrentMediaTime()
        )
        uniformBuffer.contents().copyMemory(from: &uniforms, byteCount: MemoryLayout<Uniforms>.stride)

        // Fix: bind the depth/stencil state — it was created but never
        // attached, so depth testing silently never ran.
        encoder.setDepthStencilState(depthState)

        // Draw instanced nodes: one 4-vertex triangle strip (a quad) per node.
        encoder.setRenderPipelineState(nodePipelineState)
        encoder.setVertexBuffer(nodeBuffers[currentBufferIndex], offset: 0, index: 0)
        encoder.setVertexBuffer(uniformBuffer, offset: 0, index: 1)
        encoder.drawPrimitives(type: .triangleStrip, vertexStart: 0,
                               vertexCount: 4, instanceCount: nodes.count)

        // Draw edges as independent line segments. Buffer index 1 (uniforms)
        // stays bound from the node pass — encoder bindings persist across
        // pipeline-state changes.
        encoder.setRenderPipelineState(edgePipelineState)
        encoder.setVertexBuffer(edgeBuffer, offset: 0, index: 0)
        encoder.drawPrimitives(type: .line, vertexStart: 0, vertexCount: edges.count * 2)

        encoder.endEncoding()
        commandBuffer.present(drawable)
        commandBuffer.commit()

        // Fix: derive the ring length from the buffer array instead of a
        // hard-coded 3, so the buffering depth can change in one place.
        currentBufferIndex = (currentBufferIndex + 1) % nodeBuffers.count
    }
}

Avoid `.storageModePrivate` Metal resources for frequently updated data (private storage cannot be written directly from the CPU).

Note: The Compositor Services API surface evolves across visionOS releases. Treat the structure below as representative pseudocode; validate method signatures against the current SDK before use.
// Compositor Services stereo frame streaming (pseudocode — verify against current SDK)
import CompositorServices
/// Streams stereo frames to Vision Pro through Compositor Services.
/// Pseudocode: the Compositor Services API surface changes between visionOS
/// releases — verify every signature against the current SDK before use.
class VisionProCompositor {
    private let layerRenderer: LayerRenderer

    init() async throws {
        // Configure for stereo output: RGBA16Float color, Depth32Float depth.
        let config = LayerRenderer.Configuration()
        layerRenderer = try await LayerRenderer(configuration: config)
    }

    /// Submits one stereo frame (left/right eye color plus optional depth).
    func streamFrame(leftEye: MTLTexture, rightEye: MTLTexture, depth: MTLTexture?) async throws {
        // No frame available this cycle — skip rather than block.
        guard let frame = layerRenderer.queryNextFrame() else { return }
        frame.startSubmission()
        // Populate the per-eye drawable textures via LayerRenderer.Drawable.
        // Actual texture assignment uses the drawable's color and depth
        // textures obtained from frame.drawable — see the CompositorServices docs.
        frame.endSubmission()
    }
}

// Gaze and pinch gesture handling for Vision Pro
/// Translates Vision Pro gaze rays and pinch gestures into node selections.
class SpatialInteractionHandler {
    /// Result of a ray/scene intersection test.
    struct RaycastHit {
        let nodeId: String
        let distance: Float
        let worldPosition: SIMD3<Float>
    }

    /// Returns the nearest hit along the gaze ray, or nil when nothing is hit.
    func handleGaze(origin: SIMD3<Float>, direction: SIMD3<Float>) -> RaycastHit? {
        let candidates = performGPURaycast(origin: origin, direction: direction)
        return candidates.min { $0.distance < $1.distance }
    }

    /// Drives the begin / drag / commit lifecycle of a pinch gesture.
    func handlePinch(location: SIMD3<Float>, state: GestureState) {
        switch state {
        case .began:
            guard let hit = raycastAtLocation(location) else { return }
            beginSelection(nodeId: hit.nodeId)
        case .changed:
            updateSelection(location: location)
        case .ended:
            guard let node = currentSelection else { return }
            delegate?.didSelectNode(node)
        }
    }
}

// Force-directed layout kernel — runs entirely on GPU
// Force-directed layout: one thread per node; O(n) work per thread
// (O(n^2) total) for all-pairs repulsion plus a scan over all edges.
kernel void updateGraphLayout(
    device Node* nodes [[buffer(0)]],
    device Edge* edges [[buffer(1)]],
    constant Params& params [[buffer(2)]],
    uint id [[thread_position_in_grid]])
{
    if (id >= params.nodeCount) return;

    float3 force = float3(0);
    Node node = nodes[id];

    // NOTE(review): this kernel reads neighbour positions from the same buffer
    // it writes, so threads may observe a mix of old and new positions within
    // one pass. For deterministic results, double-buffer positions
    // (read buffer A, write buffer B) and swap between dispatches.

    // Repulsion between all node pairs (softened inverse-square).
    for (uint i = 0; i < params.nodeCount; i++) {
        if (i == id) continue;
        float3 diff = node.position - nodes[i].position;
        float dist = length(diff);
        float repulsion = params.repulsionStrength / (dist * dist + 0.1);
        // Fix: normalize() of a zero vector (coincident nodes) produces NaN
        // and poisons the whole simulation; divide by a clamped length instead.
        force += diff / max(dist, 1e-4f) * repulsion;
    }

    // Attraction along edges: linear spring proportional to distance.
    // normalize(diff) * length(diff) == diff, and using diff directly also
    // avoids the NaN at zero distance.
    for (uint i = 0; i < params.edgeCount; i++) {
        Edge edge = edges[i];
        if (edge.source == id) {
            force += (nodes[edge.target].position - node.position) * params.attractionStrength;
        } else if (edge.target == id) {
            // Fix: pull BOTH endpoints — the original only moved the source
            // node, skewing the layout toward edge targets.
            force += (nodes[edge.source].position - node.position) * params.attractionStrength;
        }
    }

    // Integrate: semi-implicit Euler with velocity damping.
    node.velocity = node.velocity * params.damping + force * params.deltaTime;
    node.position += node.velocity * params.deltaTime;
    nodes[id] = node;
}

Memory-management tips:
- Use `.storageModeShared` buffers for CPU→GPU transfers; private for GPU-only data.
- Reuse MTLBuffer and MTLTexture instances; avoid per-frame allocation.
- Use MTLHeap for sub-allocating related resources together.
- Capture `[weak self]` in command buffer completion handlers.

# Generate Xcode project with Metal support
xcodegen generate --spec project.yml
# Required frameworks: Metal, MetalKit, CompositorServices, RealityKit

Validation checkpoint: Enable the Metal validation layer (MTL_DEBUG_LAYER=1) and confirm all pipeline state objects compile without errors before proceeding.
Validation checkpoint: Use Metal System Trace in Instruments to confirm sustained 90 fps with target node counts before adding Vision Pro integration.
Validation checkpoint: Confirm stereo frame timing with Metal System Trace; verify no frame drops under ARKit hand-tracking load.
Validation checkpoint: Run a full Instruments session (Metal System Trace + Allocations) — frame time ≤11.1 ms for 25k nodes, memory ≤1 GB.
Additional techniques available when needed: indirect command buffers (GPU-driven rendering to eliminate CPU draw-call overhead), mesh shaders (geometry amplification for dynamic topology), variable rate shading (foveated rendering via eye-tracking), hardware ray tracing (shadows and ambient occlusion on Apple Silicon), ARKit environment mapping (spatial anchors for persistent layouts), USD import/export, SharePlay (collaborative visualization across devices), and Continuity/Handoff for cross-device input.
010799b
If you maintain this skill, you can claim it as your own. Once claimed, you can manage eval scenarios, bundle related skills, attach documentation or rules, and ensure cross-agent compatibility.