CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/pypi-pylibmc

Quick and small memcached client for Python

86

1.03x
Overview
Eval results
Files

connection-pooling.mddocs/

Connection Pooling

Thread-safe connection pooling implementations for high-concurrency applications. pylibmc provides two pooling strategies: queue-based pooling for shared access and thread-mapped pooling for per-thread client isolation.

Capabilities

ClientPool

Queue-based client pool that maintains a pool of client connections for thread-safe sharing. Uses a queue to manage client instances and provides context manager support for automatic resource management.

class ClientPool:
    """
    Queue-based client pool for thread-safe memcached access.
    Inherits from queue.Queue for thread-safe client management.
    """

    # NOTE: all methods take `self` — the original stubs omitted it, which
    # made the documented calls (e.g. pool.reserve(block=False)) impossible.
    def __init__(self, mc=None, n_slots: int = 0):
        """
        Initialize client pool.

        Parameters:
        - mc: Master client to clone for pool (optional)
        - n_slots (int): Number of client slots in pool
        """

    def reserve(self, block: bool = False):
        """
        Context manager for reserving a client from the pool.

        Parameters:
        - block (bool): Whether to block if pool is empty

        Returns:
        Context manager yielding a client instance

        Raises:
        - queue.Empty: If block=False and no clients available
        """

    def fill(self, mc, n_slots: int):
        """
        Fill the pool with cloned clients.

        Parameters:
        - mc: Master client to clone
        - n_slots (int): Number of client instances to create
        """

    def put(self, client):
        """
        Return a client to the pool.

        Parameters:
        - client: Client instance to return
        """

    def get(self, block: bool = True):
        """
        Get a client from the pool.

        Parameters:
        - block (bool): Whether to block if pool is empty

        Returns:
        Client instance from pool

        Raises:
        - queue.Empty: If block=False and no clients available
        """

ThreadMappedPool

Thread-mapped client pool that maintains one client per thread. Automatically creates clients for new threads and provides thread-local client access without explicit queuing.

class ThreadMappedPool(dict):
    """
    Thread-mapped client pool with per-thread client instances.
    Inherits from dict for thread-to-client mapping.
    """

    # NOTE: all methods take `self` — the original stubs omitted it, which
    # made instantiation (ThreadMappedPool(master)) raise TypeError.
    def __init__(self, master):
        """
        Initialize thread-mapped pool.

        Parameters:
        - master: Master client to clone for each thread
        """

    def reserve(self):
        """
        Context manager for reserving the current thread's client.
        Creates a new client for the thread if none exists.

        Returns:
        Context manager yielding thread-local client instance
        """

    def relinquish(self):
        """
        Release the current thread's client from the pool.
        Should be called before thread exit to prevent memory leaks.

        Returns:
        The client instance that was released, or None
        """

    @property
    def current_key(self):
        """
        Get the current thread's identifier key.

        Returns:
        Thread identifier used as pool key
        """

Pool Management Functions

Utility functions for pool administration and monitoring.

def clone():
    """
    Return a new client configured identically to this one.

    The pooling classes rely on this internally to populate
    themselves with independent client instances.

    Returns:
    New client instance with identical configuration
    """

Usage Examples

ClientPool Usage

import pylibmc
from queue import Empty

# Create master client
master_client = pylibmc.Client(["localhost:11211"], binary=True)
master_client.behaviors = {"tcp_nodelay": True, "ketama": True}

# Create and fill pool
pool = pylibmc.ClientPool()
pool.fill(master_client, 10)  # 10 client instances in pool

# Use pool in application
def handle_request():
    """Serve one request with a pooled client; returns None if pool is empty."""
    try:
        # block=False raises queue.Empty immediately when the pool is exhausted
        with pool.reserve(block=False) as client:
            client.set("request:123", "data")
            return client.get("request:123")
    except Empty:
        print("No clients available, consider increasing pool size")
        return None

# Alternative: manual client management
client = pool.get(block=True)  # Get client, blocking if needed
try:
    client.set("manual", "value")
    result = client.get("manual")
finally:
    pool.put(client)  # Always return client to pool

ThreadMappedPool Usage

import pylibmc
import threading

# Create master client
master_client = pylibmc.Client(["localhost:11211"], binary=True)
master_client.behaviors = {"tcp_nodelay": True}

# Create thread-mapped pool
pool = pylibmc.ThreadMappedPool(master_client)

def worker_thread(thread_id):
    """Worker function that uses thread-local client."""
    try:
        # Each thread gets its own client automatically
        with pool.reserve() as client:
            client.set(f"thread:{thread_id}", f"data from {thread_id}")
            result = client.get(f"thread:{thread_id}")
            print(f"Thread {thread_id}: {result}")
    finally:
        # Clean up when thread exits — without relinquish() the pool keeps
        # the dead thread's client alive (see relinquish() in the API above)
        pool.relinquish()

# Start multiple threads
threads = []
for i in range(5):
    t = threading.Thread(target=worker_thread, args=(i,))
    threads.append(t)
    t.start()

# Wait for threads to complete
for t in threads:
    t.join()

Pool Comparison

import pylibmc
import threading
import time

master_client = pylibmc.Client(["localhost:11211"])

# ClientPool: Good for limiting total connections
client_pool = pylibmc.ClientPool()
client_pool.fill(master_client, 5)  # Max 5 concurrent connections

# ThreadMappedPool: Good for per-thread isolation  
thread_pool = pylibmc.ThreadMappedPool(master_client)

def benchmark_client_pool():
    """Shared pool with potential blocking."""
    # NOTE(review): per the API above, reserve() defaults to block=False and
    # raises queue.Empty when exhausted; pass block=True to actually block.
    with client_pool.reserve() as client:
        client.set("test", "value")
        time.sleep(0.1)  # Simulate work
        return client.get("test")

def benchmark_thread_pool():
    """Per-thread client, no blocking."""
    with thread_pool.reserve() as client:
        client.set("test", "value")
        time.sleep(0.1)  # Simulate work
        return client.get("test")

# ClientPool may block if all 5 clients are busy
# ThreadMappedPool creates new client per thread (no limit)

Advanced Pool Configuration

import pylibmc
from queue import Empty

# Custom pool size based on workload
master_client = pylibmc.Client(["localhost:11211"], binary=True)
master_client.behaviors = {
    "tcp_nodelay": True,
    "ketama": True,
    "no_block": True,  # Non-blocking I/O
    "connect_timeout": 5000,  # 5 second timeout
    "retry_timeout": 30  # 30 second retry timeout
}

# Size pool based on expected concurrent requests
expected_concurrency = 20
pool = pylibmc.ClientPool(master_client, expected_concurrency)

# Monitor pool usage (ClientPool inherits from queue.Queue, so its
# introspection methods are available directly on the pool)
def get_pool_stats():
    """Get current pool utilization."""
    return {
        "pool_size": pool.qsize(),
        "clients_available": not pool.empty(),
        "pool_full": pool.full()
    }

# Use pool with error handling
def safe_cache_operation(key, value=None):
    """Perform cache operation with proper error handling."""
    try:
        with pool.reserve(block=False) as client:
            if value is not None:
                return client.set(key, value)
            else:
                return client.get(key)
    except Empty:
        # queue.Empty is a module-level exception in the queue module;
        # it is NOT an attribute of queue.Queue, so the previous
        # `except Queue.Empty:` raised AttributeError instead of handling
        # pool exhaustion.
        print("Pool exhausted - consider increasing pool size")
        return None
    except pylibmc.Error as e:
        print(f"Cache operation failed: {e}")
        return None

Pool Selection Guidelines

Use ClientPool When:

  • You want to limit total number of connections to memcached
  • Memory usage is a concern (fixed number of clients)
  • You have predictable, moderate concurrency levels
  • You can tolerate occasional blocking when pool is exhausted

Use ThreadMappedPool When:

  • You have high, unpredictable concurrency
  • Each thread does significant work with the client
  • You want to avoid blocking entirely
  • Memory usage for additional clients is acceptable
  • Thread lifetime is predictable for proper cleanup

Performance Considerations:

  • ClientPool: Lower memory usage, potential blocking, queue overhead
  • ThreadMappedPool: Higher memory usage, no blocking, thread-local access
  • Both pools use client cloning which preserves all behaviors and settings
  • Pool overhead is minimal compared to memcached network operations

Install with Tessl CLI

npx tessl i tessl/pypi-pylibmc

docs

client-operations.md

configuration.md

connection-pooling.md

error-handling.md

index.md

tile.json