CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/pypi-asyncstdlib

The missing async toolbox - re-implements functions and classes of the Python standard library to make them compatible with async callables, iterables and context managers

84

3.36x

Quality

Pending

Does it follow best practices?

Impact

84%

3.36x

Average score across 10 eval scenarios

Overview
Eval results
Files

docs/functools.md

Functional Programming

Async versions of functools utilities including reduction, caching, and property decorators designed for async functions and methods. These tools enable functional programming patterns in async contexts.

Capabilities

Reduction Operations

Apply functions cumulatively to items in async iterables.

async def reduce(function, iterable, initial=None):
    """
    Apply function cumulatively to items in iterable.

    Parameters:
    - function: Callable[[T1, T2], T1] - Binary function to apply; an
      async (awaitable-returning) function is also accepted
    - iterable: AnyIterable[T2] - Async or plain iterable to reduce
    - initial: T1, optional - Initial value

    Returns:
    T1 - Final accumulated value

    Raises:
    TypeError - If iterable is empty and no initial value provided

    NOTE(review): with this signature ``initial=None`` means "no initial
    value", so ``None`` itself cannot serve as the seed; the upstream
    library uses a private sentinel to allow that — confirm against the
    real asyncstdlib signature.
    """
    if not hasattr(iterable, "__aiter__"):
        # Accept plain iterables too (AnyIterable): wrap them so the
        # reduction below only has to deal with the async protocol.
        async def _wrap(items):
            for item in items:
                yield item

        iterable = _wrap(iterable)
    iterator = iterable.__aiter__()
    if initial is None:
        # No seed given: the first item becomes the accumulator.
        try:
            accumulator = await iterator.__anext__()
        except StopAsyncIteration:
            raise TypeError(
                "reduce() of empty iterable with no initial value"
            ) from None
    else:
        accumulator = initial
    # Fold the remaining items left-to-right.
    async for item in iterator:
        step = function(accumulator, item)
        # Support async reducer functions transparently.
        accumulator = await step if inspect.isawaitable(step) else step
    return accumulator

Usage example:

async def reduction_example():
    # Async source yielding the integers 1 through 5.
    async def numbers():
        for value in (1, 2, 3, 4, 5):
            yield value

    # Calculate factorial: 1 * 2 * 3 * 4 * 5
    factorial = await reduce(lambda x, y: x * y, numbers())  # 120

    # Sum with initial value
    total = await reduce(lambda x, y: x + y, numbers(), 10)  # 25

Caching Decorators

Memoization decorators for async functions with LRU (Least Recently Used) eviction.

def lru_cache(maxsize=128, typed=False):
    """
    Decorate an async callable with a Least-Recently-Used result cache.

    Parameters:
    - maxsize: int or None - Upper bound on cached entries; ``None``
      disables eviction entirely (unbounded cache)
    - typed: bool - When True, arguments that compare equal but differ
      in type (e.g. ``1`` and ``1.0``) get separate cache entries

    Returns:
    Decorator function that wraps async callables with caching

    Usage:
    @lru_cache(maxsize=256)
    async def expensive_async_operation(param):
        # ... expensive computation
        return result
    """

def cache(user_function):
    """
    Wrap an async callable with an unbounded result cache.

    Behaves like ``lru_cache(maxsize=None)``: every distinct argument
    combination is remembered for the lifetime of the wrapper.

    Parameters:
    - user_function: Async callable to cache

    Returns:
    Cached version of the function

    Usage:
    @cache
    async def compute_value(x, y):
        # ... expensive computation
        return result
    """

Cached Properties

Property descriptors that cache async computation results.

class CachedProperty:
    """
    Cached property descriptor for async getters.

    Computes the property value once via the async getter and serves
    subsequent accesses from the per-instance cache.
    """

    def __init__(self, getter, lock_type=...):
        """
        Parameters:
        - getter: Callable[[T], Awaitable[R]] - Async getter function
        - lock_type: type[AsyncContextManager[Any]], optional - Lock type
          used to guard concurrent access (e.g. ``asyncio.Lock``); this
          guards concurrent *tasks*, not OS threads
        """

    def __set_name__(self, owner, name):
        """Set the attribute name on the owner class."""

    def __get__(self, instance, owner):
        """
        Get cached property value.

        Returns:
        Awaitable[R] - Awaitable that resolves to cached value
        """

    def __set__(self, instance, value):
        """Set cached value directly."""

    def __delete__(self, instance):
        """
        Delete cached value.

        Fixed: the data-descriptor deletion hook is ``__delete__``, not
        ``__del__`` — ``__del__`` is the garbage-collection finalizer and
        takes no ``instance`` argument, so the original signature could
        never be invoked correctly.
        """

def cached_property(getter):
    """
    Turn an async getter into a per-instance cached property.

    Parameters:
    - getter: Callable[[T], Awaitable[R]] - Async getter function

    Returns:
    CachedProperty[T, R] - Cached property descriptor

    Usage:
    class MyClass:
        @cached_property
        async def expensive_property(self):
            # ... expensive async computation
            return result
    """

Usage Examples

Caching Expensive Operations

from asyncstdlib import lru_cache, cache
import aiohttp

@lru_cache(maxsize=100)
async def fetch_user_data(user_id):
    """Fetch a user's data once; later calls are served from the cache."""
    async with aiohttp.ClientSession() as session:
        request = session.get(f"/api/users/{user_id}")
        async with request as response:
            return await response.json()

@cache
async def compute_fibonacci(n):
    """Memoized Fibonacci: recursion never recomputes an index (unbounded cache)."""
    if n > 1:
        # Recursive calls hit the cache for already-computed indices.
        return await compute_fibonacci(n - 1) + await compute_fibonacci(n - 2)
    return n

async def caching_example():
    # The first lookup actually hits the API...
    first_fetch = await fetch_user_data("123")

    # ...while an identical second lookup is answered from the cache.
    repeat_fetch = await fetch_user_data("123")

    # Memoized recursive Fibonacci.
    fib_value = await compute_fibonacci(10)  # 55

Cached Properties

import asyncio
from asyncstdlib import cached_property

class DataProcessor:
    """Wraps a data source and lazily derives cached views of it."""

    def __init__(self, data_source):
        self.data_source = data_source

    @cached_property
    async def processed_data(self):
        """Expensive data processing, computed once and then cached."""
        print("Processing data...")  # Only prints once
        await asyncio.sleep(1)  # Simulate expensive operation
        uppercased = []
        for entry in self.data_source:
            uppercased.append(entry.upper())
        return uppercased

    @cached_property
    async def data_summary(self):
        """Summary derived from the (cached) processed data."""
        data = await self.processed_data  # Uses cached value
        if data:
            first_item, last_item = data[0], data[-1]
        else:
            first_item = last_item = None
        return {"count": len(data), "first": first_item, "last": last_item}

async def property_example():
    processor = DataProcessor(["hello", "world", "async"])

    # The first read triggers the computation and caches the result.
    first_read = await processor.processed_data  # "Processing data..." printed

    # Subsequent reads come straight from the cache.
    second_read = await processor.processed_data  # No print, returns cached value

    # data_summary reuses the cached processed_data value.
    summary = await processor.data_summary
    print(summary)  # {'count': 3, 'first': 'HELLO', 'last': 'ASYNC'}

Advanced Reduction Operations

async def advanced_reduction():
    # Async stream of ledger entries.
    async def transactions():
        for tx in (
            {"type": "deposit", "amount": 100},
            {"type": "withdrawal", "amount": 30},
            {"type": "deposit", "amount": 50},
            {"type": "withdrawal", "amount": 20},
        ):
            yield tx

    # Fold one transaction into the running balance.
    def apply_transaction(balance, transaction):
        delta = transaction["amount"]
        if transaction["type"] == "deposit":
            return balance + delta
        return balance - delta

    final_balance = await reduce(apply_transaction, transactions(), 0)
    print(f"Final balance: ${final_balance}")  # Final balance: $100

    # Maximum via a custom binary comparison.
    async def values():
        for x in (3, 1, 4, 1, 5, 9, 2, 6):
            yield x

    maximum = await reduce(lambda a, b: a if a > b else b, values())
    print(f"Maximum: {maximum}")  # Maximum: 9

Custom Lock Types for Concurrency Safety (asyncio.Lock guards concurrent tasks, not OS threads)

import asyncio
from asyncstdlib import cached_property

class ThreadSafeProcessor:
    # NOTE: asyncio.Lock serializes access across concurrent *tasks* in one
    # event loop; it is not an OS-thread lock.
    @cached_property(asyncio.Lock)
    async def shared_resource(self):
        """Cached property guarded against concurrent task access."""
        await asyncio.sleep(0.1)
        return "computed value"

# For trio users:
# @cached_property(trio.Lock)
# async def trio_safe_property(self): ...

Install with Tessl CLI

npx tessl i tessl/pypi-asyncstdlib

docs

asynctools.md

builtins.md

contextlib.md

functools.md

heapq.md

index.md

itertools.md

tile.json