A Python utility belt containing simple tools, a stdlib-like feel, and extra batteries.
Function manipulation, memoization, and enhanced concurrency tools for improving performance and managing function behavior.
Basic function manipulation and utility functions.
def identity(arg):
    """
    Return the input unchanged.

    Args:
        arg: Any input value.

    Returns:
        The same object passed as input.

    Note:
        Useful as a default function argument or in functional programming,
        where a no-op callable is needed.
    """
    return arg
def inject_method(class_, func, name=None):
    """
    Dynamically inject a function into a class as a method.

    Args:
        class_: Target class to modify.
        func: Function to inject as a method; its first parameter
            should accept the instance (``self``).
        name (str): Method name (uses ``func.__name__`` if None).

    Returns:
        The modified class, so calls can be chained.
    """
    if name is None:
        # Default to the function's own name as the attribute name.
        name = func.__name__
    setattr(class_, name, func)
    return class_
def compatible(func, args, kwargs):
    """
    Check if a function can be called with the given arguments.

    Args:
        func: Function to check.
        args: Positional arguments (sequence).
        kwargs: Keyword arguments (mapping).

    Returns:
        bool: True if ``func(*args, **kwargs)`` would bind to the
        function's signature without a TypeError, else False.
    """
    import inspect
    try:
        # bind() raises TypeError when the arguments do not fit.
        inspect.signature(func).bind(*args, **kwargs)
    except TypeError:
        return False
    return True
"""Decorators for caching function results to improve performance.
def memoize(func=None, **kwargs):
    """
    Memoize function results for performance optimization.

    Supports use both as ``@memoize`` and as ``@memoize()``.

    Args:
        func: Function to memoize (when used as @memoize).
        **kwargs: Memoization options (reserved; currently unused).

    Returns:
        Memoized function, exposing ``cache_info()`` and ``cache_clear()``.

    Usage:
        @ub.memoize
        def expensive_function(x):
            return x ** 2
    """
    import functools

    def _decorate(raw_func):
        # An unbounded lru_cache gives hash-keyed O(1) lookups plus the
        # cache_info()/cache_clear() management API used elsewhere.
        return functools.lru_cache(maxsize=None)(raw_func)

    if func is None:
        # Invoked with arguments: @memoize(...)
        return _decorate
    return _decorate(func)
def memoize_method(func=None, **kwargs):
    """
    Memoize instance method results.

    The cache lives on each instance, so different instances do not
    share cached values and instances are not kept alive by a global cache.

    Args:
        func: Method to memoize.
        **kwargs: Memoization options (reserved; currently unused).

    Returns:
        Memoized method.

    Usage:
        class MyClass:
            @ub.memoize_method
            def expensive_method(self, x):
                return x ** 2
    """
    import functools

    def _decorate(raw_method):
        cache_attr = '_cache__' + raw_method.__name__

        @functools.wraps(raw_method)
        def _memoized(self, *args, **kw):
            cache = getattr(self, cache_attr, None)
            if cache is None:
                cache = {}
                setattr(self, cache_attr, cache)
            # Key on all arguments; kwargs sorted so order does not matter.
            key = (args, tuple(sorted(kw.items())))
            if key not in cache:
                cache[key] = raw_method(self, *args, **kw)
            return cache[key]
        return _memoized

    if func is None:
        return _decorate
    return _decorate(func)
def memoize_property(func=None, **kwargs):
    """
    Memoize property computation.

    The computed value is stored on the instance on first access, so the
    getter body runs at most once per instance.

    Args:
        func: Property getter to memoize.
        **kwargs: Memoization options (reserved; currently unused).

    Returns:
        A read-only ``property`` whose getter caches its result.

    Usage:
        class MyClass:
            @ub.memoize_property
            def expensive_property(self):
                return expensive_computation()
    """
    import functools

    def _decorate(raw_getter):
        cache_attr = '_cache__' + raw_getter.__name__

        @functools.wraps(raw_getter)
        def _getter(self):
            if not hasattr(self, cache_attr):
                setattr(self, cache_attr, raw_getter(self))
            return getattr(self, cache_attr)
        return property(_getter)

    if func is None:
        return _decorate
    return _decorate(func)
"""Enhanced concurrency tools for parallel execution and job management.
class Executor:
    """
    Enhanced executor interface wrapping concurrent.futures.

    Provides a simplified, uniform API for parallel execution with
    either a thread-based or process-based backend.
    """

    def __init__(self, mode='thread', max_workers=None, **kwargs):
        """
        Args:
            mode (str): Execution mode ('thread', 'process')
            max_workers (int): Maximum number of workers
            **kwargs: Additional options forwarded to the backend executor

        Raises:
            ValueError: if ``mode`` is not 'thread' or 'process'.
        """
        import concurrent.futures
        if mode == 'thread':
            backend_cls = concurrent.futures.ThreadPoolExecutor
        elif mode == 'process':
            backend_cls = concurrent.futures.ProcessPoolExecutor
        else:
            raise ValueError('mode must be "thread" or "process", got {!r}'.format(mode))
        self.backend = backend_cls(max_workers=max_workers, **kwargs)

    def submit(self, func, *args, **kwargs):
        """Submit function for execution; returns a Future."""
        return self.backend.submit(func, *args, **kwargs)

    def map(self, func, *iterables, **kwargs):
        """Map function over iterables in parallel; returns an iterator of results."""
        return self.backend.map(func, *iterables, **kwargs)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Block until pending work finishes, then release the workers.
        self.backend.shutdown(wait=True)
class JobPool:
    """
    Job pool for managing concurrent tasks with progress tracking.

    Jobs are remembered in submission order so ``collect`` returns
    results in the same order they were submitted.
    """

    def __init__(self, mode='thread', max_workers=None):
        import concurrent.futures
        if mode == 'thread':
            pool_cls = concurrent.futures.ThreadPoolExecutor
        elif mode == 'process':
            pool_cls = concurrent.futures.ProcessPoolExecutor
        else:
            raise ValueError('mode must be "thread" or "process", got {!r}'.format(mode))
        self.executor = pool_cls(max_workers=max_workers)
        self.jobs = []

    def submit(self, func, *args, **kwargs):
        """Submit job to pool; returns the underlying Future."""
        job = self.executor.submit(func, *args, **kwargs)
        self.jobs.append(job)
        return job

    def collect(self, show_progress=True):
        """Collect all job results in submission order.

        Args:
            show_progress (bool): if True, print a simple progress line
                as each job finishes.

        Returns:
            list: the result of every submitted job.
        """
        results = []
        total = len(self.jobs)
        for index, job in enumerate(self.jobs, start=1):
            results.append(job.result())
            if show_progress:
                print('collected {}/{} jobs'.format(index, total))
        return results

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.executor.shutdown(wait=True)
def __exit__(self, exc_type, exc_val, exc_tb): ...
import ubelt as ub
# ub.identity simply echoes its argument back
value = ub.identity(42)
print(value)  # 42

# Handy in functional pipelines that require a callable
data = [1, 2, 3, 4, 5]
filtered = filter(lambda n: n > 3, data)

# Mapping through identity leaves the elements untouched
value = list(map(ub.identity, filtered))
print(value)  # [4, 5]

# Identity also serves as a do-nothing default transform
def process_data(data, transform=ub.identity):
    """Process data with optional transformation"""
    return [transform(item) for item in data]

# Default: items pass through unchanged
result1 = process_data([1, 2, 3])
print(result1)  # [1, 2, 3]

# Supply a transform to modify every item
result2 = process_data([1, 2, 3], transform=lambda x: x * 2)
print(result2)  # [2, 4, 6]

import ubelt as ub
# A class we will extend at runtime
class Calculator:
    def __init__(self, value=0):
        self.value = value

    def add(self, x):
        self.value += x
        return self

# Standalone functions destined to become methods
def multiply(self, x):
    self.value *= x
    return self

def power(self, exp):
    self.value = self.value ** exp
    return self

# Graft both functions onto Calculator
for extra_method in (multiply, power):
    ub.inject_method(Calculator, extra_method)

# Injected methods chain just like native ones
calc = Calculator(5)
result = calc.add(3).multiply(2).power(2)
print(calc.value)  # ((5 + 3) * 2) ** 2 = 256

import ubelt as ub
def func_with_args(a, b, c=10):
    return a + b + c

def func_with_kwargs(x, **kwargs):
    return x + sum(kwargs.values())

# Probe whether a call signature would bind
args1 = (1, 2)
kwargs1 = {'c': 5}
compatible1 = ub.compatible(func_with_args, args1, kwargs1)
print(f"Compatible with args {args1}, kwargs {kwargs1}: {compatible1}")  # True

args2 = (1,)  # Missing required argument 'b'
compatible2 = ub.compatible(func_with_args, args2, {})
print(f"Compatible with args {args2}: {compatible2}")  # False

# Guarded invocation: only call when the signature fits
def safe_call(func, *args, **kwargs):
    return func(*args, **kwargs) if ub.compatible(func, args, kwargs) else None

result = safe_call(func_with_args, 1, 2, c=3)
print(f"Safe call result: {result}")  # 6

import ubelt as ub
import time

# Basic function memoization
@ub.memoize
def fibonacci(n):
    if n <= 1:
        return n
    time.sleep(0.01)  # Simulate expensive computation
    return fibonacci(n-1) + fibonacci(n-2)

# The cold call pays the full cost
t0 = time.time()
result1 = fibonacci(10)
time1 = time.time() - t0
print(f"First call: {result1} in {time1:.3f}s")

# The warm call is served from the cache
t0 = time.time()
result2 = fibonacci(10)
time2 = time.time() - t0
print(f"Memoized call: {result2} in {time2:.6f}s")

# Per-instance method memoization
class DataProcessor:
    def __init__(self, data):
        self.data = data

    @ub.memoize_method
    def expensive_analysis(self, threshold):
        """Expensive analysis that should be cached"""
        print(f"Performing analysis with threshold {threshold}")
        time.sleep(0.1)  # Simulate work
        return sum(x for x in self.data if x > threshold)

processor = DataProcessor([1, 5, 10, 15, 20, 25])
result1 = processor.expensive_analysis(10)  # Slow first call
print(f"Analysis result: {result1}")
result2 = processor.expensive_analysis(10)  # Fast repeat call
print(f"Cached result: {result2}")

import ubelt as ub
import time

class ExpensiveCalculation:
    def __init__(self, data):
        self.data = data

    @ub.memoize_property
    def mean(self):
        """Expensive mean calculation"""
        print("Computing mean...")
        time.sleep(0.1)  # Simulate expensive computation
        return sum(self.data) / len(self.data)

    @ub.memoize_property
    def variance(self):
        """Expensive variance calculation"""
        print("Computing variance...")
        time.sleep(0.1)
        mu = self.mean  # Reuses the memoized mean property
        return sum((x - mu) ** 2 for x in self.data) / len(self.data)

calc = ExpensiveCalculation(list(range(1, 11)))
print(f"Mean: {calc.mean}")  # First access computes and caches
print(f"Variance: {calc.variance}")  # Computes once, reusing the cached mean
print(f"Mean again: {calc.mean}")  # Served straight from the cache

import ubelt as ub
import time

def slow_function(x):
    """Simulate slow computation"""
    time.sleep(0.1)
    return x ** 2

# Baseline: run all ten calls one after another
t0 = time.time()
results_sequential = [slow_function(i) for i in range(10)]
sequential_time = time.time() - t0
print(f"Sequential: {sequential_time:.3f}s")

# Same workload fanned out across four worker threads
t0 = time.time()
with ub.Executor(mode='thread', max_workers=4) as executor:
    results_parallel = list(executor.map(slow_function, range(10)))
parallel_time = time.time() - t0
print(f"Parallel: {parallel_time:.3f}s")
print(f"Speedup: {sequential_time/parallel_time:.2f}x")

import ubelt as ub
import time
import random

def variable_work(task_id):
    """Task with variable execution time"""
    work_time = random.uniform(0.1, 0.5)
    time.sleep(work_time)
    return f"Task {task_id} completed in {work_time:.2f}s"

# Queue ten jobs onto three worker threads
with ub.JobPool(mode='thread', max_workers=3) as pool:
    for job_index in range(10):
        pool.submit(variable_work, job_index)
    # Gather every result, showing progress while waiting
    results = pool.collect(show_progress=True)

print("Results:")
for result in results:
    print(f" {result}")

import ubelt as ub
import time
import requests
from concurrent.futures import as_completed

def fetch_url(url):
    """Fetch URL with error handling"""
    try:
        response = requests.get(url, timeout=5)
        return {'url': url, 'status': response.status_code, 'size': len(response.content)}
    except Exception as e:
        return {'url': url, 'error': str(e)}

# Endpoints with a mix of delays, statuses, and payloads
urls = [
    'https://httpbin.org/delay/1',
    'https://httpbin.org/delay/2',
    'https://httpbin.org/status/200',
    'https://httpbin.org/status/404',
    'https://httpbin.org/json',
]

# Fan the downloads out over three threads and react to each as it lands
with ub.Executor(mode='thread', max_workers=3) as executor:
    future_to_url = {executor.submit(fetch_url, url): url for url in urls}
    progress = ub.ProgIter(as_completed(future_to_url), total=len(urls), desc='Fetching')
    for future in progress:
        url = future_to_url[future]
        try:
            result = future.result()
        except Exception as e:
            print(f"Exception for {url}: {e}")
        else:
            if 'error' in result:
                print(f"Error fetching {url}: {result['error']}")
            else:
                print(f"Success {url}: {result['status']} ({result['size']} bytes)")

import ubelt as ub
import time

# Memoization caches results keyed by the call arguments
@ub.memoize
def expensive_function(x, y):
    print(f"Computing {x} + {y}")
    time.sleep(0.1)
    return x + y

def reset_cache():
    """Drop every cached result of expensive_function."""
    expensive_function.cache_clear()

# Exercise the cache
result1 = expensive_function(1, 2)  # Computes
result2 = expensive_function(1, 2)  # Served from cache
result3 = expensive_function(3, 4)  # New arguments: computes
print(f"Cache info: {expensive_function.cache_info()}")

# Clearing forces the next identical call to recompute
reset_cache()
result4 = expensive_function(1, 2) # Computes again

Install with Tessl CLI
npx tessl i tessl/pypi-ubelt