Additional building blocks, recipes, and routines for working with Python iterables beyond itertools.
—
Specialized utilities for complex operations and advanced use cases.
Functions for flattening nested structures.
def flatten(iterable: Iterable[Iterable[Any]]) -> Iterator[Any]: ...
def collapse(iterable: Iterable[Any], base_type: type | tuple[type, ...] | None = None, levels: int | None = None) -> Iterator[Any]: ...

Usage:
from more_itertools import flatten, collapse
# Simple flattening of one level
nested = [[1, 2], [3, 4], [5, 6]]
flat = list(flatten(nested)) # [1, 2, 3, 4, 5, 6]
# Deep flattening with collapse
deeply_nested = [1, [2, [3, [4, 5]], 6], 7]
deep_flat = list(collapse(deeply_nested)) # [1, 2, 3, 4, 5, 6, 7]
# Control flattening levels
partially_flat = list(collapse(deeply_nested, levels=1)) # [1, 2, [3, [4, 5]], 6, 7]
# Preserve certain types from flattening
mixed = [1, "hello", [2, 3], "world", [4, 5]]
flat_preserve_str = list(collapse(mixed, base_type=str)) # [1, "hello", 2, 3, "world", 4, 5]

Advanced mapping and transformation functions.
def join_mappings(*mappings: Mapping[Any, Any]) -> dict[Any, list[Any]]: ...
def map_reduce(iterable: Iterable[Any], keyfunc: Callable[[Any], Any], valuefunc: Callable[[Any], Any] | None = None, reducefunc: Callable[[list[Any]], Any] | None = None) -> dict[Any, Any]: ...
def groupby_transform(iterable: Iterable[Any], keyfunc: Callable[[Any], Any] | None = None, valuefunc: Callable[[Any], Any] | None = None, reducefunc: Callable[[list[Any]], Any] = list) -> dict[Any, Any]: ...
NOTE(review): upstream more_itertools.groupby_transform yields (key, result) pairs rather than returning a dict — verify this signature and the examples below against the installed library version.

Usage:
from more_itertools import join_mappings, map_reduce, groupby_transform
# Join multiple mappings
map1 = {'a': 1, 'b': 2}
map2 = {'a': 3, 'c': 4}
map3 = {'b': 5, 'd': 6}
joined = join_mappings(map1, map2, map3)
# {'a': [1, 3], 'b': [2, 5], 'c': [4], 'd': [6]}
# Map-reduce operation
data = ['apple', 'banana', 'apricot', 'blueberry', 'cherry']
# Group by first letter, count items
result = map_reduce(
data,
keyfunc=lambda x: x[0], # Group by first letter
valuefunc=lambda x: 1, # Count each item as 1
reducefunc=sum # Sum the counts
)
# {'a': 2, 'b': 2, 'c': 1}
# Group and transform
students = [
{'name': 'Alice', 'grade': 85, 'subject': 'math'},
{'name': 'Bob', 'grade': 90, 'subject': 'math'},
{'name': 'Alice', 'grade': 78, 'subject': 'science'},
{'name': 'Bob', 'grade': 88, 'subject': 'science'}
]
# Group by name, collect grades
grades_by_student = groupby_transform(
students,
keyfunc=lambda s: s['name'],
valuefunc=lambda s: s['grade']
)
# {'Alice': [85, 78], 'Bob': [90, 88]}
# Group by name, get average grade
avg_grades = groupby_transform(
students,
keyfunc=lambda s: s['name'],
valuefunc=lambda s: s['grade'],
reducefunc=lambda grades: sum(grades) / len(grades)
)
# {'Alice': 81.5, 'Bob': 89.0}

Classes for controlling iteration time.
class time_limited:
"""Iterator that yields items until time limit expires."""
def __init__(self, limit_seconds, iterable):
"""
Initialize time-limited iterator.
Args:
limit_seconds: Maximum time in seconds to iterate
iterable: The iterable to time-limit
"""
def __iter__(self):
"""Return iterator object."""
def __next__(self):
"""Return next item, checking time limit."""
@property
def timed_out(self):
"""True if time limit was exceeded."""Usage:
from more_itertools import time_limited
import time
# Generate data but limit processing time
def slow_generator():
i = 0
while True:
time.sleep(0.1) # Simulate slow operation
yield i
i += 1
# Process for at most 1 second
limited = time_limited(1.0, slow_generator())
results = list(limited) # Will stop after ~1 second
print(f"Processed {len(results)} items in ~1 second")
print(f"Timed out: {limited.timed_out}")Classes for advanced numeric range generation.
class numeric_range:
"""Extended range() for any numeric types."""
def __init__(self, *args):
"""
Initialize numeric range.
Args:
*args: 1, 2, or 3 arguments like built-in range():
- (stop): start=0, step=1
- (start, stop): step=1
- (start, stop, step): all specified
"""
def __iter__(self):
"""Return iterator over range values."""
def __len__(self):
"""Return length of range."""
def __getitem__(self, key):
"""Get item by index or slice."""
def __contains__(self, value):
"""Test if value is in range."""Usage:
from more_itertools import numeric_range
from decimal import Decimal
# Floating-point range with precise stepping
float_range = list(numeric_range(0.0, 1.0, 0.1))
# [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
# Decimal arithmetic for financial calculations
decimal_range = list(numeric_range(Decimal('0'), Decimal('1'), Decimal('0.1')))
# Avoids floating-point precision issues
# Works like built-in range but with any numeric type
range_obj = numeric_range(Decimal('2.5'), Decimal('10.5'), Decimal('1.5'))
print(len(range_obj)) # Supports len()
print(Decimal('5') in range_obj) # Supports membership testing

Enhanced slicing operations.
class islice_extended:
"""Extended islice with negative index support."""
def __init__(self, iterable, *args):
"""
Initialize extended islice.
Args:
iterable: The iterable to slice
*args: Optional start, stop, step like itertools.islice
"""
def __iter__(self):
"""Return iterator object."""
def __next__(self):
"""Return next item."""
def __getitem__(self, key):
"""Support slice notation.
Args:
key: A slice object
Returns:
New islice_extended with slice applied
"""Usage:
from more_itertools import islice_extended
data = range(20)
# Standard slicing
normal_slice = list(islice_extended(data, 5, 15, 2)) # [5, 7, 9, 11, 13]
# Negative indices (enhanced feature)
data_list = list(range(10)) # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
end_slice = list(islice_extended(data_list, -3, None)) # [7, 8, 9]
# Slice notation support
iterator = islice_extended(range(100))
result = list(iterator[10:20:2]) # [10, 12, 14, 16, 18]

Functions for reshaping sequences.
def reshape(iterable: Iterable[Any], shape: tuple[int, ...]) -> Iterator[Any]: ...

Usage:
from more_itertools import reshape
# Reshape flat data into multi-dimensional structure
data = range(12)
matrix = list(reshape(data, (3, 4)))
# Creates 3 rows of 4 elements each:
# [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11]]
# 3D reshape
cube = list(reshape(range(24), (2, 3, 4)))
# Creates 2 layers of 3x4 matrices

Enhanced mapping operations.
def doublestarmap(func: Callable[..., Any], iterable: Iterable[tuple[tuple[Any, ...], dict[str, Any]]]) -> Iterator[Any]: ...
NOTE(review): upstream more_itertools.doublestarmap takes an iterable of mappings and calls func(**item); the (args, kwargs) tuple form shown here should be verified against the installed library version.

Usage:
from more_itertools import doublestarmap
# Apply function with both positional and keyword arguments
def greet(name, age, title="Mr/Ms"):
return f"{title} {name} (age {age})"
# Each item is (args_tuple, kwargs_dict)
data = [
(("Alice", 25), {"title": "Dr"}),
(("Bob", 30), {}),
(("Charlie", 35), {"title": "Prof"})
]
greetings = list(doublestarmap(greet, data))
# ["Dr Alice (age 25)", "Mr/Ms Bob (age 30)", "Prof Charlie (age 35)"]Functions for random interleaving of multiple iterables.
def interleave_randomly(*iterables):
"""
Randomly interleave items from multiple iterables.
Args:
*iterables: Variable number of iterables to interleave
Returns:
Iterator yielding items in random order while preserving internal sequence
"""Usage:
from more_itertools import interleave_randomly
# Random interleaving preserves internal order
lists = [[1, 2, 3], ['a', 'b', 'c'], [True, False]]
result = list(interleave_randomly(*lists))
# Result could be: [1, 'a', True, 2, 'b', False, 3, 'c']
# Note: Order is random but each list's internal sequence is preserved

Enhanced takewhile operations.
def takewhile_inclusive(predicate, iterable):
"""
Take items while predicate is true, including first failing item.
Args:
predicate: Function returning True/False for each element
iterable: Input sequence to process
Returns:
Iterator yielding elements until predicate fails, including failing element
"""Usage:
from more_itertools import takewhile_inclusive
from itertools import takewhile
# Compare standard takewhile vs takewhile_inclusive
data = [1, 3, 5, 8, 2, 4]
predicate = lambda x: x < 6
standard = list(takewhile(predicate, data)) # [1, 3, 5]
inclusive = list(takewhile_inclusive(predicate, data)) # [1, 3, 5, 8]
# Includes the first element that fails the predicate

Functions for context-managed iteration.
def with_iter(context_manager):
"""
Wrap iterable in with statement for automatic cleanup.
Args:
context_manager: Context manager that yields an iterable
Returns:
Iterator that properly closes context manager when exhausted
"""Usage:
from more_itertools import with_iter
# Automatically close file when iteration completes
lines = list(with_iter(open('data.txt')))
# Works with any context manager that returns an iterable
with open('output.txt', 'w') as f:
for line in with_iter(open('input.txt')):
f.write(line.upper())
# input.txt is automatically closed when iteration finishes

Functions for suppressing exceptions during iteration.
def iter_suppress(iterable, *exceptions):
"""
Iterate while suppressing specified exceptions.
Args:
iterable: Input iterable to process
*exceptions: Exception types to suppress during iteration
Returns:
Iterator that stops gracefully when specified exceptions occur
"""Usage:
from more_itertools import iter_suppress
def risky_generator():
for i in range(10):
if i == 5:
raise ValueError("Something went wrong")
yield i
# Suppress ValueError and get partial results
safe_iter = iter_suppress(risky_generator(), ValueError)
result = list(safe_iter) # [0, 1, 2, 3, 4] - stops at exception
# Multiple exception types
result = list(iter_suppress(some_iter, ValueError, TypeError, KeyError))

Install with Tessl CLI
npx tessl i tessl/pypi-more-itertools