asyncio (PEP 3156) Redis support
Overall score: 98%
Redis data structure operations for lists, sets, hashes, sorted sets, streams, and specialized structures like HyperLogLog and geospatial indexes. These powerful collection types enable efficient modeling of complex data relationships and patterns.
Ordered collections supporting push/pop operations from both ends, indexing, and blocking operations for queue-like behavior.
async def lpush(name: str, *values: Any) -> int:
    """
    Push values to the left (head) of list.
    Args:
        name: List key name
        values: Values to push
    Returns:
        New list length
    """

async def rpush(name: str, *values: Any) -> int:
    """
    Push values to the right (tail) of list.
    Args:
        name: List key name
        values: Values to push
    Returns:
        New list length
    """

async def lpop(name: str) -> Optional[str]:
    """
    Pop value from left (head) of list.
    Args:
        name: List key name
    Returns:
        Popped value or None if list is empty
    """

async def rpop(name: str) -> Optional[str]:
    """
    Pop value from right (tail) of list.
    Args:
        name: List key name
    Returns:
        Popped value or None if list is empty
    """

async def lrange(name: str, start: int, end: int) -> List[str]:
    """
    Get list elements by range.
    Args:
        name: List key name
        start: Start index (0-based)
        end: End index (-1 for end of list)
    Returns:
        List of elements in range
    """

async def llen(name: str) -> int:
    """
    Get list length.
    Args:
        name: List key name
    Returns:
        Number of elements in list
    """

async def lindex(name: str, index: int) -> Optional[str]:
    """
    Get list element by index.
    Args:
        name: List key name
        index: Element index
    Returns:
        Element value or None if index out of bounds
    """

async def lset(name: str, index: int, value: Any) -> bool:
    """
    Set list element at index.
    Args:
        name: List key name
        index: Element index
        value: New value
    Returns:
        True if successful
    """

async def lrem(name: str, count: int, value: Any) -> int:
    """
    Remove elements from list.
    Args:
        name: List key name
        count: Number to remove (0=all, >0=from head, <0=from tail)
        value: Value to remove
    Returns:
        Number of elements removed
    """

async def ltrim(name: str, start: int, end: int) -> bool:
    """
    Trim list to specified range.
    Args:
        name: List key name
        start: Start index
        end: End index
    Returns:
        True if successful
    """

async def linsert(name: str, where: str, refvalue: Any, value: Any) -> int:
    """
    Insert element before or after reference value.
    Args:
        name: List key name
        where: 'BEFORE' or 'AFTER'
        refvalue: Reference value to find
        value: Value to insert
    Returns:
        New list length or -1 if reference not found
    """

async def blpop(keys: List[str], timeout: int = 0) -> Optional[Tuple[str, str]]:
    """
    Blocking pop from left of lists.
    Args:
        keys: List of list key names
        timeout: Timeout in seconds (0=forever)
    Returns:
        Tuple of (key, value) or None on timeout
    """

async def brpop(keys: List[str], timeout: int = 0) -> Optional[Tuple[str, str]]:
    """
    Blocking pop from right of lists.
    Args:
        keys: List of list key names
        timeout: Timeout in seconds (0=forever)
    Returns:
        Tuple of (key, value) or None on timeout
    """
# Unordered collections of unique elements with set operations like union, intersection, and difference.
async def sadd(name: str, *values: Any) -> int:
    """
    Add members to set.
    Args:
        name: Set key name
        values: Values to add
    Returns:
        Number of new members added
    """

async def srem(name: str, *values: Any) -> int:
    """
    Remove members from set.
    Args:
        name: Set key name
        values: Values to remove
    Returns:
        Number of members removed
    """

async def smembers(name: str) -> Set[str]:
    """
    Get all set members.
    Args:
        name: Set key name
    Returns:
        Set of all members
    """

async def scard(name: str) -> int:
    """
    Get set cardinality (size).
    Args:
        name: Set key name
    Returns:
        Number of elements in set
    """

async def sismember(name: str, value: Any) -> bool:
    """
    Test if value is set member.
    Args:
        name: Set key name
        value: Value to test
    Returns:
        True if value is in set
    """

async def spop(name: str, count: Optional[int] = None) -> Union[Optional[str], List[str]]:
    """
    Remove and return random set members.
    Args:
        name: Set key name
        count: Number of members to pop
    Returns:
        Single member if count is None, list of members otherwise
    """

async def srandmember(name: str, number: Optional[int] = None) -> Union[Optional[str], List[str]]:
    """
    Get random set members without removing.
    Args:
        name: Set key name
        number: Number of members to return
    Returns:
        Single member if number is None, list of members otherwise
    """

async def sinter(keys: List[str]) -> Set[str]:
    """
    Intersect multiple sets.
    Args:
        keys: List of set key names
    Returns:
        Set intersection
    """

async def sunion(keys: List[str]) -> Set[str]:
    """
    Union multiple sets.
    Args:
        keys: List of set key names
    Returns:
        Set union
    """

async def sdiff(keys: List[str]) -> Set[str]:
    """
    Difference of multiple sets.
    Args:
        keys: List of set key names
    Returns:
        Set difference
    """

async def sinterstore(dest: str, keys: List[str]) -> int:
    """
    Store intersection in destination key.
    Args:
        dest: Destination key name
        keys: Source set key names
    Returns:
        Number of elements in result set
    """

async def sunionstore(dest: str, keys: List[str]) -> int:
    """
    Store union in destination key.
    Args:
        dest: Destination key name
        keys: Source set key names
    Returns:
        Number of elements in result set
    """

async def sdiffstore(dest: str, keys: List[str]) -> int:
    """
    Store difference in destination key.
    Args:
        dest: Destination key name
        keys: Source set key names
    Returns:
        Number of elements in result set
    """
# Field-value mappings similar to dictionaries, ideal for representing objects and structured data.
async def hset(
    name: str,
    key: Optional[str] = None,
    value: Any = None,
    mapping: Optional[Dict[str, Any]] = None,
) -> int:
    """
    Set hash fields.
    Either a single key/value pair or a mapping of pairs must be supplied.
    Args:
        name: Hash key name
        key: Field name (if setting single field)
        value: Field value (if setting single field)
        mapping: Dictionary of field-value pairs
    Returns:
        Number of fields added (not updated)
    """

async def hget(name: str, key: str) -> Optional[str]:
    """
    Get hash field value.
    Args:
        name: Hash key name
        key: Field name
    Returns:
        Field value or None if field doesn't exist
    """

async def hgetall(name: str) -> Dict[str, str]:
    """
    Get all hash fields and values.
    Args:
        name: Hash key name
    Returns:
        Dictionary of all fields and values
    """

async def hmget(name: str, keys: List[str]) -> List[Optional[str]]:
    """
    Get multiple hash fields.
    Args:
        name: Hash key name
        keys: List of field names
    Returns:
        List of field values in same order
    """

async def hmset(name: str, mapping: Dict[str, Any]) -> bool:
    """
    Set multiple hash fields.
    NOTE(review): HMSET is deprecated in Redis 4.0+ in favor of HSET with a
    mapping — confirm against the client version before relying on it.
    Args:
        name: Hash key name
        mapping: Dictionary of field-value pairs
    Returns:
        True if successful
    """

async def hdel(name: str, *keys: str) -> int:
    """
    Delete hash fields.
    Args:
        name: Hash key name
        keys: Field names to delete
    Returns:
        Number of fields deleted
    """

async def hexists(name: str, key: str) -> bool:
    """
    Check if hash field exists.
    Args:
        name: Hash key name
        key: Field name
    Returns:
        True if field exists
    """

async def hlen(name: str) -> int:
    """
    Get number of hash fields.
    Args:
        name: Hash key name
    Returns:
        Number of fields in hash
    """

async def hkeys(name: str) -> List[str]:
    """
    Get all hash field names.
    Args:
        name: Hash key name
    Returns:
        List of field names
    """

async def hvals(name: str) -> List[str]:
    """
    Get all hash field values.
    Args:
        name: Hash key name
    Returns:
        List of field values
    """

async def hincrby(name: str, key: str, amount: int = 1) -> int:
    """
    Increment hash field by integer.
    Args:
        name: Hash key name
        key: Field name
        amount: Amount to increment
    Returns:
        New field value
    """

async def hincrbyfloat(name: str, key: str, amount: float = 1.0) -> float:
    """
    Increment hash field by float.
    Args:
        name: Hash key name
        key: Field name
        amount: Amount to increment
    Returns:
        New field value
    """

async def hsetnx(name: str, key: str, value: Any) -> bool:
    """
    Set hash field only if it doesn't exist.
    Args:
        name: Hash key name
        key: Field name
        value: Field value
    Returns:
        True if field was set, False if field exists
    """
# Ordered sets where each member has an associated score, enabling range queries and ranking operations.
async def zadd(
    name: str,
    mapping: Dict[str, float],
    nx: bool = False,
    xx: bool = False,
    ch: bool = False,
    incr: bool = False,
) -> int:
    """
    Add members to sorted set with scores.
    Args:
        name: Sorted set key name
        mapping: Dictionary of member-score pairs
        nx: Only add new members
        xx: Only update existing members
        ch: Return number of changed members
        incr: Increment score of single member
    Returns:
        Number of members added or changed
    """

async def zrem(name: str, *values: Any) -> int:
    """
    Remove members from sorted set.
    Args:
        name: Sorted set key name
        values: Members to remove
    Returns:
        Number of members removed
    """

async def zrange(
    name: str,
    start: int,
    end: int,
    desc: bool = False,
    withscores: bool = False,
) -> List[Union[str, Tuple[str, float]]]:
    """
    Get sorted set members by rank range.
    Args:
        name: Sorted set key name
        start: Start rank (0-based)
        end: End rank (-1 for highest)
        desc: Return in descending order
        withscores: Include scores in result
    Returns:
        List of members or (member, score) tuples
    """

async def zrevrange(
    name: str,
    start: int,
    end: int,
    withscores: bool = False,
) -> List[Union[str, Tuple[str, float]]]:
    """
    Get sorted set members by rank range in reverse order.
    Args:
        name: Sorted set key name
        start: Start rank (0-based)
        end: End rank (-1 for lowest)
        withscores: Include scores in result
    Returns:
        List of members or (member, score) tuples
    """

async def zrangebyscore(
    name: str,
    min_score: float,
    max_score: float,
    start: Optional[int] = None,
    num: Optional[int] = None,
    withscores: bool = False,
) -> List[Union[str, Tuple[str, float]]]:
    """
    Get sorted set members by score range.
    Args:
        name: Sorted set key name
        min_score: Minimum score
        max_score: Maximum score
        start: Offset for pagination
        num: Count for pagination
        withscores: Include scores in result
    Returns:
        List of members or (member, score) tuples
    """

async def zcard(name: str) -> int:
    """
    Get sorted set cardinality (size).
    Args:
        name: Sorted set key name
    Returns:
        Number of members in sorted set
    """

async def zscore(name: str, value: Any) -> Optional[float]:
    """
    Get member score.
    Args:
        name: Sorted set key name
        value: Member value
    Returns:
        Member score or None if member doesn't exist
    """

async def zrank(name: str, value: Any) -> Optional[int]:
    """
    Get member rank (0-based, lowest score first).
    Args:
        name: Sorted set key name
        value: Member value
    Returns:
        Member rank or None if member doesn't exist
    """

async def zrevrank(name: str, value: Any) -> Optional[int]:
    """
    Get member reverse rank (0-based, highest score first).
    Args:
        name: Sorted set key name
        value: Member value
    Returns:
        Member reverse rank or None if member doesn't exist
    """

async def zincrby(name: str, amount: float, value: Any) -> float:
    """
    Increment member score.
    Args:
        name: Sorted set key name
        amount: Amount to increment
        value: Member value
    Returns:
        New member score
    """

async def zcount(name: str, min_score: float, max_score: float) -> int:
    """
    Count members in score range.
    Args:
        name: Sorted set key name
        min_score: Minimum score
        max_score: Maximum score
    Returns:
        Number of members in range
    """
# Log-like data structures for storing sequences of field-value pairs with automatic ID generation and consumer group support.
async def xadd(
    name: str,
    fields: Dict[str, Any],
    id: str = "*",
    maxlen: Optional[int] = None,
    approximate: bool = True,
) -> str:
    """
    Add entry to stream.
    Args:
        name: Stream key name
        fields: Dictionary of field-value pairs
        id: Entry ID (* for auto-generation)
        maxlen: Maximum stream length
        approximate: Use approximate trimming
    Returns:
        Entry ID
    """

async def xread(
    streams: Dict[str, str],
    count: Optional[int] = None,
    block: Optional[int] = None,
) -> Dict[str, List[Tuple[str, Dict[str, str]]]]:
    """
    Read from streams.
    Args:
        streams: Dictionary of stream names to start IDs
        count: Maximum entries per stream
        block: Block for milliseconds if no data
    Returns:
        Dictionary mapping stream names to entries
    """

async def xrange(
    name: str,
    min: str = "-",
    max: str = "+",
    count: Optional[int] = None,
) -> List[Tuple[str, Dict[str, str]]]:
    """
    Get stream entries by ID range.
    Args:
        name: Stream key name
        min: Minimum ID ("-" for start)
        max: Maximum ID ("+" for end)
        count: Maximum entries to return
    Returns:
        List of (entry_id, fields) tuples
    """

async def xlen(name: str) -> int:
    """
    Get stream length.
    Args:
        name: Stream key name
    Returns:
        Number of entries in stream
    """

async def xdel(name: str, *ids: str) -> int:
    """
    Delete stream entries.
    Args:
        name: Stream key name
        ids: Entry IDs to delete
    Returns:
        Number of entries deleted
    """

async def xtrim(name: str, maxlen: int, approximate: bool = True) -> int:
    """
    Trim stream to maximum length.
    Args:
        name: Stream key name
        maxlen: Maximum length
        approximate: Use approximate trimming
    Returns:
        Number of entries removed
    """

async def xgroup_create(
    name: str,
    groupname: str,
    id: str = "$",
    mkstream: bool = False,
) -> bool:
    """
    Create consumer group.
    Args:
        name: Stream key name
        groupname: Consumer group name
        id: Starting ID ("$" for latest)
        mkstream: Create stream if it doesn't exist
    Returns:
        True if successful
    """

async def xreadgroup(
    groupname: str,
    consumername: str,
    streams: Dict[str, str],
    count: Optional[int] = None,
    block: Optional[int] = None,
    noack: bool = False,
) -> Dict[str, List[Tuple[str, Dict[str, str]]]]:
    """
    Read from stream as consumer group member.
    Args:
        groupname: Consumer group name
        consumername: Consumer name
        streams: Dictionary of stream names to start IDs
        count: Maximum entries per stream
        block: Block for milliseconds if no data
        noack: Don't automatically acknowledge
    Returns:
        Dictionary mapping stream names to entries
    """

async def xack(name: str, groupname: str, *ids: str) -> int:
    """
    Acknowledge stream messages.
    Args:
        name: Stream key name
        groupname: Consumer group name
        ids: Entry IDs to acknowledge
    Returns:
        Number of messages acknowledged
    """
# Probabilistic data structure for approximate cardinality counting of large sets with minimal memory usage.
async def pfadd(name: str, *values: Any) -> bool:
    """
    Add elements to HyperLogLog.
    Args:
        name: HyperLogLog key name
        values: Elements to add
    Returns:
        True if cardinality changed
    """

async def pfcount(*sources: str) -> int:
    """
    Get HyperLogLog cardinality estimate.
    Args:
        sources: HyperLogLog key names
    Returns:
        Estimated cardinality
    """

async def pfmerge(dest: str, *sources: str) -> bool:
    """
    Merge HyperLogLogs into destination.
    Args:
        dest: Destination key name
        sources: Source HyperLogLog key names
    Returns:
        True if successful
    """
# Geographic coordinate storage and querying with radius-based searches and distance calculations.
async def geoadd(name: str, *values: Any) -> int:
    """
    Add geospatial members.
    Args:
        name: Geospatial key name
        values: Longitude, latitude, member tuples
    Returns:
        Number of members added
    """

async def geodist(name: str, place1: str, place2: str, unit: str = "m") -> Optional[float]:
    """
    Get distance between geospatial members.
    Args:
        name: Geospatial key name
        place1: First member name
        place2: Second member name
        unit: Distance unit ('m', 'km', 'mi', 'ft')
    Returns:
        Distance in specified unit
    """

async def georadius(
    name: str,
    longitude: float,
    latitude: float,
    radius: float,
    unit: str = "m",
    **kwargs
) -> List[Any]:
    """
    Find members within radius of coordinates.
    Args:
        name: Geospatial key name
        longitude: Center longitude
        latitude: Center latitude
        radius: Search radius
        unit: Distance unit
    Returns:
        List of members within radius
    """

async def geopos(name: str, *values: str) -> List[Optional[Tuple[float, float]]]:
    """
    Get coordinates of geospatial members.
    Args:
        name: Geospatial key name
        values: Member names
    Returns:
        List of (longitude, latitude) tuples
    """

async def list_examples():
    """Demonstrate list operations: queues, stacks, ranges, blocking pops."""
    redis = aioredis.Redis(decode_responses=True)
    # Queue operations (FIFO)
    await redis.lpush('queue', 'task1', 'task2', 'task3')
    task = await redis.rpop('queue')  # Gets 'task1'
    # Stack operations (LIFO)
    await redis.lpush('stack', 'item1', 'item2', 'item3')
    item = await redis.lpop('stack')  # Gets 'item3'
    # Get range of items
    items = await redis.lrange('mylist', 0, 4)  # First 5 items
    all_items = await redis.lrange('mylist', 0, -1)  # All items
    # Blocking operations for producer/consumer
    result = await redis.blpop(['queue1', 'queue2'], timeout=10)
    if result:
        queue_name, value = result
        print(f"Got {value} from {queue_name}")

async def set_examples():
    """Demonstrate set operations: membership and set algebra."""
    redis = aioredis.Redis(decode_responses=True)
    # Add members to sets
    await redis.sadd('tags:python', 'web', 'async', 'redis')
    await redis.sadd('tags:javascript', 'web', 'frontend', 'react')
    # Set operations
    common = await redis.sinter(['tags:python', 'tags:javascript'])
    print(f"Common tags: {common}")  # {'web'}
    all_tags = await redis.sunion(['tags:python', 'tags:javascript'])
    python_only = await redis.sdiff(['tags:python', 'tags:javascript'])
    # Check membership
    is_member = await redis.sismember('tags:python', 'async')
    print(f"'async' in Python tags: {is_member}")  # True

async def hash_examples():
    """Demonstrate hash operations: object storage and numeric fields."""
    redis = aioredis.Redis(decode_responses=True)
    # Store user data
    user_data = {
        'name': 'John Doe',
        'email': 'john@example.com',
        'age': '30',
        'city': 'New York'
    }
    await redis.hset('user:1', mapping=user_data)
    # Get specific fields
    name = await redis.hget('user:1', 'name')
    email = await redis.hget('user:1', 'email')
    # Get multiple fields
    info = await redis.hmget('user:1', ['name', 'email', 'city'])
    # Get all fields
    all_data = await redis.hgetall('user:1')
    # Increment numeric fields
    await redis.hincrby('user:1', 'login_count', 1)
    await redis.hincrbyfloat('user:1', 'balance', 25.50)

async def sorted_set_examples():
    """Demonstrate sorted-set operations: leaderboards, ranks, score ranges."""
    redis = aioredis.Redis(decode_responses=True)
    # Add players with scores
    await redis.zadd('leaderboard', {
        'player1': 100,
        'player2': 150,
        'player3': 75,
        'player4': 200
    })
    # Get top players
    top_3 = await redis.zrevrange('leaderboard', 0, 2, withscores=True)
    print(f"Top 3: {top_3}")  # [('player4', 200), ('player2', 150), ('player1', 100)]
    # Get player rank
    rank = await redis.zrevrank('leaderboard', 'player1')  # 2 (0-based)
    # Get players in score range
    middle_players = await redis.zrangebyscore('leaderboard', 80, 160)
    # Increment score
    new_score = await redis.zincrby('leaderboard', 25, 'player3')  # 100

async def stream_examples():
    """Demonstrate stream operations: producing, reading, consumer groups."""
    redis = aioredis.Redis(decode_responses=True)
    # Add entries to stream
    entry_id1 = await redis.xadd('events', {
        'type': 'user_login',
        'user_id': '123',
        'timestamp': '2023-01-01T10:00:00Z'
    })
    entry_id2 = await redis.xadd('events', {
        'type': 'purchase',
        'user_id': '123',
        'amount': '29.99'
    })
    # Read from stream
    entries = await redis.xrange('events', min='-', max='+')
    for entry_id, fields in entries:
        print(f"{entry_id}: {fields}")
    # Create consumer group
    await redis.xgroup_create('events', 'processors', id='0')
    # Read as consumer group
    messages = await redis.xreadgroup(
        'processors', 'worker1',
        {'events': '>'},
        count=5, block=1000
    )
    # Acknowledge processed messages
    if 'events' in messages:
        message_ids = [msg[0] for msg in messages['events']]
        await redis.xack('events', 'processors', *message_ids)
# Install with Tessl CLI
npx tessl i tessl/pypi-aioredisdocs
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10