Persistent cache implementation for httpx and httpcore following the RFC 9111 specification.
Pluggable storage implementations for persisting cached HTTP responses. Hishel provides multiple storage backends for different use cases, from local file storage to distributed Redis and cloud-based S3 storage.
Abstract base classes defining the storage interface for both synchronous and asynchronous operations.
class BaseStorage:
    """Abstract base class defining the synchronous storage interface.

    Concrete backends (file, Redis, SQLite, in-memory, S3) subclass this
    and implement the store/retrieve/remove lifecycle for cached HTTP
    request/response pairs.
    """

    def __init__(self, *, serializer=None, ttl=None):
        """
        Initialize synchronous storage backend.

        Parameters:
        - serializer: Serializer for request/response data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds for cached responses
        """

    # Persist a response/request pair (plus optional cache metadata) under `key`.
    def store(self, key: str, response: Response, request: Request, metadata=None) -> None: ...

    # Return the cached triple for `key`, or None on a cache miss.
    def retrieve(self, key: str) -> tuple[Response, Request, Metadata] | None: ...

    # Delete an entry by key (or by the cached Response object itself).
    def remove(self, key: str | Response) -> None: ...

    # Rewrite only the metadata associated with an existing entry.
    def update_metadata(self, key: str, response: Response, request: Request, metadata: Metadata) -> None: ...

    # Release any resources held by the backend (files, connections, ...).
    def close(self) -> None: ...
class AsyncBaseStorage:
    """Abstract base class defining the asynchronous storage interface.

    Async counterpart of BaseStorage; concrete async backends implement
    the store/retrieve/remove lifecycle for cached HTTP request/response
    pairs. Note the close method is named `aclose`, per async convention.
    """

    def __init__(self, *, serializer=None, ttl=None):
        """
        Initialize asynchronous storage backend.

        Parameters:
        - serializer: Serializer for request/response data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds for cached responses
        """

    # Persist a response/request pair (plus optional cache metadata) under `key`.
    async def store(self, key: str, response: Response, request: Request, metadata=None) -> None: ...

    # Return the cached triple for `key`, or None on a cache miss.
    async def retrieve(self, key: str) -> tuple[Response, Request, Metadata] | None: ...

    # Delete an entry by key (or by the cached Response object itself).
    async def remove(self, key: str | Response) -> None: ...

    # Rewrite only the metadata associated with an existing entry.
    async def update_metadata(self, key: str, response: Response, request: Request, metadata: Metadata) -> None: ...

    # Release any resources held by the backend (files, connections, ...).
    async def aclose(self) -> None: ...


# File-based storage that persists cached responses to the local filesystem.
class FileStorage(BaseStorage):
    """Synchronous file-based storage backend.

    Persists each cached response to the local filesystem under
    `base_path` and checks for TTL-expired files on a fixed interval.
    """

    def __init__(self, *, serializer=None, base_path=None, ttl=None, check_ttl_every=60):
        """
        File-based storage backend.

        Parameters:
        - serializer: Serializer for data (defaults to JSONSerializer)
        - base_path: Directory path for cache files (defaults to .hishel_cache)
        - ttl: Time-to-live in seconds
        - check_ttl_every: Interval in seconds to check for expired files
        """
class AsyncFileStorage(AsyncBaseStorage):
    """Asynchronous file-based storage backend.

    Async counterpart of FileStorage: persists cached responses to the
    local filesystem and checks for TTL-expired files on a fixed interval.
    """

    def __init__(self, *, serializer=None, base_path=None, ttl=None, check_ttl_every=60):
        """
        Async file-based storage backend.

        Parameters:
        - serializer: Serializer for data (defaults to JSONSerializer)
        - base_path: Directory path for cache files (defaults to .hishel_cache)
        - ttl: Time-to-live in seconds
        - check_ttl_every: Interval in seconds to check for expired files
        """


# Usage Examples:
# Usage example: configuring hishel's FileStorage.
from pathlib import Path

import hishel

# Default file storage
storage = hishel.FileStorage()

# Custom path and TTL
storage = hishel.FileStorage(
    base_path=Path("/tmp/my_cache"),
    ttl=3600,  # 1 hour
    check_ttl_every=300,  # Check every 5 minutes
)

# Custom serializer
storage = hishel.FileStorage(
    serializer=hishel.PickleSerializer(),
    base_path=Path("./cache"),
)

with hishel.CacheClient(storage=storage) as client:
    response = client.get("https://api.example.com/data")

# Redis-based storage for distributed caching across multiple application instances.
class RedisStorage(BaseStorage):
    """Synchronous Redis-based storage backend.

    Stores cached responses in Redis under a key namespace so the cache
    can be shared across multiple application instances.
    """

    def __init__(self, *, client=None, serializer=None, ttl=None, namespace="hishel"):
        """
        Redis-based storage backend.

        Parameters:
        - client: Redis client instance (creates default if None)
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        - namespace: Redis key namespace prefix
        """
class AsyncRedisStorage(AsyncBaseStorage):
    """Asynchronous Redis-based storage backend.

    Async counterpart of RedisStorage; expects an async Redis client.
    """

    def __init__(self, *, client=None, serializer=None, ttl=None, namespace="hishel"):
        """
        Async Redis-based storage backend.

        Parameters:
        - client: Async Redis client instance (creates default if None)
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        - namespace: Redis key namespace prefix
        """


# Usage Examples:
# Usage example: configuring hishel's RedisStorage.
import redis

import hishel

# Default Redis storage (connects to localhost:6379)
storage = hishel.RedisStorage()

# Custom Redis client
redis_client = redis.Redis(
    host='cache.example.com',
    port=6380,
    password='secret',
    db=1
)
storage = hishel.RedisStorage(
    client=redis_client,
    ttl=7200,  # 2 hours
    namespace="myapp_cache"
)

with hishel.CacheClient(storage=storage) as client:
    response = client.get("https://api.example.com/data")

# SQLite-based storage for persistent local caching with SQL query capabilities.
class SQLiteStorage(BaseStorage):
    """Synchronous SQLite-based storage backend.

    Stores cached responses in a SQLite table; an in-memory database is
    used when no connection is supplied.
    """

    def __init__(self, *, connection=None, serializer=None, ttl=None,
                 table_name="cache", create_table=True):
        """
        SQLite-based storage backend.

        Parameters:
        - connection: SQLite connection (creates in-memory if None)
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        - table_name: Database table name for cache
        - create_table: Whether to create table if it doesn't exist
        """
class AsyncSQLiteStorage(AsyncBaseStorage):
    """Asynchronous SQLite-based storage backend.

    Async counterpart of SQLiteStorage; expects an async SQLite connection.
    """

    def __init__(self, *, connection=None, serializer=None, ttl=None,
                 table_name="cache", create_table=True):
        """
        Async SQLite-based storage backend.

        Parameters:
        - connection: Async SQLite connection (creates in-memory if None)
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        - table_name: Database table name for cache
        - create_table: Whether to create table if it doesn't exist
        """


# Usage Examples:
# Usage example: configuring hishel's SQLiteStorage.
import sqlite3

import hishel

# Default SQLite storage (in-memory)
storage = hishel.SQLiteStorage()

# File-based SQLite (note: the original example re-imported sqlite3 here;
# one top-level import suffices)
conn = sqlite3.connect("cache.db")
storage = hishel.SQLiteStorage(
    connection=conn,
    table_name="http_cache",
    ttl=86400  # 24 hours
)

with hishel.CacheClient(storage=storage) as client:
    response = client.get("https://api.example.com/data")

# Fast in-memory storage with LRU eviction for high-performance caching within a single process.
class InMemoryStorage(BaseStorage):
    """Synchronous in-memory storage backend with LRU eviction.

    Holds cached responses in process memory; once `capacity` entries are
    stored, the least-recently-used entry is evicted.
    """

    def __init__(self, *, capacity=128, serializer=None, ttl=None):
        """
        In-memory storage backend with LRU eviction.

        Parameters:
        - capacity: Maximum number of cached responses
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        """
class AsyncInMemoryStorage(AsyncBaseStorage):
    """Asynchronous in-memory storage backend with LRU eviction.

    Async counterpart of InMemoryStorage.
    """

    def __init__(self, *, capacity=128, serializer=None, ttl=None):
        """
        Async in-memory storage backend with LRU eviction.

        Parameters:
        - capacity: Maximum number of cached responses
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        """


# Usage Examples:
# Usage example: configuring hishel's InMemoryStorage.
import hishel

# Default in-memory storage
storage = hishel.InMemoryStorage()

# Custom capacity and TTL
storage = hishel.InMemoryStorage(
    capacity=1000,
    ttl=1800  # 30 minutes
)

with hishel.CacheClient(storage=storage) as client:
    response = client.get("https://api.example.com/data")

# AWS S3-based storage for distributed cloud caching across regions and environments.
class S3Storage(BaseStorage):
    """Synchronous AWS S3-based storage backend.

    Stores cached responses as objects in an S3 bucket under a key prefix.
    Note `bucket_name` is required (no default).
    """

    def __init__(self, *, bucket_name, client=None, serializer=None, ttl=None,
                 key_prefix="hishel/"):
        """
        AWS S3-based storage backend.

        Parameters:
        - bucket_name: S3 bucket name for cache storage
        - client: Boto3 S3 client (creates default if None)
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        - key_prefix: S3 key prefix for cache objects
        """
class AsyncS3Storage(AsyncBaseStorage):
    """Asynchronous AWS S3-based storage backend.

    Async counterpart of S3Storage; expects an async S3 client.
    Note `bucket_name` is required (no default).
    """

    def __init__(self, *, bucket_name, client=None, serializer=None, ttl=None,
                 key_prefix="hishel/"):
        """
        Async AWS S3-based storage backend.

        Parameters:
        - bucket_name: S3 bucket name for cache storage
        - client: Async Boto3 S3 client (creates default if None)
        - serializer: Serializer for data (defaults to JSONSerializer)
        - ttl: Time-to-live in seconds
        - key_prefix: S3 key prefix for cache objects
        """


# Usage Examples:
# Usage example: configuring hishel's S3Storage.
import boto3

import hishel

# Default S3 storage
storage = hishel.S3Storage(bucket_name="my-cache-bucket")

# Custom S3 client and configuration
s3_client = boto3.client(
    's3',
    region_name='us-west-2',
    aws_access_key_id='...',
    aws_secret_access_key='...'
)
storage = hishel.S3Storage(
    bucket_name="my-cache-bucket",
    client=s3_client,
    key_prefix="production/cache/",
    ttl=14400  # 4 hours
)

with hishel.CacheClient(storage=storage) as client:
    response = client.get("https://api.example.com/data")

# Different storage backends have optional dependencies:
# Redis storage
pip install hishel[redis]
# SQLite storage
pip install hishel[sqlite]
# S3 storage
pip install hishel[s3]
# YAML serializer
pip install hishel[yaml]
# All optional dependencies
pip install hishel[redis,sqlite,s3,yaml]

Install with Tessl CLI
npx tessl i tessl/pypi-hisheldocs
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10