A persistent cache for Python requests
76
Cache backends provide persistent storage for HTTP responses with different performance characteristics, scalability options, and deployment requirements. Requests-cache supports multiple backends from simple in-memory caching to distributed storage solutions.
Central function for initializing any backend by name, class, or instance.
def init_backend(
cache_name: StrOrPath,
backend: Optional[BackendSpecifier] = None,
**kwargs
) -> BaseCache:
"""
Initialize a backend from name, class, or instance.
Parameters:
- cache_name: Cache identifier (path, namespace, etc.)
- backend: Backend name or instance
- **kwargs: Backend-specific configuration options
Returns:
Initialized backend instance
Available backend names:
'sqlite', 'redis', 'mongodb', 'gridfs', 'dynamodb', 'filesystem', 'memory'
"""File-based SQLite database backend offering good performance for single-process applications with optional WAL mode for better concurrency.
class SQLiteCache(BaseCache):
    def __init__(
        self,
        db_path: StrOrPath = 'http_cache.sqlite',
        use_cache_dir: bool = False,
        use_temp: bool = False,
        use_memory: bool = False,
        busy_timeout: float = 30.0,
        fast_save: bool = False,
        wal: bool = False,
        **kwargs
    ):
        """SQLite-based cache backend.

        Args:
            db_path: Database file path.
            use_cache_dir: Store the database in the user cache directory.
            use_temp: Store the database in a temporary directory.
            use_memory: Use an in-memory database (":memory:"); contents are
                lost when the process exits.
            busy_timeout: Seconds to wait for database locks.
            fast_save: Skip fsync for better performance (at some durability
                cost on sudden power loss).
            wal: Enable Write-Ahead Logging for better concurrency.
            **kwargs: Additional backend options.
        """

class SQLiteDict(BaseStorage):
"""SQLite storage implementation for key-value operations."""from requests_cache import CachedSession
# Default SQLite backend
session = CachedSession('my_cache') # Creates http_cache.sqlite
# Custom SQLite configuration
session = CachedSession(
cache_name='custom_cache.db',
backend='sqlite',
use_cache_dir=True, # Store in user cache directory
wal=True, # Enable WAL mode
busy_timeout=60.0 # Wait up to 60 seconds for locks
)
# In-memory SQLite (lost on exit)
session = CachedSession(
backend='sqlite',
use_memory=True
)

Redis-based backend for distributed caching with high performance and scalability options. Supports Redis Cluster and multiple data structures.
class RedisCache(BaseCache):
    def __init__(
        self,
        namespace: str = 'http_cache',
        connection: Optional[Redis] = None,
        **kwargs
    ):
        """Redis-based cache backend.

        Args:
            namespace: Prefix applied to the Redis keys used by this cache.
            connection: Existing Redis connection to reuse instead of
                opening a new one.
            **kwargs: Redis connection parameters (host, port, db, etc.).
        """

class RedisDict(BaseStorage):
    """Redis storage using a separate key for each cached response."""
class RedisHashDict(BaseStorage):
"""Redis storage using hash data structure for better memory efficiency."""from requests_cache import CachedSession
import redis
# Default Redis connection (localhost:6379)
session = CachedSession(
cache_name='my_cache',
backend='redis'
)
# Custom Redis configuration
session = CachedSession(
cache_name='api_cache',
backend='redis',
host='redis.example.com',
port=6380,
db=1,
password='secret'
)
# Use existing Redis connection
redis_client = redis.Redis(host='localhost', port=6379, db=0)
session = CachedSession(
cache_name='shared_cache',
backend='redis',
connection=redis_client
)

MongoDB-based backends supporting both standard collections and GridFS for large responses.
class MongoCache(BaseCache):
    def __init__(
        self,
        db_name: str = 'http_cache',
        connection: Optional[MongoClient] = None,
        **kwargs
    ):
        """MongoDB-based cache backend.

        Args:
            db_name: Name of the MongoDB database that stores responses.
            connection: Existing MongoClient connection to reuse instead of
                opening a new one.
            **kwargs: MongoDB connection parameters.
        """

class MongoDict(BaseStorage):
    """MongoDB storage using standard collections."""

class GridFSCache(BaseCache):
    def __init__(
        self,
        db_name: str = 'http_cache',
        connection: Optional[MongoClient] = None,
        **kwargs
    ):
        """GridFS-based cache backend for large responses.

        Args:
            db_name: Name of the MongoDB database that stores responses.
            connection: Existing MongoClient connection to reuse.
            **kwargs: MongoDB connection parameters.
        """

class GridFSDict(BaseStorage):
"""GridFS storage for handling large response bodies."""from requests_cache import CachedSession
from pymongo import MongoClient
# Default MongoDB connection
session = CachedSession(
cache_name='web_cache',
backend='mongodb'
)
# Custom MongoDB configuration
session = CachedSession(
cache_name='api_cache',
backend='mongodb',
host='mongo.example.com',
port=27018,
username='cache_user',
password='password'
)
# GridFS for large responses
session = CachedSession(
cache_name='media_cache',
backend='gridfs'
)
# Use existing MongoDB connection
client = MongoClient('mongodb://localhost:27017/')
session = CachedSession(
cache_name='shared_cache',
backend='mongodb',
connection=client
)

AWS DynamoDB backend for serverless and cloud-native applications, with automatic scaling and global distribution.
class DynamoDbCache(BaseCache):
    def __init__(
        self,
        table_name: str = 'http_cache',
        namespace: str = 'default',
        region_name: str = 'us-east-1',
        **kwargs
    ):
        """DynamoDB-based cache backend.

        Args:
            table_name: DynamoDB table name.
            namespace: Partition key prefix.
            region_name: AWS region hosting the table.
            **kwargs: Additional boto3 configuration (credentials, endpoint,
                etc.).
        """

class DynamoDbDict(BaseStorage):
"""DynamoDB storage implementation."""from requests_cache import CachedSession
# Default DynamoDB configuration
session = CachedSession(
cache_name='api_cache',
backend='dynamodb',
table_name='my-cache-table'
)
# Custom AWS configuration
session = CachedSession(
cache_name='global_cache',
backend='dynamodb',
table_name='global-api-cache',
region_name='us-west-2',
aws_access_key_id='ACCESS_KEY',
aws_secret_access_key='SECRET_KEY'
)

Filesystem-based backend storing responses as individual files, useful for sharing a cache between processes and for debugging.
class FileCache(BaseCache):
    def __init__(
        self,
        cache_name: StrOrPath = 'http_cache',
        use_cache_dir: bool = False,
        use_temp: bool = False,
        **kwargs
    ):
        """Filesystem-based cache backend storing responses as individual files.

        Args:
            cache_name: Cache directory path.
            use_cache_dir: Store files in the user cache directory.
            use_temp: Store files in a temporary directory.
            **kwargs: Additional backend options.
        """

class FileDict(BaseStorage):
"""Filesystem storage using individual files."""from requests_cache import CachedSession
# Default filesystem backend
session = CachedSession(
cache_name='my_cache',
backend='filesystem' # Creates 'my_cache' directory
)
# Custom filesystem configuration
session = CachedSession(
cache_name='/path/to/cache',
backend='filesystem',
use_cache_dir=True # Use system cache directory
)

In-memory backend using Python dictionaries. Fast but non-persistent; suitable for temporary caching within a single process.
class BaseCache:
"""In-memory cache backend (when used with backend='memory')."""from requests_cache import CachedSession
# In-memory caching (lost when process exits)
session = CachedSession(
cache_name='temp_cache',
backend='memory'
)

Guidance for choosing an appropriate backend: use 'sqlite' (the default) for persistent single-process caching, 'redis', 'mongodb', or 'dynamodb' for distributed or cloud deployments, 'gridfs' for very large responses, 'filesystem' when cached responses should be inspectable as individual files, and 'memory' for fast, non-persistent caching.
Foundation classes that all backends inherit from:
class BaseCache:
    """Base class for all cache backends.

    Also usable directly as the non-persistent in-memory backend (it is the
    class registered under the 'memory' backend name).
    """

    def clear(self) -> None:
        """Remove all cached responses."""

    def delete(self, *args, **kwargs) -> int:
        """Delete responses matching the given conditions and return an int
        (presumably the number of responses deleted — confirm against the
        implementation)."""

    def get_response(self, key: str) -> Optional[CachedResponse]:
        """Get a cached response by cache key, or None if not present."""

    def save_response(
        self,
        response: AnyResponse,
        cache_key: str,
        expires: Optional[datetime] = None
    ) -> None:
        """Save a response to the cache under cache_key, with an optional
        expiration time."""

class BaseStorage:
    """Base storage interface for key-value operations."""

class DictStorage(BaseStorage):
"""In-memory dict-based storage."""# Backend specification types
# A backend may be specified as a registered name (see BACKEND_CLASSES) or
# as an already-constructed backend instance.
BackendSpecifier = Union[str, BaseCache]
# A filesystem path given as either a str or a pathlib.Path.
StrOrPath = Union[Path, str]

# Registry mapping backend names (as accepted by init_backend and
# CachedSession(backend=...)) to their implementing classes.
BACKEND_CLASSES = {
    'dynamodb': DynamoDbCache,
    'filesystem': FileCache,
    'gridfs': GridFSCache,
    'memory': BaseCache,  # plain BaseCache doubles as the in-memory backend
    'mongodb': MongoCache,
    'redis': RedisCache,
    'sqlite': SQLiteCache,
}

Install with the Tessl CLI:
npx tessl i tessl/pypi-requests-cache

Evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10