A persistent cache for Python requests
76
Global monkey-patching enables caching for all requests functions by replacing the standard requests.Session with a cached version. This approach requires no code changes and affects all requests made through the standard requests module functions.
Warning: These functions are not thread-safe. Use CachedSession for multi-threaded environments.
Functions to globally install and manage caching for all requests functions.
def install_cache(
cache_name: str = 'http_cache',
backend: Optional[BackendSpecifier] = None,
session_factory: Type[OriginalSession] = CachedSession,
**kwargs
):
"""
Install cache for all requests functions by monkey-patching requests.Session.
Parameters:
- cache_name: Cache path, prefix, or namespace
- backend: Backend name or instance
- session_factory: Session class to use (must inherit from CachedSession/CacheMixin)
- **kwargs: Additional arguments passed to session_factory
After installation, all requests.get(), requests.post(), etc. will use caching.
"""
def uninstall_cache():
"""
Disable caching by restoring original requests.Session.
"""
def get_cache() -> Optional[BaseCache]:
"""
Get the internal cache object from currently installed CachedSession.
Returns:
BaseCache instance if caching is installed, None otherwise
"""
def is_installed() -> bool:
"""
Check whether requests-cache is currently installed.
Returns:
True if monkey-patching is active, False otherwise
"""

Basic installation:
import requests_cache
import requests
# Install caching globally
requests_cache.install_cache('demo_cache')
# Now all requests functions use caching
response = requests.get('https://httpbin.org/get')
print(f"From cache: {response.from_cache}")
# Subsequent identical requests served from cache
response2 = requests.get('https://httpbin.org/get')
print(f"From cache: {response2.from_cache}") # True
# Remove caching
requests_cache.uninstall_cache()

Advanced configuration:
import requests_cache
from datetime import timedelta
requests_cache.install_cache(
cache_name='advanced_cache',
backend='redis',
expire_after=timedelta(hours=1),
allowable_codes=[200, 404],
allowable_methods=['GET', 'POST'],
cache_control=True
)
# Check if caching is active
if requests_cache.is_installed():
cache = requests_cache.get_cache()
print(f"Cache contains {len(cache)} responses")

Context managers for temporary caching control without affecting global state.
def enabled(*args, **kwargs):
"""
Context manager for temporarily enabling caching.
Parameters:
- *args, **kwargs: Same as install_cache()
Usage:
with requests_cache.enabled('temp_cache'):
response = requests.get('https://example.com')
"""
def disabled():
"""
Context manager for temporarily disabling caching.
Usage:
with requests_cache.disabled():
response = requests.get('https://example.com') # Not cached
"""

Temporary caching:
import requests
import requests_cache
# Normal requests (no caching by default)
response1 = requests.get('https://httpbin.org/get')
# Temporarily enable caching
with requests_cache.enabled('temp_cache', expire_after=300):
response2 = requests.get('https://httpbin.org/get') # Cached
response3 = requests.get('https://httpbin.org/get') # From cache
# Back to normal requests (no caching)
response4 = requests.get('https://httpbin.org/get')

Temporary disable:
import requests
import requests_cache
# Install caching globally
requests_cache.install_cache('global_cache')
response1 = requests.get('https://httpbin.org/get') # Cached
# Temporarily disable caching
with requests_cache.disabled():
response2 = requests.get('https://httpbin.org/get') # Not cached
response3 = requests.get('https://httpbin.org/get') # Back to caching

Functions to manage cached data globally when caching is installed.
def clear():
"""
Clear the currently installed cache.
Removes all cached responses if caching is installed.
No effect if caching is not installed.
"""
def delete(*args, **kwargs):
"""
Remove responses from cache according to conditions.
Parameters:
- Same as BaseCache.delete() method
Common usage:
- delete(urls=['https://example.com']) # Delete specific URLs
- delete(expired=True) # Delete expired responses only
"""

Cache management:
import requests
import requests_cache
requests_cache.install_cache('demo_cache')
# Make some requests
requests.get('https://httpbin.org/get')
requests.get('https://httpbin.org/json')
requests.get('https://httpbin.org/headers')
# Check cache status
cache = requests_cache.get_cache()
print(f"Cache contains {len(cache)} responses")
# Clear specific URLs
requests_cache.delete(urls=['https://httpbin.org/get'])
# Clear expired responses
requests_cache.delete(expired=True)
# Clear all cached data
requests_cache.clear()
print(f"Cache now contains {len(requests_cache.get_cache())} responses")

The monkey-patching approach is designed to work with existing codebases that use the standard requests library:
# Existing code using requests
import requests
def fetch_api_data(url):
response = requests.get(url, headers={'Accept': 'application/json'})
return response.json()
def post_data(url, data):
return requests.post(url, json=data)
# Add caching with one line - no code changes needed
import requests_cache
requests_cache.install_cache('api_cache', expire_after=3600)
# Existing functions now use caching automatically
data = fetch_api_data('https://api.example.com/data')
result = post_data('https://api.example.com/submit', {'key': 'value'})

Global monkey-patching functions are not thread-safe. In multi-threaded applications, use session-based caching instead:
# Instead of this (not thread-safe):
requests_cache.install_cache('cache')
# Use this for multi-threaded apps:
from requests_cache import CachedSession
session = CachedSession('cache')
# Pass session to threads or use threading.local
import threading
thread_local = threading.local()
def get_session():
if not hasattr(thread_local, 'session'):
thread_local.session = CachedSession('thread_cache')
return thread_local.session

# Session factory type for install_cache
Type[OriginalSession] = Type[requests.Session]

Install with Tessl CLI
npx tessl i tessl/pypi-requests-cache

evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10