HTTP library with thread-safe connection pooling, file post support, a user-friendly interface, and more.
—
Quality: Pending (Does it follow best practices?)
Impact: Pending (No eval scenarios have been run.)
Configuration classes for retry behavior, timeouts, and request customization. These objects provide fine-grained control over urllib3's behavior and error handling strategies.
Controls retry behavior for various types of failures including connection errors, read timeouts, HTTP status codes, and redirects.
class Retry:
    """Configure retry behavior for connection errors, read timeouts,
    redirects, HTTP status codes, and other failures.
    """

    def __init__(self, total=10, connect=None, read=None, redirect=None,
                 status=None, other=None, allowed_methods=None,
                 status_forcelist=None, backoff_factor=0, backoff_max=120,
                 raise_on_redirect=True, raise_on_status=True, history=None, **kw):
        """Create a retry configuration.

        Parameters:
        - total: overall cap on retry attempts across all failure types
        - connect: cap on connection-related retry attempts
        - read: cap on read-timeout retry attempts
        - redirect: cap on redirect retry attempts
        - status: cap on HTTP-status-code retry attempts
        - other: cap on retry attempts for other errors
        - allowed_methods: HTTP methods eligible for retry (None selects the default set)
        - status_forcelist: HTTP status codes that always trigger a retry
        - backoff_factor: multiplier for exponential backoff between retries
        - backoff_max: upper bound, in seconds, on any single backoff sleep
        - raise_on_redirect: raise MaxRetryError when redirect retries are exhausted
        - raise_on_status: raise MaxRetryError when status retries are exhausted
        - history: record of previous retry attempts (internal use)
        """

    @classmethod
    def from_int(cls, retries: int) -> 'Retry':
        """Build a Retry from a plain integer (backwards compatibility)."""

    def new(self, **kw) -> 'Retry':
        """Return a copy of this Retry with the given parameters replaced."""

    def increment(self, method: str = None, url: str = None,
                  response=None, error=None, _pool=None, _stacktrace=None) -> 'Retry':
        """Consume one retry attempt and return the resulting Retry object.

        Raises MaxRetryError once the retry budget is exhausted.
        """

    def is_retry(self, method: str, status_code: int, has_retry_after: bool = False) -> bool:
        """Decide whether the request should be retried for this method/status."""

    def get_backoff_time(self) -> float:
        """Compute the backoff delay before the next retry attempt."""

    def sleep(self, response=None):
        """Block for the computed backoff interval."""


# Controls connection and read timeouts with support for different timeout
# values for different phases of the request.
class Timeout:
    """Hold connect, read, and total timeout settings for a request,
    allowing a different limit for each phase.
    """

    def __init__(self, total=None, connect=None, read=None):
        """Create a timeout configuration.

        Parameters:
        - total: overall deadline for the entire request (float seconds or None)
        - connect: deadline for establishing the connection (float or None)
        - read: deadline for reading response data (float or None)
        """

    @classmethod
    def from_float(cls, timeout: float) -> 'Timeout':
        """Build a Timeout from a single float (backwards compatibility)."""

    def clone(self) -> 'Timeout':
        """Return a copy of this timeout configuration."""

    @property
    def connect_timeout(self) -> float:
        """Effective connect timeout."""

    @property
    def read_timeout(self) -> float:
        """Effective read timeout."""


# Default retry configuration
DEFAULT_RETRY = Retry(total=3, redirect=3, connect=3, read=3, status=3)

# Default timeout (infinite): no limit on connect, read, or total time.
DEFAULT_TIMEOUT = Timeout(total=None, connect=None, read=None)

import urllib3
# Simple retry configuration: a single overall attempt budget.
retry_policy = urllib3.Retry(total=5)
pool = urllib3.PoolManager(retries=retry_policy)
response = pool.request('GET', 'https://httpbin.org/status/500')

import urllib3
# Comprehensive retry configuration covering every failure category.
retry_policy = urllib3.Retry(
    total=5,                 # at most 5 retry attempts overall
    connect=3,               # at most 3 connection retries
    read=3,                  # at most 3 read-timeout retries
    redirect=2,              # at most 2 redirect retries
    status=3,                # at most 3 status-code retries
    backoff_factor=0.3,      # exponential backoff between attempts
    backoff_max=120,         # never wait more than 2 minutes between retries
    status_forcelist=[429, 500, 502, 503, 504],  # always retry these status codes
    allowed_methods=['GET', 'PUT', 'DELETE'],    # only retry these methods
)
pool = urllib3.PoolManager(retries=retry_policy)
response = pool.request('GET', 'https://httpbin.org/status/503')

import urllib3
# Basic timeout configuration: one 30-second budget for the whole request.
request_timeout = urllib3.Timeout(total=30.0)
pool = urllib3.PoolManager(timeout=request_timeout)
response = pool.request('GET', 'https://httpbin.org/delay/5')

import urllib3
# Separate timeouts for different phases of the request.
phase_timeout = urllib3.Timeout(
    connect=5.0,   # up to 5 seconds to establish the connection
    read=30.0,     # up to 30 seconds to read the response
)
pool = urllib3.PoolManager(timeout=phase_timeout)
response = pool.request('GET', 'https://httpbin.org/delay/10')

import urllib3
# Pool-wide default configuration.
pool = urllib3.PoolManager(
    retries=urllib3.Retry(total=3),
    timeout=urllib3.Timeout(total=10.0),
)
# Per-request overrides take precedence over the pool defaults.
override_retry = urllib3.Retry(total=10, backoff_factor=1.0)
override_timeout = urllib3.Timeout(total=60.0)
response = pool.request('GET', 'https://httpbin.org/delay/5',
                        retries=override_retry,
                        timeout=override_timeout)

import urllib3
# Retry on rate limiting and transient server errors.
rate_limit_retry = urllib3.Retry(
    total=10,
    status_forcelist=[429, 500, 502, 503, 504],
    backoff_factor=1.0,               # exponential backoff between attempts
    respect_retry_after_header=True,  # honor the server's Retry-After header
)
pool = urllib3.PoolManager(retries=rate_limit_retry)
# A 429 Too Many Requests response will be retried.
response = pool.request('GET', 'https://httpbin.org/status/429')

import urllib3
# Restrict retries to idempotent (safe-to-repeat) methods.
idempotent_retry = urllib3.Retry(
    total=5,
    allowed_methods=['GET', 'HEAD', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'],
)
pool = urllib3.PoolManager(retries=idempotent_retry)
# GET is in allowed_methods, so it is retried on failure.
get_response = pool.request('GET', 'https://httpbin.org/status/500')
# POST is not in allowed_methods, so it fails without retrying.
post_response = pool.request('POST', 'https://httpbin.org/status/500',
                             fields={'data': 'value'})

import urllib3
# Very short timeouts so unhealthy services fail fast.
fast_timeout = urllib3.Timeout(
    connect=0.5,  # 500 ms to connect
    read=1.0,     # 1 s to read the response
)
# Generous timeouts for slow, long-running operations.
slow_timeout = urllib3.Timeout(
    connect=10.0,  # 10 s to connect
    read=300.0,    # 5 min to read the response
)
pool = urllib3.PoolManager()
# Health check uses the fast timeout.
try:
    health_response = pool.request('GET', 'https://api.example.com/health',
                                   timeout=fast_timeout)
except urllib3.exceptions.TimeoutError:
    print("Service unhealthy - fast timeout")
# Data-processing endpoint uses the slow timeout.
data_response = pool.request('POST', 'https://api.example.com/process-data',
                             json={'large': 'dataset'},
                             timeout=slow_timeout)

import urllib3
# Aggressive retrying paired with moderate per-attempt timeouts.
resilient_retry = urllib3.Retry(
    total=10,
    connect=5,
    read=5,
    backoff_factor=0.5,
    status_forcelist=[429, 500, 502, 503, 504],
)
per_attempt_timeout = urllib3.Timeout(
    connect=3.0,
    read=10.0,
)
pool = urllib3.PoolManager(
    retries=resilient_retry,
    timeout=per_attempt_timeout,
)
# Up to 10 retries with exponential backoff; each attempt gets
# a 3 s connect budget and a 10 s read budget.
response = pool.request('GET', 'https://unreliable-api.example.com/data')

import urllib3
# Disable retries entirely.
disabled = urllib3.Retry(total=False)
# Or construct with the integer 0.
disabled = urllib3.Retry(0)
pool = urllib3.PoolManager(retries=disabled)
# Any error now fails immediately.
try:
    response = pool.request('GET', 'https://httpbin.org/status/500')
except urllib3.exceptions.MaxRetryError as e:
    print(f"Request failed: {e}")

import urllib3
# Robust production configuration.
production_retry = urllib3.Retry(
    total=5,             # modest overall retry budget
    connect=3,           # network hiccups are common
    read=3,              # tolerate slow or failed reads
    redirect=3,          # follow a few redirects
    backoff_factor=0.3,  # exponential backoff
    status_forcelist=[429, 502, 503, 504],             # transient server conditions
    allowed_methods=['GET', 'HEAD', 'PUT', 'DELETE'],  # idempotent methods only
)
production_timeout = urllib3.Timeout(
    connect=5.0,  # reasonable connection budget
    read=30.0,    # allow the server time to respond
)
pool = urllib3.PoolManager(
    retries=production_retry,
    timeout=production_timeout,
)

import urllib3
# Fast-fail configuration for local development.
dev_retry = urllib3.Retry(total=1)        # minimal retries
dev_timeout = urllib3.Timeout(total=5.0)  # short overall deadline
pool = urllib3.PoolManager(
    retries=dev_retry,
    timeout=dev_timeout,
)

# Install with Tessl CLI
npx tessl i tessl/pypi-urllib3