Python supercharged for fastai development
56
HTTP client functionality with comprehensive error handling, URL manipulation utilities, and socket programming helpers for network-based applications. The net module provides enhanced HTTP operations with better error reporting and simplified APIs for common networking tasks.
Enhanced HTTP operations with automatic error handling, progress tracking, and flexible response processing.
def urlopen(url, data=None, **kwargs):
"""
Open URL with enhanced error handling and automatic retries.
Enhanced version of urllib.request.urlopen with better error handling,
automatic header management, and support for various data types.
Parameters:
- url: str|Request, URL to open or Request object
- data: bytes|dict|str, data to send (automatically encoded)
- **kwargs: additional arguments passed to Request
Returns:
Response object with enhanced attributes
Raises:
Specific HTTP error classes (HTTP404NotFoundError, etc.)
"""
def urlread(url, decode=True, **kwargs):
"""
Read content from URL with automatic decoding.
Simplified interface for reading URL content with automatic
text decoding and comprehensive error handling.
Parameters:
- url: str, URL to read from
- decode: bool, automatically decode response as text (default: True)
- **kwargs: additional arguments for urlopen
Returns:
str|bytes: URL content (decoded text if decode=True, bytes otherwise)
Raises:
HTTP error classes for various response codes
"""
def urljson(url, **kwargs):
"""
Load JSON data from URL.
Convenience function to fetch and parse JSON from HTTP endpoints
with automatic error handling and response validation.
Parameters:
- url: str, URL returning JSON data
- **kwargs: additional arguments for urlopen
Returns:
dict|list: Parsed JSON object
Raises:
HTTP errors or JSON parsing errors with enhanced messages
"""
def urlcheck(url, **kwargs):
"""
Check if URL is accessible without downloading content.
Performs HEAD request to verify URL accessibility and get
metadata without downloading the full response body.
Parameters:
- url: str, URL to check
- **kwargs: additional arguments for request
Returns:
bool: True if URL is accessible (2xx response)
"""
def urlsave(url, dest=None, **kwargs):
"""
Download and save URL content to file.
Downloads content from URL and saves to specified destination
with automatic filename detection and progress tracking.
Parameters:
- url: str, URL to download
- dest: Path|str, destination path (auto-generated if None)
- **kwargs: additional arguments for download
Returns:
Path: Path to saved file
"""
def urlretrieve(url, filename=None, **kwargs):
"""
Enhanced version of urllib.request.urlretrieve.
Downloads file from URL with progress reporting and
better error handling than standard library version.
Parameters:
- url: str, URL to retrieve
- filename: str, local filename (auto-generated if None)
- **kwargs: additional arguments
Returns:
tuple: (filename, headers) similar to urllib.request.urlretrieve
"""
def urldest(url, dest=None, **kwargs):
"""
Determine destination path for URL download.
Intelligently determines appropriate local filename for
URL download based on URL structure and headers.
Parameters:
- url: str, source URL
- dest: Path|str, destination directory or filename
- **kwargs: additional arguments
Returns:
Path: Resolved destination path for download
"""Utilities for URL manipulation, validation, and cleaning.
def urlquote(url):
"""
Properly quote URL components for safe HTTP requests.
Applies URL encoding to path and query components while preserving
URL structure. Handles international characters and special symbols.
Parameters:
- url: str, URL to quote
Returns:
str: Properly quoted URL safe for HTTP requests
"""
def urlwrap(url, data=None, headers=None):
"""
Wrap URL in urllib Request object with proper quoting.
Creates Request object from URL with automatic quoting and
header management. Handles both string URLs and existing Request objects.
Parameters:
- url: str|Request, URL or existing Request object
- data: bytes|dict, request data (automatically encoded)
- headers: dict, HTTP headers to include
Returns:
Request: urllib.request.Request object ready for use
"""
def urlclean(url):
"""
Clean and normalize URL format.
Removes unnecessary components, normalizes encoding, and
ensures URL follows standard format conventions.
Parameters:
- url: str, URL to clean
Returns:
str: Cleaned and normalized URL
"""
def urlvalid(x):
"""
Check if string is a valid URL.
Validates URL format and structure without making network requests.
Checks for proper scheme, domain, and path formatting.
Parameters:
- x: str, string to validate as URL
Returns:
bool: True if string is valid URL format
"""
def urlrequest(url, verb='GET', **kwargs):
"""
Make HTTP request with specified method.
Flexible HTTP request function supporting various HTTP methods
with comprehensive parameter handling and error reporting.
Parameters:
- url: str, target URL
- verb: str, HTTP method (GET, POST, PUT, DELETE, etc.)
- **kwargs: additional request parameters (headers, data, etc.)
Returns:
Response object with status, headers, and content
"""
def urlsend(url, **kwargs):
"""
Send data to URL endpoint.
Simplified interface for sending data to HTTP endpoints with
automatic method selection and content-type handling.
Parameters:
- url: str, target URL
- **kwargs: request parameters including data payload
Returns:
Response object
"""
def do_request(url, verb='GET', **kwargs):
"""
Low-level HTTP request with full control.
Advanced HTTP request function with detailed control over
all request parameters and response handling.
Parameters:
- url: str, target URL
- verb: str, HTTP method
- **kwargs: comprehensive request configuration
Returns:
Detailed response object with metadata
"""Comprehensive HTTP error hierarchy with specific exception classes for each status code.
class HTTP4xxClientError(HTTPError):
"""
Base class for client-side HTTP errors (4xx status codes).
Provides enhanced error information and context for client errors
including detailed response information and suggested actions.
"""
class HTTP5xxServerError(HTTPError):
"""
Base class for server-side HTTP errors (5xx status codes).
Handles server errors with additional context and retry information
for applications that need to handle server failures gracefully.
"""
# Specific error classes for each HTTP status code:
class HTTP400BadRequestError(HTTP4xxClientError):
"""HTTP 400 Bad Request - Client sent invalid request."""
class HTTP401UnauthorizedError(HTTP4xxClientError):
"""HTTP 401 Unauthorized - Authentication required."""
class HTTP403ForbiddenError(HTTP4xxClientError):
"""HTTP 403 Forbidden - Access denied."""
class HTTP404NotFoundError(HTTP4xxClientError):
"""HTTP 404 Not Found - Resource not found."""
class HTTP405MethodNotAllowedError(HTTP4xxClientError):
"""HTTP 405 Method Not Allowed - HTTP method not supported."""
class HTTP408RequestTimeoutError(HTTP4xxClientError):
"""HTTP 408 Request Timeout - Request took too long."""
class HTTP429TooManyRequestsError(HTTP4xxClientError):
"""HTTP 429 Too Many Requests - Rate limit exceeded."""
class HTTP500InternalServerError(HTTP5xxServerError):
"""HTTP 500 Internal Server Error - Server encountered an error."""
class HTTP502BadGatewayError(HTTP5xxServerError):
"""HTTP 502 Bad Gateway - Upstream server error."""
class HTTP503ServiceUnavailableError(HTTP5xxServerError):
"""HTTP 503 Service Unavailable - Server temporarily unavailable."""
class HTTP504GatewayTimeoutError(HTTP5xxServerError):
"""HTTP 504 Gateway Timeout - Upstream server timeout."""Low-level networking functions for socket programming and server development.
def start_server(port, host='localhost', dgram=False):
"""
Start socket server with simplified interface.
Creates and configures socket server with sensible defaults
for common server patterns. Supports both TCP and UDP.
Parameters:
- port: int, port number to listen on
- host: str, hostname to bind to (default: 'localhost')
- dgram: bool, use UDP instead of TCP (default: False)
Returns:
socket: Configured server socket ready for accept()/recvfrom()
"""
def start_client(port, host='localhost', dgram=False):
"""
Start socket client connection.
Creates client socket and establishes connection to server
with automatic error handling and retry logic.
Parameters:
- port: int, server port to connect to
- host: str, server hostname (default: 'localhost')
- dgram: bool, use UDP instead of TCP (default: False)
Returns:
socket: Connected client socket ready for send()/recv()
"""
def tobytes(s):
"""
Convert string to bytes for socket transmission.
Handles string-to-bytes conversion with proper encoding
for network transmission. Handles various input types.
Parameters:
- s: str|bytes, data to convert
Returns:
bytes: Data ready for socket transmission
"""
def recv_once(sock):
"""
Receive data from socket with timeout handling.
Single receive operation with proper timeout and error handling.
Useful for non-blocking socket operations.
Parameters:
- sock: socket, socket to receive from
Returns:
bytes: Received data or None if timeout/error
"""
def http_response(s, status=200, hdrs=None):
"""
Generate HTTP response string.
Creates properly formatted HTTP response with headers
and content. Useful for simple HTTP server implementations.
Parameters:
- s: str, response body content
- status: int, HTTP status code (default: 200)
- hdrs: dict, additional HTTP headers
Returns:
str: Complete HTTP response ready for transmission
"""Network configuration utilities and default header management.
url_default_headers = {
"""
Default HTTP headers used by fastcore URL functions.
Comprehensive set of headers that mimic modern browser behavior
for better compatibility with web services and APIs.
Headers include:
- Accept: Comprehensive content type acceptance
- Accept-Language: Language preferences
- User-Agent: Modern browser user agent string
- Security headers for modern web standards
"""
}
def urlopener():
"""
Create URL opener with enhanced default headers.
Returns configured urllib.request.OpenerDirector with
sensible defaults for web scraping and API access.
Returns:
OpenerDirector: Configured URL opener with default headers
"""
ExceptionsHTTP = {}
"""
Dictionary mapping HTTP status codes to exception classes.
Provides programmatic access to HTTP exception classes by status code
for dynamic error handling and response processing.
Usage:
try:
response = urlopen(url)
except ExceptionsHTTP[404]:
print("Page not found")
"""from fastcore.net import urlread, urljson, urlsave, urlcheck
# Simple content reading
content = urlread("https://httpbin.org/get")
print(content) # Automatically decoded text
# JSON API access
api_data = urljson("https://api.github.com/users/octocat")
print(api_data['login']) # 'octocat'
# File downloading
file_path = urlsave(
"https://httpbin.org/image/png",
dest="downloaded_image.png"
)
# Check URL accessibility
if urlcheck("https://example.com"):
print("Site is accessible")
else:
print("Site is down or unreachable")
# Read binary content
image_data = urlread("https://httpbin.org/image/jpeg", decode=False)
with open("image.jpg", "wb") as f:
    f.write(image_data)

from fastcore.net import urlrequest, urlsend, do_request
import json
# Custom HTTP methods
response = urlrequest(
"https://httpbin.org/anything",
verb='POST',
headers={'Content-Type': 'application/json'},
data=json.dumps({"key": "value"})
)
# Send form data
form_response = urlsend(
"https://httpbin.org/post",
data={'username': 'alice', 'password': 'secret'}
)
# Low-level request control
detailed_response = do_request(
"https://httpbin.org/status/418",
verb='GET',
headers={'Custom-Header': 'test-value'},
timeout=30
)
# Handle different response types
def fetch_with_fallback(url):
try:
return urljson(url) # Try JSON first
except Exception:
return urlread(url) # Fall back to text
data = fetch_with_fallback("https://api.example.com/data")

from fastcore.net import urlread, urljson, HTTP404NotFoundError, HTTP403ForbiddenError, HTTP429TooManyRequestsError, HTTP500InternalServerError
import time
# Specific error handling
def robust_fetch(url, max_retries=3):
for attempt in range(max_retries):
try:
return urlread(url)
except HTTP404NotFoundError:
print(f"URL not found: {url}")
return None
except HTTP429TooManyRequestsError:
wait_time = 2 ** attempt # Exponential backoff
print(f"Rate limited, waiting {wait_time}s...")
time.sleep(wait_time)
except Exception as e:
print(f"Attempt {attempt + 1} failed: {e}")
if attempt == max_retries - 1:
raise
return None
# Use the robust fetcher
content = robust_fetch("https://api.example.com/data")
# Handle multiple error types
def safe_api_call(url):
try:
return urljson(url)
except (HTTP404NotFoundError, HTTP403ForbiddenError) as e:
print(f"Access error: {e}")
return {"error": "access_denied"}
except HTTP500InternalServerError as e:
print(f"Server error: {e}")
return {"error": "server_error"}
except Exception as e:
print(f"Unexpected error: {e}")
return {"error": "unknown"}from fastcore.net import urlquote, urlwrap, urlclean, urlvalid
# URL quoting for special characters
unsafe_url = "https://example.com/search?q=hello world&lang=en"
safe_url = urlquote(unsafe_url)
print(safe_url) # https://example.com/search?q=hello%20world&lang=en
# Request object creation
request = urlwrap(
"https://api.example.com/data",
data=b'{"query": "test"}',
headers={'Content-Type': 'application/json'}
)
# URL validation
urls_to_check = [
"https://example.com",
"ftp://files.example.com",
"not-a-url",
"http://localhost:8000/api"
]
valid_urls = [url for url in urls_to_check if urlvalid(url)]
print(valid_urls) # Only valid URLs
# URL cleaning
messy_url = "https://example.com///path//to/../resource?param=value&"
clean_url = urlclean(messy_url)
print(clean_url)  # https://example.com/path/resource?param=value

from fastcore.net import start_server, start_client, tobytes, recv_once, http_response
import threading
# Simple echo server
def echo_server():
server_sock = start_server(8080, host='localhost')
print("Echo server listening on port 8080")
while True:
client_sock, addr = server_sock.accept()
print(f"Connection from {addr}")
data = recv_once(client_sock)
if data:
client_sock.send(tobytes(f"Echo: {data.decode()}"))
client_sock.close()
# Start server in background thread
server_thread = threading.Thread(target=echo_server, daemon=True)
server_thread.start()
# Simple client
def echo_client(message):
client_sock = start_client(8080, host='localhost')
client_sock.send(tobytes(message))
response = recv_once(client_sock)
client_sock.close()
return response.decode() if response else None
# Test the echo
response = echo_client("Hello, server!")
print(response) # "Echo: Hello, server!"
# Simple HTTP server response
def create_http_response(content, content_type="text/html"):
headers = {'Content-Type': content_type}
return http_response(content, status=200, hdrs=headers)
html_response = create_http_response("<h1>Hello, World!</h1>")
json_response = create_http_response(
'{"message": "success"}',
content_type="application/json"
)

from fastcore.net import urlread, HTTP429TooManyRequestsError
from fastcore.parallel import parallel
import time
import random
# Rate-limited batch processing
def rate_limited_fetch(url, delay=1.0):
"""Fetch URL with rate limiting."""
time.sleep(delay + random.uniform(0, 0.5)) # Add jitter
return urlread(url)
# Fetch multiple URLs with rate limiting
urls = [
f"https://httpbin.org/delay/{i}"
for i in range(1, 6)
]
# Sequential with rate limiting
results = []
for url in urls:
try:
result = rate_limited_fetch(url, delay=0.5)
results.append(result)
except Exception as e:
print(f"Failed to fetch {url}: {e}")
results.append(None)
# Parallel with controlled concurrency
def safe_parallel_fetch(urls, n_workers=2, delay=0.5):
"""Fetch URLs in parallel with rate limiting."""
def fetch_with_delay(url):
return rate_limited_fetch(url, delay)
return parallel(
fetch_with_delay,
urls,
n_workers=n_workers # Limit concurrency
)
parallel_results = safe_parallel_fetch(urls, n_workers=2)
# Retry logic with exponential backoff
def fetch_with_retry(url, max_retries=3, base_delay=1.0):
"""Fetch URL with exponential backoff retry."""
for attempt in range(max_retries):
try:
return urlread(url)
except HTTP429TooManyRequestsError:
if attempt < max_retries - 1:
delay = base_delay * (2 ** attempt)
print(f"Rate limited, retrying in {delay}s...")
time.sleep(delay)
else:
raise
except Exception as e:
if attempt == max_retries - 1:
raise
time.sleep(base_delay)
return None
# Robust batch processing
robust_results = [
fetch_with_retry(url)
for url in urls[:3] # Process subset
]

from fastcore.net import urljson, urlread
from fastcore.foundation import L
from fastcore.basics import listify
# Process URLs with L collections
api_urls = L([
"https://api.github.com/users/octocat",
"https://api.github.com/users/defunkt",
"https://api.github.com/users/pjhyett"
])
# Fetch all user data
def get_user_data(url):
try:
return urljson(url)
except Exception as e:
return {"error": str(e), "url": url}
users = api_urls.map(get_user_data)
# Extract specific fields
usernames = users.map(lambda u: u.get('login', 'unknown'))
follower_counts = users.map(lambda u: u.get('followers', 0))
# Filter successful responses
valid_users = users.filter(lambda u: 'error' not in u)
# Create summary
summary = {
'total_users': len(users),
'successful_fetches': len(valid_users),
'usernames': list(usernames),
'total_followers': sum(follower_counts)
}
print(f"Processed {summary['total_users']} users")
print(f"Total followers: {summary['total_followers']}")Install with Tessl CLI
npx tessl i tessl/pypi-fastcoredocs
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10