A library for creating GraphQL APIs using dataclasses and type annotations with extensive framework integration support.
Utilities for efficient data loading, file uploads, type conversions, and common GraphQL patterns. These utilities help solve common problems in GraphQL API development like the N+1 query problem and provide helpful tools for schema manipulation.
Batch loading utility for efficient data fetching and N+1 query problem prevention.
class DataLoader:
"""Batch loading utility for efficient data fetching."""
def __init__(
self,
load_fn: Callable[[List[Any]], Awaitable[List[Any]]],
*,
batch: bool = True,
max_batch_size: int = None,
batch_scheduler: Callable = None,
cache: bool = True,
cache_key_fn: Callable[[Any], Any] = None,
cache_map: AbstractCache = None
):
"""
Initialize DataLoader.
Args:
load_fn: Async function that takes list of keys and returns list of values
batch: Whether to batch requests
max_batch_size: Maximum size of batches
batch_scheduler: Custom batch scheduling function
cache: Whether to cache results
cache_key_fn: Function to generate cache keys
cache_map: Custom cache implementation
"""
async def load(self, key: Any) -> Any:
"""
Load single value by key.
Args:
key: Key to load
Returns:
Loaded value
"""
async def load_many(self, keys: List[Any]) -> List[Any]:
"""
Load multiple values by keys.
Args:
keys: List of keys to load
Returns:
List of loaded values in same order as keys
"""
def clear(self, key: Any) -> "DataLoader":
"""
Clear cache for specific key.
Args:
key: Key to clear from cache
Returns:
DataLoader instance for chaining
"""
def clear_all(self) -> "DataLoader":
"""
Clear all cached values.
Returns:
DataLoader instance for chaining
"""
def prime(self, key: Any, value: Any) -> "DataLoader":
"""
Prime cache with key-value pair.
Args:
key: Key to prime
value: Value to cache
Returns:
DataLoader instance for chaining
"""Usage Example:
import asyncio
from strawberry.dataloader import DataLoader
# Define batch loading function
async def load_users_by_ids(user_ids: List[str]) -> List[Optional[User]]:
    """Load users by IDs from database.

    Batch function for DataLoader: must return one entry per requested ID,
    in the same order, with None for IDs that do not exist.
    """
    print(f"Batch loading users: {user_ids}")  # This should only print once per batch
    # Simulate database query
    users_data = await database.fetch_users_by_ids(user_ids)
    # Create lookup dictionary
    user_lookup = {str(user.id): user for user in users_data}
    # Return users in same order as requested IDs
    return [user_lookup.get(user_id) for user_id in user_ids]
# Create DataLoader
user_loader = DataLoader(load_users_by_ids)
# Usage in resolvers
@strawberry.type
class Post:
    id: strawberry.ID
    title: str
    author_id: str

    @strawberry.field
    async def author(self, info: strawberry.Info) -> Optional[User]:
        """Resolve the post's author via the request-scoped DataLoader."""
        # DataLoader automatically batches these requests
        return await info.context.user_loader.load(self.author_id)
@strawberry.type
class Query:
    @strawberry.field
    async def posts(self, info: strawberry.Info) -> List[Post]:
        """Return all posts; per-post author lookups are batched."""
        posts = await get_all_posts()
        # Even though we're loading author for each post,
        # DataLoader will batch all author requests into a single query
        return posts
# Context setup
class Context:
def __init__(self):
self.user_loader = DataLoader(load_users_by_ids)
async def get_context():
return Context()Abstract cache interface for custom cache implementations.
class AbstractCache:
"""Abstract interface for DataLoader cache implementations."""
def get(self, key: Any) -> Any:
"""Get value from cache."""
def set(self, key: Any, value: Any) -> None:
"""Set value in cache."""
def delete(self, key: Any) -> None:
"""Delete value from cache."""
def clear(self) -> None:
"""Clear all cache values."""Custom Cache Example:
import redis
from strawberry.dataloader import AbstractCache
class RedisCache(AbstractCache):
    """DataLoader cache backed by Redis, with a per-entry TTL."""

    def __init__(self, redis_client: "redis.Redis", ttl: int = 300):
        self.redis = redis_client
        self.ttl = ttl  # seconds before a cached entry expires

    def get(self, key):
        """Return the cached value for `key`, or None on a miss."""
        import pickle  # local import: `pickle` is not imported at module level
        cached = self.redis.get(f"dataloader:{key}")
        return pickle.loads(cached) if cached else None

    def set(self, key, value):
        """Store `value` under `key` with the configured TTL."""
        import pickle  # local import: `pickle` is not imported at module level
        self.redis.setex(
            f"dataloader:{key}",
            self.ttl,
            pickle.dumps(value)
        )

    def delete(self, key):
        """Remove a single cached entry."""
        self.redis.delete(f"dataloader:{key}")

    def clear(self):
        """Remove every entry written by this cache (``dataloader:*`` keys)."""
        for key in self.redis.scan_iter(match="dataloader:*"):
            self.redis.delete(key)
# Use custom cache
redis_client = redis.Redis()
user_loader = DataLoader(
load_users_by_ids,
cache_map=RedisCache(redis_client, ttl=600)
)

Internal types used by DataLoader implementation.
class LoaderTask:
    """Internal task representation for DataLoader."""

    pass


class Batch:
    """Internal batch representation for DataLoader."""

    pass

# DataLoader with custom cache key function
def user_cache_key(user_id: str) -> str:
    """Build a namespaced DataLoader cache key for a user ID."""
    return f"user:{user_id}"
user_loader = DataLoader(
load_users_by_ids,
cache_key_fn=user_cache_key
)
# DataLoader with limited batch size
large_data_loader = DataLoader(
load_large_data,
max_batch_size=50 # Limit batch size for large operations
)
# DataLoader without caching (for frequently changing data)
realtime_loader = DataLoader(
load_realtime_data,
cache=False
)
# Priming DataLoader cache
async def prime_user_cache(users: List[User]):
    """Seed the user loader's cache so later loads skip the database."""
    for user in users:
        user_loader.prime(user.id, user)
# DataLoader composition
class UserService:
def __init__(self):
self.user_loader = DataLoader(self._load_users)
self.user_profile_loader = DataLoader(self._load_profiles)
async def _load_users(self, user_ids: List[str]) -> List[User]:
return await database.get_users_by_ids(user_ids)
async def _load_profiles(self, user_ids: List[str]) -> List[UserProfile]:
return await database.get_profiles_by_user_ids(user_ids)
async def get_user_with_profile(self, user_id: str) -> UserWithProfile:
user, profile = await asyncio.gather(
self.user_loader.load(user_id),
self.user_profile_loader.load(user_id)
)
return UserWithProfile(user=user, profile=profile)File upload support for GraphQL mutations with multipart/form-data requests.
class Upload:
"""File upload scalar type for multipart requests."""
filename: str # Original filename
content_type: str # MIME content type
def read(self, size: int = -1) -> bytes:
"""Read file content."""
async def read_async(self, size: int = -1) -> bytes:
"""Read file content asynchronously."""
def seek(self, offset: int) -> None:
"""Seek to position in file."""
def close(self) -> None:
"""Close file handle."""Usage Example:
from strawberry.file_uploads import Upload
@strawberry.type
class UploadResult:
    success: bool       # whether the upload completed
    filename: str       # stored filename
    size: int           # size in bytes
    url: Optional[str]  # public URL, if available
@strawberry.type
class Mutation:
@strawberry.mutation
async def upload_file(
self,
file: Upload,
description: str = ""
) -> UploadResult:
# Read file content
content = await file.read_async()
# Save file to storage
file_path = f"uploads/{file.filename}"
await save_file_to_storage(file_path, content)
return UploadResult(
success=True,
filename=file.filename,
size=len(content),
url=f"/files/{file.filename}"
)
@strawberry.mutation
async def upload_multiple_files(
self,
files: List[Upload]
) -> List[UploadResult]:
results = []
for file in files:
content = await file.read_async()
file_path = f"uploads/{file.filename}"
await save_file_to_storage(file_path, content)
results.append(UploadResult(
success=True,
filename=file.filename,
size=len(content),
url=f"/files/{file.filename}"
))
return resultsFrontend Usage (JavaScript):
// Single file upload
const mutation = `
mutation UploadFile($file: Upload!, $description: String) {
uploadFile(file: $file, description: $description) {
success
filename
size
url
}
}
`;
const variables = {
file: file, // File object from input element
description: "My uploaded file"
};
// Multiple file upload
const multiMutation = `
mutation UploadMultipleFiles($files: [Upload!]!) {
uploadMultipleFiles(files: $files) {
success
filename
size
}
}
`;Utilities for programmatically creating and manipulating GraphQL types.
def create_type(
    name: str,
    fields: Dict[str, Any],
    *,
    description: Optional[str] = None,
    interfaces: Optional[List[Type]] = None,
    directives: Optional[List] = None,
) -> Type:
    """Programmatically create GraphQL types.

    Args:
        name: Type name.
        fields: Dictionary of field name to field definition.
        description: Type description.
        interfaces: Interfaces this type implements.
        directives: GraphQL directives to apply.

    Returns:
        New GraphQL type.
    """
def merge_types(
name: str,
types: List[Type],
*,
description: str = None
) -> Type:
"""
Merge multiple GraphQL types into a single type.
Args:
name: New type name
types: List of types to merge
description: Merged type description
Returns:
Merged GraphQL type
"""Usage Examples:
from strawberry.tools import create_type, merge_types
# Create type programmatically
UserType = create_type(
"User",
{
"id": strawberry.ID,
"name": str,
"email": str,
"age": int
},
description="User account information"
)
# Create type with methods
def get_full_name(self) -> str:
return f"{self.first_name} {self.last_name}"
PersonType = create_type(
"Person",
{
"first_name": str,
"last_name": str,
"full_name": strawberry.field(resolver=get_full_name)
}
)
# Merge multiple types
@strawberry.type
class BaseUser:
id: strawberry.ID
name: str
@strawberry.type
class UserPreferences:
theme: str
language: str
# Merge into single type
ExtendedUser = merge_types(
"ExtendedUser",
[BaseUser, UserPreferences],
description="User with preferences"
)

Print GraphQL schema in SDL (Schema Definition Language) format.
def print_schema(schema: Schema) -> str:
"""
Print GraphQL schema as SDL string.
Args:
schema: GraphQL schema to print
Returns:
Schema Definition Language string
"""Usage Example:
from strawberry.printer import print_schema
schema = strawberry.Schema(query=Query, mutation=Mutation)
sdl = print_schema(schema)
print(sdl)
# Output:
# type Query {
# users: [User!]!
# user(id: ID!): User
# }
#
# type User {
# id: ID!
# name: String!
# email: String!
# }Constants and utilities for GraphQL subscriptions.
# WebSocket protocol constants
GRAPHQL_TRANSPORT_WS_PROTOCOL: str # Modern GraphQL-WS transport protocol
GRAPHQL_WS_PROTOCOL: str  # Legacy GraphQL-WS protocol

Usage Example:
from strawberry.subscriptions import (
GRAPHQL_TRANSPORT_WS_PROTOCOL,
GRAPHQL_WS_PROTOCOL
)
# Use in ASGI app
app = strawberry.asgi.GraphQL(
schema,
subscription_protocols=[
GRAPHQL_TRANSPORT_WS_PROTOCOL,
GRAPHQL_WS_PROTOCOL # For backwards compatibility
]
)

Generate Python code from GraphQL queries.
class CodegenFile:
"""Generated code file."""
path: str
content: str
class CodegenResult:
"""Result of code generation."""
files: List[CodegenFile]
class QueryCodegen:
"""Generate code from GraphQL queries."""
def __init__(self, schema: Schema): ...
def generate(
self,
query: str,
*,
target_language: str = "python",
plugins: List[QueryCodegenPlugin] = None
) -> CodegenResult: ...
class QueryCodegenPlugin:
"""Base class for codegen plugins."""
pass
class ConsolePlugin(QueryCodegenPlugin):
"""Plugin for console output."""
passGenerate Strawberry code from GraphQL SDL.
def codegen(
schema_sdl: str,
*,
output_dir: str = None,
plugins: List[str] = None
) -> None:
"""
Generate Strawberry code from GraphQL SDL.
Args:
schema_sdl: GraphQL Schema Definition Language string
output_dir: Directory to write generated files
plugins: List of codegen plugins to use
"""Usage Example:
from strawberry.codegen import codegen
schema_sdl = """
type User {
id: ID!
name: String!
email: String!
posts: [Post!]!
}
type Post {
id: ID!
title: String!
content: String!
author: User!
}
type Query {
users: [User!]!
posts: [Post!]!
}
"""
# Generate Strawberry code from SDL
codegen(
schema_sdl,
output_dir="./generated",
plugins=["strawberry"]
)

Command-line interface for Strawberry operations.
def run() -> None:
"""Main CLI application entry point."""
# Available commands:
# strawberry server - Development server
# strawberry export-schema - Export schema SDL
# strawberry codegen - Generate code from SDL
# strawberry schema-codegen - Generate types from queriesCLI Usage Examples:
# Start development server
strawberry server myapp.schema:schema --host 0.0.0.0 --port 8000
# Export schema to file
strawberry export-schema myapp.schema:schema --output schema.graphql
# Generate code from SDL
strawberry codegen --schema schema.graphql --output generated/
# Generate types from queries
strawberry schema-codegen --schema myapp.schema:schema --queries queries/ --output types.py

from functools import lru_cache
import asyncio
class FieldCache:
    """Simple in-memory TTL cache for expensive resolver fields."""

    def __init__(self, ttl: int = 300):
        self.ttl = ttl    # default time-to-live in seconds
        self.cache = {}   # cache_key -> (value, timestamp)

    def cached_field(self, ttl: Optional[int] = None):
        """Decorator factory caching an async function's result.

        Args:
            ttl: Per-field time-to-live in seconds; falls back to the
                instance default when None.

        Returns:
            A decorator for async resolver functions.
        """
        # Local imports: the surrounding example imports only `lru_cache`
        # and `asyncio`, so `functools`/`time` are not in module scope.
        import functools
        import time

        def decorator(func):
            @functools.wraps(func)
            async def wrapper(*args, **kwargs):
                cache_key = f"{func.__name__}:{hash(str(args + tuple(kwargs.items())))}"
                if cache_key in self.cache:
                    value, timestamp = self.cache[cache_key]
                    # Explicit None check: `ttl or self.ttl` would wrongly
                    # treat an intentional ttl=0 as "use the default".
                    max_age = self.ttl if ttl is None else ttl
                    if time.time() - timestamp < max_age:
                        return value
                result = await func(*args, **kwargs)
                self.cache[cache_key] = (result, time.time())
                return result
            return wrapper
        return decorator
# Usage
field_cache = FieldCache(ttl=600)
@strawberry.type
class User:
id: strawberry.ID
name: str
@field_cache.cached_field(ttl=300)
@strawberry.field
async def expensive_computation(self) -> str:
# Expensive operation that benefits from caching
await asyncio.sleep(1) # Simulate expensive operation
return f"computed_value_for_{self.id}"

Install with Tessl CLI
npx tessl i tessl/pypi-strawberry-graphql