Asynchronous Python ODM for MongoDB with modern Pydantic-based document mapping
—
Quality
Pending
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Type-safe field definitions with MongoDB-specific types, automatic indexing capabilities, and document relationships with lazy loading support. These components enable rich data modeling with proper validation and efficient queries.
MongoDB ObjectId field types that integrate seamlessly with Pydantic validation and serialization.
class PydanticObjectId(ObjectId):
    """Pydantic-compatible ObjectId field type for document IDs."""

    def __init__(self, value: Union[str, ObjectId, None] = None):
        """Initialize ObjectId from string, ObjectId, or generate new one."""
        ...

    def __str__(self) -> str:
        """String representation of ObjectId."""
        ...

    def __repr__(self) -> str:
        """Developer representation of ObjectId."""
        ...

    @classmethod
    def __get_validators__(cls):
        """Pydantic validators for ObjectId validation."""
        # NOTE(review): __get_validators__ is the Pydantic v1 custom-type hook;
        # confirm whether a v2 (__get_pydantic_core_schema__) path exists elsewhere.
        ...

    @classmethod
    def validate(cls, value: Any) -> "PydanticObjectId":
        """Validate and convert input to PydanticObjectId."""
        ...
# Alias for backward compatibility with older Beanie releases.
BeanieObjectId = PydanticObjectId

from beanie import Document, PydanticObjectId
from typing import Optional
class Product(Document):
    # id is optional at construction time; MongoDB assigns one on insert.
    id: Optional[PydanticObjectId] = None
    name: str
    category_id: PydanticObjectId

    class Settings:
        # MongoDB collection backing this document.
        collection = "products"

# Create with ObjectId parsed from its 24-character hex form.
product = Product(
    name="Laptop",
    category_id=PydanticObjectId("507f1f77bcf86cd799439011")
)
# NOTE(review): top-level `await` assumes an async context (e.g. asyncio.run).
await product.insert()

# Access as string
print(f"Product ID: {product.id}")
print(f"Category ID: {str(product.category_id)}")

Special field types for building MongoDB queries and operations programmatically.
class ExpressionField(str):
    """String-based field expressions for query building.

    An ExpressionField is a plain ``str`` holding a MongoDB field path;
    nested access returns further ExpressionFields for dotted paths.
    """

    def __getitem__(self, item: str) -> "ExpressionField":
        """Get sub-field for nested document queries (bracket notation)."""
        ...

    def __getattr__(self, item: str) -> "ExpressionField":
        """Access nested fields using attribute notation."""
        ...

from beanie import Document, ExpressionField
class User(Document):
    name: str
    # NOTE(review): Dict/Any require `from typing import Dict, Any` — not shown here.
    profile: Dict[str, Any]

# Create expression fields for queries
user_name = ExpressionField("name")
profile_age = ExpressionField("profile.age")  # dotted path into the nested document

# Use in queries: comparisons on expression fields build MongoDB filters.
users = await User.find(user_name == "Alice").to_list()
adults = await User.find(profile_age >= 18).to_list()

Field annotations that automatically create MongoDB indexes during initialization, supporting various index types and options.
class IndexedAnnotation:
    """Internal annotation class for indexed fields."""

    # Presumably (index direction/type, extra index options) — TODO confirm.
    _indexed: Tuple[int, Dict[str, Any]]

def Indexed(
    annotation: Any,
    index_type: Optional[str] = None,
    unique: bool = False,
    sparse: bool = False,
    **kwargs
) -> Any:
    """
    Create indexed field annotation for automatic index creation.

    Args:
        annotation: Base field type (str, int, etc.)
        index_type: MongoDB index type ("text", "2d", "2dsphere", etc.)
        unique: Create unique index
        sparse: Create sparse index (skip None values)
        **kwargs: Additional index options

    Returns:
        Annotated field type with index metadata
    """
    ...

from beanie import Document, Indexed
from typing import Annotated, Optional
class User(Document):
    # Unique index on email
    email: Annotated[str, Indexed(unique=True)]
    # Text index for full-text search
    bio: Annotated[str, Indexed(index_type="text")]
    # Compound index (defined in Settings)
    first_name: str
    last_name: str
    # Sparse index (ignores None values)
    phone: Annotated[Optional[str], Indexed(sparse=True)]

    class Settings:
        collection = "users"
        # Each inner list is one index spec of (field, direction) pairs.
        indexes = [
            [("first_name", 1), ("last_name", 1)],  # Compound index
            [("email", 1), ("phone", 1)]  # Multi-field index
        ]
# Alternative syntax
class Product(Document):
name: str = Indexed(str, unique=True)
price: float = Indexed(float)
category: str = Indexed(str, index_type="text")

Reference fields that create relationships between documents in different collections with lazy loading and batch fetching capabilities.
class Link(Generic[T]):
    """Reference to documents in other collections with lazy loading."""

    def __init__(self, ref: DBRef, document_class: Type[T]):
        """
        Initialize document link.

        Args:
            ref: MongoDB DBRef pointing to the linked document
            document_class: Target document class
        """
        ...

    async def fetch(self, fetch_links: bool = False) -> Union[T, "Link[T]"]:
        """
        Load the referenced document.

        Args:
            fetch_links: Also fetch nested links

        Returns:
            Referenced document, or the Link itself if not found
        """
        ...

    @classmethod
    async def fetch_one(cls, link: "Link[T]") -> Union[T, "Link[T]"]:
        """Fetch a single linked document."""
        ...

    @classmethod
    async def fetch_list(
        cls,
        links: List[Union["Link[T]", T]],
        fetch_links: bool = False,
    ) -> List[Union[T, "Link[T]"]]:
        """
        Load multiple referenced documents.

        Args:
            links: List of Link objects or documents
            fetch_links: Also fetch nested links

        Returns:
            List of referenced documents or Links
        """
        ...

    @classmethod
    async def fetch_many(cls, links: List["Link[T]"]) -> List[Union[T, "Link[T]"]]:
        """Fetch multiple linked documents concurrently."""
        ...

    def to_dict(self) -> Dict[str, str]:
        """Convert link to dictionary representation with id and collection."""
        ...

    @staticmethod
    def serialize(value: Union["Link[T]", BaseModel]) -> Dict[str, Any]:
        """Serialize link or document for JSON output."""
        ...

from beanie import Document, Link
from typing import Optional, List
# Target document type for the Link references in the Product example below.
class Category(Document):
    name: str
    description: str

    class Settings:
        # MongoDB collection backing this document.
        collection = "categories"
class Product(Document):
    name: str
    price: float
    category: Link[Category]  # Single reference
    # NOTE(review): bare self-reference "Product" inside its own class body
    # likely needs quoting ('Link["Product"]') to resolve — confirm.
    related_products: List[Link[Product]] = []  # Multiple references

    class Settings:
        collection = "products"
# Create linked documents
category = Category(name="Electronics", description="Electronic devices")
await category.insert()

product = Product(
    name="Smartphone",
    price=599.99,
    category=category  # Link automatically created
)
await product.insert()

# Fetch linked document — afterwards .name is accessible, so fetch presumably
# populates the link in place.
await product.category.fetch()
print(f"Category: {product.category.name}")
# Fetch with nested links
await product.fetch_all_links()

Reverse reference fields that automatically maintain bidirectional relationships between documents.
class BackLink(Generic[T]):
    """Back-reference field for bidirectional relationships."""

    def __init__(
        self,
        document_class: Type[T],
        original_field: str,
        lazy: bool = True
    ):
        """
        Initialize back link.

        Args:
            document_class: Source document class
            original_field: Field name in source document
            lazy: Enable lazy loading
        """
        ...

    async def fetch(self, limit: Optional[int] = None) -> List[T]:
        """
        Fetch documents that reference this document.

        Args:
            limit: Maximum number of documents to fetch

        Returns:
            List of referencing documents
        """
        ...

    def to_dict(self) -> Dict[str, str]:
        """Convert back link to dictionary representation with collection info."""
        ...

from beanie import Document, Link, BackLink
from typing import List
class Author(Document):
    name: str
    # Reverse reference: collects Book documents whose "author" field links here.
    books: List[BackLink["Book"]] = BackLink("Book", "author")

    class Settings:
        collection = "authors"
class Book(Document):
    title: str
    # Forward link; Author.books is the back-reference derived from this field.
    author: Link[Author]

    class Settings:
        collection = "books"
# Create author and books
author = Author(name="Jane Doe")
await author.insert()

book1 = Book(title="First Book", author=author)
book2 = Book(title="Second Book", author=author)
await book1.insert()
await book2.insert()

# Access books through back link — fetch() returns the referencing documents.
books = await author.books.fetch()
print(f"Author has {len(books)} books")

Enumerations that control how linked documents are handled during write and delete operations.
class WriteRules(Enum):
    """Rules for handling linked documents during write operations."""
    DO_NOTHING = "DO_NOTHING"  # Don't save linked documents
    WRITE = "WRITE"  # Save linked documents too

class DeleteRules(Enum):
    """Rules for handling linked documents during delete operations."""
    DO_NOTHING = "DO_NOTHING"  # Leave linked documents unchanged
    DELETE_LINKS = "DELETE_LINKS"  # Delete references to this document

from beanie import Document, Link, WriteRules, DeleteRules
# Minimal author document used to demonstrate link write/delete rules.
class Author(Document):
    name: str

    class Settings:
        collection = "authors"
class Book(Document):
    title: str
    # Linked document whose handling is governed by WriteRules/DeleteRules.
    author: Link[Author]

    class Settings:
        collection = "books"
# Configure link behavior
write_rules = WriteRules.WRITE
delete_rules = DeleteRules.DELETE_LINKS
# When saving book with WriteRules.WRITE:
# - Author will be saved automatically if modified
book = Book(title="New Book", author=author)
await book.save() # Saves both book and author if needed
# When deleting author with DeleteRules.DELETE_LINKS:
# - All book.author references are removed
await author.delete() # Books will have author=Nonefrom typing import TypeVar, Generic, Optional, List, Dict, Any, Union
from enum import Enum
from bson import ObjectId
# Generic type for document links; bound to the Document base class.
T = TypeVar("T", bound="Document")

# Link rule enumerations (appendix duplicates of the public enums above).
class WriteRules(Enum):
    DO_NOTHING = "DO_NOTHING"
    WRITE = "WRITE"

class DeleteRules(Enum):
    DO_NOTHING = "DO_NOTHING"
    DELETE_LINKS = "DELETE_LINKS"

# Field metadata types
IndexInfo = Dict[str, Any]
FieldInfo = Dict[str, Any]

Install with Tessl CLI
npx tessl i tessl/pypi-beanie