Asynchronous Python ODM for MongoDB with modern Pydantic-based document mapping
—
Quality: Pending — does it follow best practices?
Impact: Pending — no eval scenarios have been run.
MongoDB time series collection configuration with granularity control and TTL support.
class Granularity(Enum):
    """Time series granularity options (string values)."""
    seconds = "seconds"
    minutes = "minutes"
    hours = "hours"


# Granularity is declared above so the annotation below resolves when
# pydantic builds the model at class-creation time.
class TimeSeriesConfig(BaseModel):
    """Configuration for MongoDB time series collections."""
    time_field: str                             # Field containing the timestamp
    meta_field: Optional[str] = None            # Optional metadata field
    granularity: Optional[Granularity] = None   # Time granularity level
    expire_after_seconds: Optional[int] = None  # TTL for documents


from beanie import Document, TimeSeriesConfig, Granularity
from datetime import datetime
class SensorReading(Document):
timestamp: datetime
sensor_id: str
temperature: float
humidity: float
class Settings:
collection = "sensor_readings"
timeseries = TimeSeriesConfig(
time_field="timestamp",
meta_field="sensor_id",
granularity=Granularity.minutes,
expire_after_seconds=86400 # 24 hours TTL
)
# Insert time series data
reading = SensorReading(
timestamp=datetime.utcnow(),
sensor_id="sensor_001",
temperature=23.5,
humidity=45.2
)
await reading.insert()Install with Tessl CLI
npx tessl i tessl/pypi-beanie