CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-peewee

A lightweight Python ORM (Object-Relational Mapping) library that provides a simple and expressive interface for database operations.

Pending
Overview
Eval results
Files

docs/extensions-playhouse.md

Extensions (Playhouse)

Advanced database extensions, additional field types, connection pooling, database-specific features, and framework integrations provided by the playhouse package. These extensions significantly expand Peewee's capabilities for specialized use cases and advanced database features.

Capabilities

Database Backend Extensions

Enhanced database implementations with advanced features beyond the core Peewee database classes.

# PostgreSQL Extensions
from playhouse.postgres_ext import PostgresqlExtDatabase, ArrayField, JSONField, HStoreField

class PostgresqlExtDatabase(Database):
    """Extended PostgreSQL database with array, JSON, and full-text search support."""

class ArrayField(Field):
    """
    PostgreSQL array field for list data.
    
    Parameters:
    - field_class: Base field type for array elements
    - field_kwargs (dict): Arguments for base field
    - dimensions (int): Number of array dimensions
    """

class JSONField(Field):
    """PostgreSQL JSON field for structured data."""

class BinaryJSONField(JSONField):
    """PostgreSQL JSONB field for binary JSON storage."""

class HStoreField(Field):
    """PostgreSQL key-value store field."""

class TSVectorField(Field):
    """PostgreSQL text search vector field."""

class DateTimeTZField(Field):
    """PostgreSQL timezone-aware datetime field."""

class IntervalField(Field):
    """PostgreSQL time interval field."""

# MySQL Extensions
from playhouse.mysql_ext import MySQLConnectorDatabase, JSONField

class MySQLConnectorDatabase(Database):
    """MySQL database using mysql.connector driver."""

class JSONField(Field):
    """MySQL JSON field for structured data."""

# SQLite Extensions
from playhouse.sqlite_ext import SqliteExtDatabase, JSONField, FTSModel

class SqliteExtDatabase(Database):
    """Extended SQLite with JSON1, FTS, and virtual table support."""

class JSONField(Field):
    """SQLite JSON field using JSON1 extension."""

class FTSModel(Model):
    """Base class for Full-Text Search models (FTS3/4)."""

class FTS5Model(Model):
    """Base class for FTS5 full-text search models."""

# CockroachDB
from playhouse.cockroachdb import CockroachDatabase, UUIDKeyField

class CockroachDatabase(Database):
    """CockroachDB distributed database implementation."""

class UUIDKeyField(Field):
    """UUID primary key field with automatic generation."""

Usage examples:

# PostgreSQL with arrays and JSON
from playhouse.postgres_ext import *

db = PostgresqlExtDatabase('mydb', user='postgres', password='secret')

class User(Model):
    username = CharField()
    tags = ArrayField(CharField)  # Array of strings
    metadata = JSONField()        # JSON data
    location = HStoreField()      # Key-value pairs
    
    class Meta:
        database = db

# Create with array and JSON data
user = User.create(
    username='john',
    tags=['admin', 'developer'],
    metadata={'age': 30, 'city': 'NYC'},
    location={'country': 'US', 'state': 'NY'}
)

# Query arrays and JSON
users = User.select().where(User.tags.contains(['admin']))
users = User.select().where(User.metadata['age'].cast('integer') > 25)

# SQLite with full-text search
from playhouse.sqlite_ext import *

db = SqliteExtDatabase('app.db')

class Document(FTSModel):
    title = SearchField()
    content = SearchField()
    
    class Meta:
        database = db

# Full-text search
results = Document.select().where(Document.match('python programming'))

Connection Pooling

Database connection pooling for improved performance and resource management in high-concurrency applications.

from playhouse.pool import PooledDatabase, PooledPostgresqlDatabase, PooledMySQLDatabase

class PooledDatabase(Database):
    """
    Base pooled database with connection limits.
    
    Parameters:
    - max_connections (int): Maximum pool connections
    - stale_timeout (int): Connection timeout in seconds
    - timeout (int): Wait timeout for available connection
    """

class PooledPostgresqlDatabase(PostgresqlDatabase, PooledDatabase):
    """PostgreSQL database with connection pooling."""

class PooledMySQLDatabase(MySQLDatabase, PooledDatabase):
    """MySQL database with connection pooling."""

class PooledSqliteDatabase(SqliteDatabase, PooledDatabase):
    """SQLite database with connection pooling."""

class MaxConnectionsExceeded(Exception):
    """Raised when connection pool limit is reached."""

Usage example:

from playhouse.pool import PooledPostgresqlDatabase

db = PooledPostgresqlDatabase(
    'mydb',
    user='postgres',
    password='secret',
    max_connections=20,    # Pool size
    stale_timeout=300,     # 5 minutes
    timeout=10             # Wait 10 seconds for connection
)

Database URL Configuration

Simple database configuration using URL strings for easy deployment and configuration management.

from playhouse.db_url import connect, parse, register_database

def connect(url, **kwargs):
    """
    Connect to database using URL string.
    
    Parameters:
    - url (str): Database URL (e.g., 'postgresql://user:pass@host/db')
    - **kwargs: Additional connection parameters
    
    Returns:
    Database: Connected database instance
    """

def parse(url):
    """
    Parse database URL to connection parameters.
    
    Parameters:
    - url (str): Database URL
    
    Returns:
    dict: Connection parameters
    """

def register_database(scheme, database_class):
    """
    Register custom database URL scheme.
    
    Parameters:
    - scheme (str): URL scheme name
    - database_class: Database class for scheme
    """

Usage examples:

from playhouse.db_url import connect

# Various database URLs
db = connect('sqlite:///app.db')
db = connect('postgresql://user:pass@localhost/mydb')
db = connect('mysql://user:pass@localhost/mydb')
db = connect('cockroachdb://user@localhost:26257/mydb')

# With additional parameters
db = connect('postgresql://user:pass@localhost/mydb', 
             sslmode='require', pool_size=20)

Schema Migrations

Database schema migration system for evolving database structure over time with version control.

from playhouse.migrate import migrate, SchemaMigrator, PostgresqlMigrator, MySQLMigrator, SqliteMigrator

class SchemaMigrator:
    """Base schema migration class."""
    
    def add_column(self, table, column_name, field):
        """Add new column to table."""
    
    def drop_column(self, table, column_name, cascade=True):
        """Remove column from table."""
    
    def rename_column(self, table, old_name, new_name):
        """Rename table column."""
    
    def add_not_null(self, table, column):
        """Add NOT NULL constraint."""
    
    def drop_not_null(self, table, column):
        """Remove NOT NULL constraint."""
    
    def add_index(self, table, columns, unique=False):
        """Add index to table."""
    
    def drop_index(self, table, index_name):
        """Remove index from table."""
    
    def rename_table(self, old_name, new_name):
        """Rename table."""

def migrate(*operations):
    """
    Execute migration operations.
    
    Parameters:
    - *operations: Migration operations to execute
    """

@operation
def custom_operation(migrator, database, **kwargs):
    """Decorator for custom migration operations."""

Usage examples:

from playhouse.migrate import *

# Create migrator for your database
migrator = PostgresqlMigrator(db)

# Define migrations
title_field = CharField(default='Untitled')
status_field = CharField(choices=[('draft', 'Draft'), ('published', 'Published')], default='draft')

migrate(
    migrator.add_column('post', 'title', title_field),
    migrator.add_column('post', 'status', status_field),
    migrator.add_index('post', ('title', 'status'), unique=False),
)

# Custom migration
@operation
def create_user_indexes(migrator, database, **kwargs):
    database.execute_sql('CREATE INDEX idx_user_email ON user(email)')
    database.execute_sql('CREATE INDEX idx_user_created ON user(created_at)')

migrate(create_user_indexes(migrator, db))

Database Introspection and Model Generation

Reverse engineering tools for generating Peewee models from existing database schemas.

from playhouse.reflection import Introspector, generate_models, print_model

class Introspector:
    """Database introspection engine."""
    
    @classmethod
    def from_database(cls, database, schema=None):
        """
        Create introspector from database.
        
        Parameters:
        - database: Database instance
        - schema (str): Database schema name
        
        Returns:
        Introspector: Configured introspector
        """
    
    def introspect(self, table_names=None, include_views=False, snake_case=True):
        """
        Introspect database schema.
        
        Parameters:
        - table_names (list): Specific tables to introspect
        - include_views (bool): Include database views
        - snake_case (bool): Convert names to snake_case
        
        Returns:
        DatabaseMetadata: Schema information
        """

def generate_models(database, schema=None, **options):
    """
    Generate model classes from database schema.
    
    Parameters:
    - database: Database instance
    - schema (str): Database schema
    - **options: Generation options
    
    Returns:
    dict: Generated model classes
    """

def print_model(model):
    """Print model class definition as code."""

Usage example:

from playhouse.reflection import *

# Introspect existing database
introspector = Introspector.from_database(db)
models = generate_models(db)

# Print generated model code
for model in models.values():
    print_model(model)

# Use generated models
User = models['User']
users = User.select().where(User.is_active == True)

Data Import/Export Utilities

High-level data manipulation interfaces for importing and exporting data in various formats.

from playhouse.dataset import DataSet

class DataSet:
    """
    High-level database interface for data manipulation.
    
    Parameters:
    - database: Database instance
    """
    
    def __getitem__(self, table_name):
        """
        Get table interface.
        
        Parameters:
        - table_name (str): Table name
        
        Returns:
        Table: Table manipulation interface
        """
    
    def __contains__(self, table_name):
        """Check if table exists."""
    
    def tables(self):
        """List all table names."""

class Table:
    """Table-level data operations."""
    
    def insert(self, **kwargs):
        """Insert row into table."""
    
    def insert_many(self, rows):
        """Insert multiple rows."""
    
    def update(self, columns=None, **kwargs):
        """Update rows in table."""
    
    def find(self, **kwargs):
        """Find rows matching criteria."""
    
    def find_one(self, **kwargs):
        """Find single row."""
    
    def all(self):
        """Get all rows from table."""
    
    def delete(self, **kwargs):
        """Delete rows from table."""
    
    def freeze(self, format='json', filename=None):
        """Export table data."""
    
    def thaw(self, format='json', filename=None, strict=False):
        """Import table data."""

Usage examples:

from playhouse.dataset import DataSet

# Create dataset interface
ds = DataSet(db)

# Insert data
users_table = ds['users']
users_table.insert(username='john', email='john@example.com')

# Bulk insert
users_table.insert_many([
    {'username': 'jane', 'email': 'jane@example.com'},
    {'username': 'bob', 'email': 'bob@example.com'},
])

# Query data
active_users = users_table.find(is_active=True)
john = users_table.find_one(username='john')

# Export/import
users_table.freeze(format='json', filename='users.json')
users_table.thaw(format='json', filename='backup_users.json')

Framework Integrations

Integration helpers for web frameworks and testing utilities.

# Flask Integration
from playhouse.flask_utils import FlaskDB, get_object_or_404, PaginatedQuery

class FlaskDB:
    """
    Flask-Peewee integration helper.
    
    Parameters:
    - app: Flask application instance
    - database: Database instance
    """
    
    def connect_db(self):
        """Connect database for request."""
    
    def close_db(self, exc):
        """Close database after request."""

def get_object_or_404(query_or_model, *expressions):
    """
    Get object or raise 404 error.
    
    Parameters:
    - query_or_model: Model class or query
    - *expressions: Filter expressions
    
    Returns:
    Model instance
    
    Raises:
    404 error if not found
    """

class PaginatedQuery:
    """Query pagination for web views."""

# Signals (Django-style)
from playhouse.signals import Model, pre_save, post_save, pre_delete, post_delete

# Testing Utilities
from playhouse.test_utils import count_queries, assert_query_count

class count_queries:
    """Context manager for counting executed queries."""

def assert_query_count(expected_count):
    """Decorator to assert specific query count in tests."""

Usage examples:

# Flask integration
from flask import Flask
from playhouse.flask_utils import FlaskDB

app = Flask(__name__)
flask_db = FlaskDB(app, db)

@app.route('/users/<int:user_id>')
def user_detail(user_id):
    user = get_object_or_404(User, User.id == user_id)
    return f"User: {user.username}"

# Testing with query counting
from playhouse.test_utils import count_queries

def test_efficient_query():
    with count_queries() as counter:
        users = list(User.select().limit(10))
        posts = list(Post.select().where(Post.author.in_(users)))
    
    assert counter.count <= 2  # Should be efficient

@assert_query_count(1)
def test_single_query():
    User.select().count()  # This test will fail if more than 1 query

Specialized Field Types and Utilities

Additional field types and utility functions for specific use cases.

# Special Fields
from playhouse.fields import CompressedField, PickleField

class CompressedField(Field):
    """Automatic compression/decompression field using zlib."""

class PickleField(Field):
    """Python object serialization field using pickle."""

# Hybrid Properties
from playhouse.hybrid import hybrid_property, hybrid_method

class hybrid_property:
    """Properties that work on both instances and classes."""

class hybrid_method:
    """Methods that work on both instances and classes."""

# Model Utilities
from playhouse.shortcuts import model_to_dict, dict_to_model, update_model_from_dict

def model_to_dict(model, recurse=True, backrefs=False, only=None, exclude=None, extra_attrs=None, fields_from_query=None, max_depth=None):
    """
    Convert model instance to dictionary.
    
    Parameters:
    - model: Model instance
    - recurse (bool): Include related objects
    - backrefs (bool): Include back-references
    - only (list): Include only specified fields
    - exclude (list): Exclude specified fields
    - extra_attrs (list): Include extra attributes
    - fields_from_query: Use fields from query
    - max_depth (int): Maximum recursion depth
    
    Returns:
    dict: Model data as dictionary
    """

def dict_to_model(model_class, data, ignore_unknown=False):
    """
    Create model instance from dictionary.
    
    Parameters:
    - model_class: Model class
    - data (dict): Field data
    - ignore_unknown (bool): Ignore unknown fields
    
    Returns:
    Model instance
    """

def update_model_from_dict(instance, data, ignore_unknown=False):
    """
    Update model instance from dictionary.
    
    Parameters:
    - instance: Model instance to update
    - data (dict): New field data
    - ignore_unknown (bool): Ignore unknown fields
    
    Returns:
    list: Updated field names
    """

Usage examples:

from playhouse.fields import CompressedField, PickleField
from playhouse.shortcuts import model_to_dict, dict_to_model

class Document(Model):
    title = CharField()
    content = CompressedField()  # Automatically compressed
    metadata = PickleField()     # Python object storage
    
    class Meta:
        database = db

# Create with special fields
doc = Document.create(
    title='My Document',
    content='Very long content that will be compressed...',
    metadata={'author': 'John', 'tags': ['important', 'draft']}
)

# Convert to/from dictionaries
doc_dict = model_to_dict(doc)
new_doc = dict_to_model(Document, doc_dict)

# Update from dictionary
update_data = {'title': 'Updated Title', 'new_field': 'ignored'}
updated_fields = update_model_from_dict(doc, update_data, ignore_unknown=True)

Advanced SQLite Features

Comprehensive SQLite extensions for full-text search, virtual tables, and user-defined functions.

from playhouse.sqlite_ext import *

# Full-text search functions
def match(field, query):
    """Full-text search match function."""

def rank(fts_model):
    """Ranking function for FTS results."""

def bm25(fts_model, *weights):
    """BM25 ranking algorithm."""

# Virtual table functions
def ClosureTable(model_class, foreign_key=None):
    """Create closure table for hierarchical data."""

# User-defined functions (playhouse.sqlite_udf)
from playhouse.sqlite_udf import register_functions

# Many built-in functions available:
# Mathematical: sqrt, pow, log, sin, cos, etc.
# String: levenshtein, substr_count, strip_chars, etc.  
# Hash: md5, sha1, sha256, etc.
# Date: date_part, date_trunc, to_timestamp, etc.

Additional Playhouse Modules

Additional specialized modules for advanced use cases and specific database features.

# Key-Value Store Interface  
from playhouse.kv import KeyValue

class KeyValue:
    """
    Dictionary-like interface over database tables.
    
    Parameters:
    - database: Database instance
    - table_name (str): Table name for key-value storage
    """
    def __init__(self, database, table_name='keyvalue'): ...
    def __getitem__(self, key): ...
    def __setitem__(self, key, value): ...
    def __delitem__(self, key): ...
    def __contains__(self, key): ...
    def keys(self): ...
    def values(self): ...
    def items(self): ...

# Asynchronous SQLite Operations
from playhouse.sqliteq import SqliteQueueDatabase, AsyncCursor

class SqliteQueueDatabase(Database):
    """
    Asynchronous SQLite database with background query queue.
    
    Parameters:
    - database (str): Database file path
    - use_gevent (bool): Use gevent for async operations
    - autostart (bool): Automatically start writer thread
    - queue_max_size (int): Maximum queue size
    - results_timeout (int): Result timeout in seconds
    """

class AsyncCursor:
    """Asynchronous query cursor for non-blocking operations."""
    def fetchall(self): ...
    def fetchone(self): ...
    def fetchmany(self, size=None): ...

# Change Tracking
from playhouse.sqlite_changelog import ChangeLog, BaseChangeLog

class ChangeLog:
    """
    Change tracking system for table modifications.
    
    Parameters:
    - model: Model class to track changes for
    """
    def __init__(self, model): ...
    def install(self): ...
    def uninstall(self): ...

class BaseChangeLog(Model):
    """Base model for change log entries."""
    table = CharField()
    primary_key = CharField()
    operation = CharField()  # 'INSERT', 'UPDATE', 'DELETE'
    changes = TextField()    # JSON changes
    timestamp = DateTimeField(default=datetime.datetime.now)

# Hybrid Properties and Methods  
from playhouse.hybrid import hybrid_property, hybrid_method

class hybrid_property:
    """
    Properties that work on both instances and classes.
    
    Usage:
    class User(Model):
        first_name = CharField()
        last_name = CharField()
        
        @hybrid_property
        def full_name(self):
            return f"{self.first_name} {self.last_name}"
    """
    def __init__(self, func): ...
    def __get__(self, instance, owner): ...
    def expression(self, func): ...

class hybrid_method:
    """Methods that work on both instances and classes."""
    def __init__(self, func): ...
    def __get__(self, instance, owner): ...
    def expression(self, func): ...

Usage examples:

# Key-value store
from playhouse.kv import KeyValue

kv = KeyValue(db)
kv['config'] = {'debug': True, 'max_users': 1000}
config = kv['config']
del kv['old_setting']

# Async SQLite operations
from playhouse.sqliteq import SqliteQueueDatabase

async_db = SqliteQueueDatabase('async.db', autostart=True, results_timeout=10)

# Perform async query
cursor = async_db.execute_sql('SELECT * FROM users WHERE age > ?', (18,))
results = cursor.fetchall()

# Change tracking
from playhouse.sqlite_changelog import ChangeLog

user_changelog = ChangeLog(User)
user_changelog.install()  # Install triggers

# Now all User changes are tracked
user = User.create(username='john', email='john@example.com')
user.email = 'newemail@example.com'
user.save()

# Query change log
changes = BaseChangeLog.select().where(BaseChangeLog.table == 'user')

This comprehensive extension ecosystem makes Peewee suitable for everything from simple applications to complex data processing pipelines with advanced database features.

Install with Tessl CLI

npx tessl i tessl/pypi-peewee

docs

database-and-connections.md

extensions-playhouse.md

index.md

models-and-fields.md

queries-and-operations.md

tile.json