0
# Extensions (Playhouse)
1
2
Advanced database extensions, additional field types, connection pooling, database-specific features, and framework integrations provided by the playhouse package. These extensions significantly expand Peewee's capabilities for specialized use cases and advanced database features.
3
4
## Capabilities
5
6
### Database Backend Extensions
7
8
Enhanced database implementations with advanced features beyond the core Peewee database classes.
9
10
```python { .api }
11
# PostgreSQL Extensions
12
from playhouse.postgres_ext import PostgresqlExtDatabase, ArrayField, JSONField, HStoreField
13
14
class PostgresqlExtDatabase(Database):
15
"""Extended PostgreSQL database with array, JSON, and full-text search support."""
16
17
class ArrayField(Field):
18
"""
19
PostgreSQL array field for list data.
20
21
Parameters:
22
- field_class: Base field type for array elements
23
- field_kwargs (dict): Arguments for base field
24
- dimensions (int): Number of array dimensions
25
"""
26
27
class JSONField(Field):
28
"""PostgreSQL JSON field for structured data."""
29
30
class BinaryJSONField(JSONField):
31
"""PostgreSQL JSONB field for binary JSON storage."""
32
33
class HStoreField(Field):
34
"""PostgreSQL key-value store field."""
35
36
class TSVectorField(Field):
37
"""PostgreSQL text search vector field."""
38
39
class DateTimeTZField(Field):
40
"""PostgreSQL timezone-aware datetime field."""
41
42
class IntervalField(Field):
43
"""PostgreSQL time interval field."""
44
45
# MySQL Extensions
46
from playhouse.mysql_ext import MySQLConnectorDatabase, JSONField
47
48
class MySQLConnectorDatabase(Database):
49
"""MySQL database using mysql.connector driver."""
50
51
class JSONField(Field):
52
"""MySQL JSON field for structured data."""
53
54
# SQLite Extensions
55
from playhouse.sqlite_ext import SqliteExtDatabase, JSONField, FTSModel
56
57
class SqliteExtDatabase(Database):
58
"""Extended SQLite with JSON1, FTS, and virtual table support."""
59
60
class JSONField(Field):
61
"""SQLite JSON field using JSON1 extension."""
62
63
class FTSModel(Model):
64
"""Base class for Full-Text Search models (FTS3/4)."""
65
66
class FTS5Model(Model):
67
"""Base class for FTS5 full-text search models."""
68
69
# CockroachDB
70
from playhouse.cockroachdb import CockroachDatabase, UUIDKeyField
71
72
class CockroachDatabase(Database):
73
"""CockroachDB distributed database implementation."""
74
75
class UUIDKeyField(Field):
76
"""UUID primary key field with automatic generation."""
77
```
78
79
Usage examples:
80
81
```python
82
# PostgreSQL with arrays and JSON
83
from playhouse.postgres_ext import *
84
85
db = PostgresqlExtDatabase('mydb', user='postgres', password='secret')
86
87
class User(Model):
88
username = CharField()
89
tags = ArrayField(CharField) # Array of strings
90
metadata = JSONField() # JSON data
91
location = HStoreField() # Key-value pairs
92
93
class Meta:
94
database = db
95
96
# Create with array and JSON data
97
user = User.create(
98
username='john',
99
tags=['admin', 'developer'],
100
metadata={'age': 30, 'city': 'NYC'},
101
location={'country': 'US', 'state': 'NY'}
102
)
103
104
# Query arrays and JSON
105
users = User.select().where(User.tags.contains(['admin']))
106
users = User.select().where(User.metadata['age'].cast('int') > 25)
107
108
# SQLite with full-text search
109
from playhouse.sqlite_ext import *
110
111
db = SqliteExtDatabase('app.db')
112
113
class Document(FTSModel):
114
title = TextField()
115
content = TextField()
116
117
class Meta:
118
database = db
119
120
# Full-text search
121
results = Document.select().where(Document.match('python programming'))
122
```
123
124
### Connection Pooling
125
126
Database connection pooling for improved performance and resource management in high-concurrency applications.
127
128
```python { .api }
129
from playhouse.pool import PooledDatabase, PooledPostgresqlDatabase, PooledMySQLDatabase
130
131
class PooledDatabase(Database):
132
"""
133
Base pooled database with connection limits.
134
135
Parameters:
136
- max_connections (int): Maximum pool connections
137
- stale_timeout (int): Seconds after which an idle connection is considered stale and is recycled instead of being reused
138
- timeout (int): Wait timeout for available connection
139
"""
140
141
class PooledPostgresqlDatabase(PostgresqlDatabase, PooledDatabase):
142
"""PostgreSQL database with connection pooling."""
143
144
class PooledMySQLDatabase(MySQLDatabase, PooledDatabase):
145
"""MySQL database with connection pooling."""
146
147
class PooledSqliteDatabase(SqliteDatabase, PooledDatabase):
148
"""SQLite database with connection pooling."""
149
150
class MaxConnectionsExceeded(Exception):
151
"""Raised when connection pool limit is reached."""
152
```
153
154
Usage example:
155
156
```python
157
from playhouse.pool import PooledPostgresqlDatabase
158
159
db = PooledPostgresqlDatabase(
160
'mydb',
161
user='postgres',
162
password='secret',
163
max_connections=20, # Pool size
164
stale_timeout=300, # 5 minutes
165
timeout=10 # Wait 10 seconds for connection
166
)
167
```
168
169
### Database URL Configuration
170
171
Simple database configuration using URL strings for easy deployment and configuration management.
172
173
```python { .api }
174
from playhouse.db_url import connect, parse, register_database
175
176
def connect(url, **kwargs):
177
"""
178
Connect to database using URL string.
179
180
Parameters:
181
- url (str): Database URL (e.g., 'postgresql://user:pass@host/db')
182
- **kwargs: Additional connection parameters
183
184
Returns:
185
Database: Connected database instance
186
"""
187
188
def parse(url):
189
"""
190
Parse database URL to connection parameters.
191
192
Parameters:
193
- url (str): Database URL
194
195
Returns:
196
dict: Connection parameters
197
"""
198
199
def register_database(scheme, database_class):
200
"""
201
Register custom database URL scheme.
202
203
Parameters:
204
- scheme (str): URL scheme name
205
- database_class: Database class for scheme
206
"""
207
```
208
209
Usage examples:
210
211
```python
212
from playhouse.db_url import connect
213
214
# Various database URLs
215
db = connect('sqlite:///app.db')
216
db = connect('postgresql://user:pass@localhost/mydb')
217
db = connect('mysql://user:pass@localhost/mydb')
218
db = connect('cockroachdb://user@localhost:26257/mydb')
219
220
# With additional parameters
221
db = connect('postgresql://user:pass@localhost/mydb',
222
sslmode='require', pool_size=20)
223
```
224
225
### Schema Migrations
226
227
Database schema migration system for evolving database structure over time with version control.
228
229
```python { .api }
230
from playhouse.migrate import migrate, SchemaMigrator, PostgresqlMigrator, MySQLMigrator, SqliteMigrator
231
232
class SchemaMigrator:
233
"""Base schema migration class."""
234
235
def add_column(self, table, column_name, field):
236
"""Add new column to table."""
237
238
def drop_column(self, table, column_name, cascade=True):
239
"""Remove column from table."""
240
241
def rename_column(self, table, old_name, new_name):
242
"""Rename table column."""
243
244
def add_not_null(self, table, column):
245
"""Add NOT NULL constraint."""
246
247
def drop_not_null(self, table, column):
248
"""Remove NOT NULL constraint."""
249
250
def add_index(self, table, columns, unique=False):
251
"""Add index to table."""
252
253
def drop_index(self, table, index_name):
254
"""Remove index from table."""
255
256
def rename_table(self, old_name, new_name):
257
"""Rename table."""
258
259
def migrate(*operations):
260
"""
261
Execute migration operations.
262
263
Parameters:
264
- *operations: Migration operations to execute
265
"""
266
267
@operation
268
def custom_operation(migrator, database, **kwargs):
269
"""Decorator for custom migration operations."""
270
```
271
272
Usage examples:
273
274
```python
275
from playhouse.migrate import *
276
277
# Create migrator for your database
278
migrator = PostgresqlMigrator(db)
279
280
# Define migrations
281
title_field = CharField(default='Untitled')
282
status_field = CharField(choices=['draft', 'published'], default='draft')
283
284
migrate(
285
migrator.add_column('post', 'title', title_field),
286
migrator.add_column('post', 'status', status_field),
287
migrator.add_index('post', ('title', 'status'), unique=False),
288
)
289
290
# Custom migration
291
@operation
292
def create_user_indexes(migrator, database, **kwargs):
293
database.execute_sql('CREATE INDEX idx_user_email ON user(email)')
294
database.execute_sql('CREATE INDEX idx_user_created ON user(created_at)')
295
296
migrate(create_user_indexes(migrator, db))
297
```
298
299
### Database Introspection and Model Generation
300
301
Reverse engineering tools for generating Peewee models from existing database schemas.
302
303
```python { .api }
304
from playhouse.reflection import Introspector, generate_models, print_model
305
306
class Introspector:
307
"""Database introspection engine."""
308
309
@classmethod
310
def from_database(cls, database, schema=None):
311
"""
312
Create introspector from database.
313
314
Parameters:
315
- database: Database instance
316
- schema (str): Database schema name
317
318
Returns:
319
Introspector: Configured introspector
320
"""
321
322
def introspect(self, table_names=None, include_views=False, snake_case=True):
323
"""
324
Introspect database schema.
325
326
Parameters:
327
- table_names (list): Specific tables to introspect
328
- include_views (bool): Include database views
329
- snake_case (bool): Convert names to snake_case
330
331
Returns:
332
DatabaseMetadata: Schema information
333
"""
334
335
def generate_models(database, schema=None, **options):
336
"""
337
Generate model classes from database schema.
338
339
Parameters:
340
- database: Database instance
341
- schema (str): Database schema
342
- **options: Generation options
343
344
Returns:
345
dict: Generated model classes
346
"""
347
348
def print_model(model):
349
"""Print model class definition as code."""
350
```
351
352
Usage example:
353
354
```python
355
from playhouse.reflection import *
356
357
# Introspect existing database
358
introspector = Introspector.from_database(db)
359
models = generate_models(db)
360
361
# Print generated model code
362
for model in models.values():
363
print_model(model)
364
365
# Use generated models
366
User = models['User']
367
users = User.select().where(User.is_active == True)
368
```
369
370
### Data Import/Export Utilities
371
372
High-level data manipulation interfaces for importing and exporting data in various formats.
373
374
```python { .api }
375
from playhouse.dataset import DataSet
376
377
class DataSet:
378
"""
379
High-level database interface for data manipulation.
380
381
Parameters:
382
- database: Database instance
383
"""
384
385
def __getitem__(self, table_name):
386
"""
387
Get table interface.
388
389
Parameters:
390
- table_name (str): Table name
391
392
Returns:
393
Table: Table manipulation interface
394
"""
395
396
def __contains__(self, table_name):
397
"""Check if table exists."""
398
399
def tables(self):
400
"""List all table names."""
401
402
class Table:
403
"""Table-level data operations."""
404
405
def insert(self, **kwargs):
406
"""Insert row into table."""
407
408
def insert_many(self, rows):
409
"""Insert multiple rows."""
410
411
def update(self, columns=None, **kwargs):
412
"""Update rows in table."""
413
414
def find(self, **kwargs):
415
"""Find rows matching criteria."""
416
417
def find_one(self, **kwargs):
418
"""Find single row."""
419
420
def all(self):
421
"""Get all rows from table."""
422
423
def delete(self, **kwargs):
424
"""Delete rows from table."""
425
426
def freeze(self, format='json', filename=None):
427
"""Export table data."""
428
429
def thaw(self, format='json', filename=None, strict=False):
430
"""Import table data."""
431
```
432
433
Usage examples:
434
435
```python
436
from playhouse.dataset import DataSet
437
438
# Create dataset interface
439
ds = DataSet(db)
440
441
# Insert data
442
users_table = ds['users']
443
users_table.insert(username='john', email='john@example.com')
444
445
# Bulk insert
446
users_table.insert_many([
447
{'username': 'jane', 'email': 'jane@example.com'},
448
{'username': 'bob', 'email': 'bob@example.com'},
449
])
450
451
# Query data
452
active_users = users_table.find(is_active=True)
453
john = users_table.find_one(username='john')
454
455
# Export/import
456
users_table.freeze(format='json', filename='users.json')
457
users_table.thaw(format='json', filename='backup_users.json')
458
```
459
460
### Framework Integrations
461
462
Integration helpers for web frameworks and testing utilities.
463
464
```python { .api }
465
# Flask Integration
466
from playhouse.flask_utils import FlaskDB, get_object_or_404, PaginatedQuery
467
468
class FlaskDB:
469
"""
470
Flask-Peewee integration helper.
471
472
Parameters:
473
- app: Flask application instance
474
- database: Database instance
475
"""
476
477
def connect_db(self):
478
"""Connect database for request."""
479
480
def close_db(self, exc):
481
"""Close database after request."""
482
483
def get_object_or_404(query_or_model, *expressions):
484
"""
485
Get object or raise 404 error.
486
487
Parameters:
488
- query_or_model: Model class or query
489
- *expressions: Filter expressions
490
491
Returns:
492
Model instance
493
494
Raises:
495
404 error if not found
496
"""
497
498
class PaginatedQuery:
499
"""Query pagination for web views."""
500
501
# Signals (Django-style)
502
from playhouse.signals import Model, pre_save, post_save, pre_delete, post_delete
503
504
# Testing Utilities
505
from playhouse.test_utils import count_queries, assert_query_count
506
507
class count_queries:
508
"""Context manager for counting executed queries."""
509
510
def assert_query_count(expected_count):
511
"""Decorator to assert specific query count in tests."""
512
```
513
514
Usage examples:
515
516
```python
517
# Flask integration
518
from flask import Flask
519
from playhouse.flask_utils import FlaskDB
520
521
app = Flask(__name__)
522
flask_db = FlaskDB(app, db)
523
524
@app.route('/users/<int:user_id>')
525
def user_detail(user_id):
526
user = get_object_or_404(User, User.id == user_id)
527
return f"User: {user.username}"
528
529
# Testing with query counting
530
from playhouse.test_utils import count_queries
531
532
def test_efficient_query():
533
with count_queries() as counter:
534
users = list(User.select().limit(10))
535
posts = list(Post.select().where(Post.author.in_(users)))
536
537
assert counter.count <= 2 # Should be efficient
538
539
@assert_query_count(1)
540
def test_single_query():
541
    User.select().count()  # Fails unless exactly one query is executed
542
```
543
544
### Specialized Field Types and Utilities
545
546
Additional field types and utility functions for specific use cases.
547
548
```python { .api }
549
# Special Fields
550
from playhouse.fields import CompressedField, PickleField
551
552
class CompressedField(Field):
553
"""Automatic compression/decompression field using zlib."""
554
555
class PickleField(Field):
556
"""Python object serialization field using pickle."""
557
558
# Hybrid Properties
559
from playhouse.hybrid import hybrid_property, hybrid_method
560
561
class hybrid_property:
562
"""Properties that work on both instances and classes."""
563
564
class hybrid_method:
565
"""Methods that work on both instances and classes."""
566
567
# Model Utilities
568
from playhouse.shortcuts import model_to_dict, dict_to_model, update_model_from_dict
569
570
def model_to_dict(model, recurse=True, backrefs=False, only=None, exclude=None, extra_attrs=None, fields_from_query=None, max_depth=None):
571
"""
572
Convert model instance to dictionary.
573
574
Parameters:
575
- model: Model instance
576
- recurse (bool): Include related objects
577
- backrefs (bool): Include back-references
578
- only (list): Include only specified fields
579
- exclude (list): Exclude specified fields
580
- extra_attrs (list): Include extra attributes
581
- fields_from_query: Use fields from query
582
- max_depth (int): Maximum recursion depth
583
584
Returns:
585
dict: Model data as dictionary
586
"""
587
588
def dict_to_model(model_class, data, ignore_unknown=False):
589
"""
590
Create model instance from dictionary.
591
592
Parameters:
593
- model_class: Model class
594
- data (dict): Field data
595
- ignore_unknown (bool): Ignore unknown fields
596
597
Returns:
598
Model instance
599
"""
600
601
def update_model_from_dict(instance, data, ignore_unknown=False):
602
"""
603
Update model instance from dictionary.
604
605
Parameters:
606
- instance: Model instance to update
607
- data (dict): New field data
608
- ignore_unknown (bool): Ignore unknown fields
609
610
Returns:
611
list: Updated field names
612
"""
613
```
614
615
Usage examples:
616
617
```python
618
from playhouse.fields import CompressedField, PickleField
619
from playhouse.shortcuts import model_to_dict, dict_to_model
620
621
class Document(Model):
622
title = CharField()
623
content = CompressedField() # Automatically compressed
624
metadata = PickleField() # Python object storage
625
626
class Meta:
627
database = db
628
629
# Create with special fields
630
doc = Document.create(
631
title='My Document',
632
content='Very long content that will be compressed...',
633
metadata={'author': 'John', 'tags': ['important', 'draft']}
634
)
635
636
# Convert to/from dictionaries
637
doc_dict = model_to_dict(doc)
638
new_doc = dict_to_model(Document, doc_dict)
639
640
# Update from dictionary
641
update_data = {'title': 'Updated Title', 'new_field': 'ignored'}
642
updated_fields = update_model_from_dict(doc, update_data, ignore_unknown=True)
643
```
644
645
### Advanced SQLite Features
646
647
Comprehensive SQLite extensions for full-text search, virtual tables, and user-defined functions.
648
649
```python { .api }
650
from playhouse.sqlite_ext import *
651
652
# Full-text search functions
653
def match(field, query):
654
"""Full-text search match function."""
655
656
def rank(fts_model):
657
"""Ranking function for FTS results."""
658
659
def bm25(fts_model, *weights):
660
"""BM25 ranking algorithm."""
661
662
# Virtual table functions
663
def ClosureTable(model_class, foreign_key=None):
664
"""Create closure table for hierarchical data."""
665
666
# User-defined functions (playhouse.sqlite_udf)
667
from playhouse.sqlite_udf import register_functions
668
669
# Many built-in functions available:
670
# Mathematical: sqrt, pow, log, sin, cos, etc.
671
# String: levenshtein, substr_count, strip_chars, etc.
672
# Hash: md5, sha1, sha256, etc.
673
# Date: date_part, date_trunc, to_timestamp, etc.
674
```
675
676
### Additional Playhouse Modules
677
678
Additional specialized modules for advanced use cases and specific database features.
679
680
```python { .api }
681
# Key-Value Store Interface
682
from playhouse.kv import KeyValue
683
684
class KeyValue:
685
"""
686
Dictionary-like interface over database tables.
687
688
Parameters:
689
- database: Database instance
690
- table_name (str): Table name for key-value storage
691
"""
692
def __init__(self, database, table_name='keyvalue'): ...
693
def __getitem__(self, key): ...
694
def __setitem__(self, key, value): ...
695
def __delitem__(self, key): ...
696
def __contains__(self, key): ...
697
def keys(self): ...
698
def values(self): ...
699
def items(self): ...
700
701
# Asynchronous SQLite Operations
702
from playhouse.sqliteq import SqliteQueueDatabase, AsyncCursor
703
704
class SqliteQueueDatabase(Database):
705
"""
706
Asynchronous SQLite database with background query queue.
707
708
Parameters:
709
- database (str): Database file path
710
- use_gevent (bool): Use gevent for async operations
711
- autostart (bool): Automatically start writer thread
712
- queue_max_size (int): Maximum queue size
713
- results_timeout (int): Result timeout in seconds
714
"""
715
716
class AsyncCursor:
717
"""Asynchronous query cursor for non-blocking operations."""
718
def fetchall(self): ...
719
def fetchone(self): ...
720
def fetchmany(self, size=None): ...
721
722
# Change Tracking
723
from playhouse.sqlite_changelog import ChangeLog, BaseChangeLog
724
725
class ChangeLog:
726
"""
727
Change tracking system for table modifications.
728
729
Parameters:
730
- model: Model class to track changes for
731
"""
732
def __init__(self, model): ...
733
def install(self): ...
734
def uninstall(self): ...
735
736
class BaseChangeLog(Model):
737
"""Base model for change log entries."""
738
table = CharField()
739
primary_key = CharField()
740
operation = CharField() # 'INSERT', 'UPDATE', 'DELETE'
741
changes = TextField() # JSON changes
742
timestamp = DateTimeField(default=datetime.datetime.now)
743
744
# Hybrid Properties and Methods
745
from playhouse.hybrid import hybrid_property, hybrid_method
746
747
class hybrid_property:
748
"""
749
Properties that work on both instances and classes.
750
751
Usage:
752
class User(Model):
753
first_name = CharField()
754
last_name = CharField()
755
756
@hybrid_property
757
def full_name(self):
758
return f"{self.first_name} {self.last_name}"
759
"""
760
def __init__(self, func): ...
761
def __get__(self, instance, owner): ...
762
def expression(self, func): ...
763
764
class hybrid_method:
765
"""Methods that work on both instances and classes."""
766
def __init__(self, func): ...
767
def __get__(self, instance, owner): ...
768
def expression(self, func): ...
769
```
770
771
Usage examples:
772
773
```python
774
# Key-value store
775
from playhouse.kv import KeyValue
776
777
kv = KeyValue(db)
778
kv['config'] = {'debug': True, 'max_users': 1000}
779
config = kv['config']
780
del kv['old_setting']
781
782
# Async SQLite operations
783
from playhouse.sqliteq import SqliteQueueDatabase
784
785
async_db = SqliteQueueDatabase('async.db', autostart=True, results_timeout=10)
786
787
# Perform async query
788
cursor = async_db.execute_sql('SELECT * FROM users WHERE age > ?', (18,))
789
results = cursor.fetchall()
790
791
# Change tracking
792
from playhouse.sqlite_changelog import ChangeLog
793
794
user_changelog = ChangeLog(User)
795
user_changelog.install() # Install triggers
796
797
# Now all User changes are tracked
798
user = User.create(username='john', email='john@example.com')
799
user.email = 'newemail@example.com'
800
user.save()
801
802
# Query change log
803
changes = BaseChangeLog.select().where(BaseChangeLog.table == 'user')
804
```
805
806
This comprehensive extension ecosystem makes Peewee suitable for everything from simple applications to complex data processing pipelines with advanced database features.