SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness.
Overall score: 85%
SQLModel re-exports SQLAlchemy's database engine and connection management functionality, providing full compatibility with SQLAlchemy's engine system while maintaining seamless integration with SQLModel's enhanced features.
Functions for creating database engines that handle connection pooling and database communication.
def create_engine(
url: Union[str, URL],
*,
connect_args: Optional[Dict[str, Any]] = None,
echo: bool = False,
echo_pool: bool = False,
enable_from_linting: bool = True,
hide_parameters: bool = False,
insertmanyvalues_page_size: int = 1000,
isolation_level: Optional[str] = None,
json_deserializer: Optional[Callable] = None,
json_serializer: Optional[Callable] = None,
label_length: Optional[int] = None,
logging_name: Optional[str] = None,
max_identifier_length: Optional[int] = None,
max_overflow: int = 10,
module: Optional[Any] = None,
paramstyle: Optional[str] = None,
pool: Optional[Pool] = None,
poolclass: Optional[Type[Pool]] = None,
pool_logging_name: Optional[str] = None,
pool_pre_ping: bool = False,
pool_recycle: int = -1,
pool_reset_on_return: Optional[str] = None,
pool_size: int = 5,
pool_timeout: float = 30.0,
plugins: Optional[List[str]] = None,
query_cache_size: int = 500,
**kwargs: Any
) -> Engine:
"""
Create a database engine for connecting to databases.

Typically called once per application; the returned Engine manages a
pool of driver-level connections that Sessions draw from.

Parameters:
url: Database URL (e.g., "sqlite:///database.db", "postgresql://user:pass@host/db")
connect_args: Additional arguments passed to the database driver
echo: Whether to log all SQL statements (useful for debugging)
echo_pool: Whether to log connection pool events
enable_from_linting: Warn about SELECTs with an implicit cartesian product (default True)
hide_parameters: Omit bound parameter values from logs and error messages
insertmanyvalues_page_size: Maximum number of rows per batch for bulk INSERT statements
isolation_level: Transaction isolation level (e.g., "SERIALIZABLE", "READ COMMITTED")
json_serializer: Callable used to serialize values for JSON column types
json_deserializer: Callable used to deserialize values from JSON column types
label_length: Maximum length of generated column labels
logging_name: Name used to identify this engine in log output
max_identifier_length: Override the dialect's maximum identifier length
max_overflow: Additional connections beyond pool_size (default 10)
module: Alternate DBAPI module to use in place of the dialect's default
paramstyle: DBAPI parameter style to use (e.g., "qmark", "named")
pool: An already-constructed Pool instance to use directly
poolclass: Pool subclass to instantiate (e.g., QueuePool, StaticPool)
pool_logging_name: Name used to identify the pool in log output
pool_pre_ping: Validate connections before use
pool_recycle: Seconds after which connection is recreated (-1 disables recycling)
pool_reset_on_return: Action taken when a connection is returned to the pool
pool_size: Number of connections to maintain in the pool (default 5)
pool_timeout: Seconds to wait for connection from pool
plugins: Names of engine plugins to load
query_cache_size: Number of compiled SQL statements kept in the cache (default 500)
**kwargs: Further dialect- or engine-specific options
Returns:
Engine object for database operations
"""
def create_mock_engine(dialect: str, executor: Callable) -> MockConnection:
"""
Create a mock engine for testing without a real database.

No database connection is made; SQL that would be executed is handed
to ``executor`` instead, which is useful for generating DDL scripts or
asserting on emitted statements.

Parameters:
dialect: SQL dialect to mock
executor: Function to handle SQL execution; invoked with each SQL construct in place of a real database call
Returns:
MockConnection for testing purposes
"""
def engine_from_config(
configuration: Dict[str, Any],
prefix: str = "sqlalchemy.",
**kwargs: Any
) -> Engine:
"""
Create an engine from a configuration dictionary.

Keys starting with ``prefix`` (default "sqlalchemy.") are stripped of
the prefix and passed to create_engine(); string values such as "true"
or "10" are coerced to the appropriate types.

Parameters:
configuration: Dictionary with engine configuration; non-matching keys are ignored
prefix: Prefix identifying the configuration keys to use
**kwargs: Additional engine arguments, overriding values from the dictionary
Returns:
Engine configured from the dictionary
"""Usage Examples:
# SQLite engine (file-based)
engine = create_engine("sqlite:///heroes.db", echo=True)
# PostgreSQL engine
engine = create_engine(
"postgresql://user:password@localhost/heroes",
pool_size=20,
max_overflow=0,
pool_recycle=3600
)
# MySQL engine with connection arguments
engine = create_engine(
"mysql+pymysql://user:password@localhost/heroes",
connect_args={"charset": "utf8mb4"},
echo=False
)
# In-memory SQLite for testing
engine = create_engine("sqlite:///:memory:", echo=True)
# Engine from configuration
config = {
"sqlalchemy.url": "sqlite:///heroes.db",
"sqlalchemy.echo": "true",
"sqlalchemy.pool_size": "10"
}
engine = engine_from_config(config)

Connection pool classes for managing database connections efficiently.
class QueuePool(Pool):
"""
Queue-based connection pool (default for most databases).

Maintains a fixed-size pool of connections (``pool_size``) with
overflow capability (``max_overflow``). Connections are distributed
on a FIFO basis.
"""
class StaticPool(Pool):
"""
Static connection pool that maintains exactly one connection.

Useful for in-memory databases or other single-connection scenarios;
all operations share the same connection.
"""Usage Examples:
# Explicit pool configuration
from sqlalchemy.pool import QueuePool, StaticPool
# Custom queue pool
engine = create_engine(
"postgresql://user:pass@host/db",
poolclass=QueuePool,
pool_size=10,
max_overflow=20,
pool_pre_ping=True
)
# Static pool for SQLite
engine = create_engine(
"sqlite:///database.db",
poolclass=StaticPool,
connect_args={"check_same_thread": False}
)

Utility for inspecting database schemas and metadata.
def inspect(bind: Union[Engine, Connection]) -> Inspector:
"""
Create an Inspector for examining database structure.

The Inspector reflects schema information from the live database:
table names, column definitions, foreign keys, and table existence.

Parameters:
bind: Engine or Connection to inspect
Returns:
Inspector object for schema reflection
"""Usage Examples:
# Inspect database schema
inspector = inspect(engine)
# Get table names
table_names = inspector.get_table_names()
print("Tables:", table_names)
# Get column information
columns = inspector.get_columns("heroes")
for column in columns:
print(f"Column: {column['name']}, Type: {column['type']}")
# Get foreign keys
foreign_keys = inspector.get_foreign_keys("heroes")
for fk in foreign_keys:
print(f"FK: {fk['constrained_columns']} -> {fk['referred_table']}.{fk['referred_columns']}")
# Check if table exists
if inspector.has_table("heroes"):
print("Heroes table exists")The engine system integrates seamlessly with SQLModel's session management and model definitions.
Complete Application Example:
from sqlmodel import SQLModel, Field, Session, create_engine, select
from typing import List, Optional
# Define model
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
secret_name: str
age: Optional[int] = None
# Create engine
engine = create_engine("sqlite:///heroes.db", echo=True)
# Create tables
SQLModel.metadata.create_all(engine)
# Use with sessions
def create_hero(name: str, secret_name: str, age: Optional[int] = None) -> Hero:
hero = Hero(name=name, secret_name=secret_name, age=age)
with Session(engine) as session:
session.add(hero)
session.commit()
session.refresh(hero)
return hero
def get_heroes() -> List[Hero]:
with Session(engine) as session:
statement = select(Hero)
heroes = session.exec(statement).all()
return heroes
# Usage
hero = create_hero("Spider-Boy", "Pedro Parqueador", 16)
all_heroes = get_heroes()

Production Configuration:
# Production PostgreSQL setup
engine = create_engine(
"postgresql://user:password@host:5432/database",
# Connection pool settings
pool_size=20, # Base number of connections
max_overflow=30, # Additional connections when needed
pool_recycle=3600, # Recreate connections after 1 hour
pool_pre_ping=True, # Validate connections before use
# Performance settings
echo=False, # Disable SQL logging
query_cache_size=1000, # Increase query cache
# Connection arguments
connect_args={
"sslmode": "require",
"connect_timeout": 10,
"application_name": "my_app"
}
)

Development Configuration:
# Development SQLite setup
engine = create_engine(
"sqlite:///dev_database.db",
echo=True, # Enable SQL logging for debugging
connect_args={"check_same_thread": False} # Allow multiple threads
)

Testing Configuration:
# In-memory database for tests
test_engine = create_engine(
"sqlite:///:memory:",
echo=False,
poolclass=StaticPool,
connect_args={"check_same_thread": False}
)

Install with Tessl CLI
npx tessl i tessl/pypi-sqlmodeldocs
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10