Unified deep learning framework integrating PyTorch Lightning, Lightning Fabric, and Lightning Apps for training, deploying, and shipping AI products.
```bash
npx @tessl/cli install tessl/pypi-pytorch-lightning@1.9.0
```

A unified deep learning framework that integrates PyTorch Lightning, Lightning Fabric, and Lightning Apps to provide a complete solution for training, deploying, and shipping AI products. Lightning abstracts away boilerplate code while maintaining flexibility, enabling everything from simple research experiments to complex multi-cloud production systems.
Install either the standalone package or the full suite:

```bash
pip install pytorch-lightning
# or, for the full suite:
pip install lightning
```

**Option 1: Unified Lightning (requires the full `lightning` package):**
```python
import lightning as L
```

**Option 2: PyTorch Lightning specific:**
```python
import pytorch_lightning as pl
```

**Option 3: Individual component imports:**
```python
# Training components (from pytorch-lightning)
from pytorch_lightning import Trainer, LightningModule, LightningDataModule, Callback

# Fabric (from lightning-fabric)
from lightning_fabric import Fabric, seed_everything

# Apps (from lightning-app)
from lightning_app import LightningApp, LightningFlow, LightningWork
from lightning_app import CloudCompute, BuildConfig
```

**Option 4: Sub-package imports:**
```python
# If the full lightning package is installed
from lightning import Trainer, LightningModule, LightningDataModule, Callback
from lightning import Fabric, seed_everything
from lightning import LightningApp, LightningFlow, LightningWork
from lightning import CloudCompute, BuildConfig
```

A minimal end-to-end training example:

```python
import lightning as L
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader, TensorDataset
# Define a simple model
class SimpleModel(L.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = nn.Linear(10, 1)

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self(x)
        loss = F.mse_loss(y_hat, y)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters())

# Create sample data
X = torch.randn(1000, 10)
y = torch.randn(1000, 1)
dataset = TensorDataset(X, y)
dataloader = DataLoader(dataset, batch_size=32)
# Train the model
model = SimpleModel()
trainer = L.Trainer(max_epochs=5)
trainer.fit(model, dataloader)
```

Lightning is built on three core pillars that work together:

- **PyTorch Lightning**: high-level organization for model training (Trainer, LightningModule, data modules, callbacks)
- **Lightning Fabric**: fine-grained control over custom training loops with automatic device and distributed setup
- **Lightning Apps**: components for building and deploying end-to-end ML systems
The main lightning package serves as an umbrella that exposes key functionality from all three components, providing a unified interface for the complete ML lifecycle from research to production.
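To illustrate the umbrella interface, the sketch below constructs equivalent trainers through both namespaces; it assumes the full `lightning` package is installed:

```python
# Both namespaces expose an equivalent Trainer API when the full
# `lightning` package is installed (assumes `pip install lightning`).
import lightning as L
import pytorch_lightning as pl

trainer_a = L.Trainer(max_epochs=1)
trainer_b = pl.Trainer(max_epochs=1)
```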
### Training and Model Organization

Core PyTorch Lightning components for organizing training code, managing experiments, and scaling across devices. Includes the main `Trainer` class, base classes for models and data modules, and the callback system.

```python
class Trainer:
    def __init__(self, max_epochs: Optional[int] = None, logger: bool = True, **kwargs): ...
    def fit(self, model: LightningModule, train_dataloaders=None, **kwargs): ...
    def validate(self, model: Optional[LightningModule] = None, dataloaders=None, **kwargs): ...
    def test(self, model: Optional[LightningModule] = None, dataloaders=None, **kwargs): ...
    def predict(self, model: Optional[LightningModule] = None, dataloaders=None, **kwargs): ...

class LightningModule:
    def training_step(self, batch, batch_idx): ...
    def validation_step(self, batch, batch_idx): ...
    def test_step(self, batch, batch_idx): ...
    def configure_optimizers(self): ...

class LightningDataModule:
    def setup(self, stage: Optional[str] = None): ...
    def train_dataloader(self): ...
    def val_dataloader(self): ...
    def test_dataloader(self): ...

class Callback:
    def on_train_start(self, trainer, pl_module): ...
    def on_train_end(self, trainer, pl_module): ...
    # Per-epoch hooks in 1.9 are stage-scoped (the generic
    # on_epoch_start/on_epoch_end hooks were removed in 1.8)
    def on_train_epoch_start(self, trainer, pl_module): ...
    def on_train_epoch_end(self, trainer, pl_module): ...
```
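The quick-start example keeps data inline; a `LightningDataModule` factors data loading out of the model, and callbacks hook into the training loop. Below is a hedged sketch using synthetic data and illustrative names (`RandomDataModule`, `TinyModel`, `PrintCallback` are not part of the library):

```python
import torch
import torch.nn as nn
import torch.nn.functional as F
import lightning as L
from torch.utils.data import DataLoader, TensorDataset

class RandomDataModule(L.LightningDataModule):
    def __init__(self, batch_size: int = 32):
        super().__init__()
        self.batch_size = batch_size

    def setup(self, stage=None):
        # Synthetic regression data, split into train/val
        self.train_set = TensorDataset(torch.randn(800, 10), torch.randn(800, 1))
        self.val_set = TensorDataset(torch.randn(200, 10), torch.randn(200, 1))

    def train_dataloader(self):
        return DataLoader(self.train_set, batch_size=self.batch_size)

    def val_dataloader(self):
        return DataLoader(self.val_set, batch_size=self.batch_size)

class TinyModel(L.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = nn.Linear(10, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return F.mse_loss(self.layer(x), y)

    def validation_step(self, batch, batch_idx):
        x, y = batch
        self.log("val_loss", F.mse_loss(self.layer(x), y))

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters())

class PrintCallback(L.Callback):
    def on_train_start(self, trainer, pl_module):
        print("Training started")

trainer = L.Trainer(max_epochs=2, callbacks=[PrintCallback()])
trainer.fit(TinyModel(), datamodule=RandomDataModule())
```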
### Fine-Grained Training with Fabric

Lightning Fabric provides fine-grained control over training loops while handling device management, distributed training setup, and gradient synchronization automatically.

```python
class Fabric:
    def __init__(self, accelerator: str = "auto", devices: Union[int, str] = "auto", **kwargs): ...
    def setup(self, model, *optimizers): ...
    def setup_dataloaders(self, *dataloaders): ...
    def backward(self, loss): ...
    def load(self, path: str): ...
    def save(self, path: str, state: dict): ...
def seed_everything(seed: int, workers: bool = False) -> int: ...
```
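A minimal manual training loop with Fabric might look like the following sketch (synthetic data; a single device is assumed, in which case `fabric.launch()` is effectively a no-op):

```python
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader, TensorDataset
from lightning_fabric import Fabric, seed_everything

seed_everything(42)

fabric = Fabric(accelerator="auto", devices=1)
fabric.launch()

model = nn.Linear(10, 1)
optimizer = torch.optim.Adam(model.parameters())
# Fabric wraps the model and optimizer for the target device/strategy
model, optimizer = fabric.setup(model, optimizer)

dataset = TensorDataset(torch.randn(256, 10), torch.randn(256, 1))
dataloader = fabric.setup_dataloaders(DataLoader(dataset, batch_size=32))

for epoch in range(3):
    for x, y in dataloader:
        optimizer.zero_grad()
        loss = F.mse_loss(model(x), y)
        fabric.backward(loss)  # replaces loss.backward()
        optimizer.step()
```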
### Lightning Apps

Lightning Apps is a framework for building end-to-end ML systems, with components for workflow orchestration, computational work distribution, and cloud deployment.

```python
class LightningApp:
    def __init__(self, root: LightningFlow, info: dict = None): ...

class LightningFlow:
    def run(self): ...
    def configure_layout(self): ...

class LightningWork:
    def run(self, *args, **kwargs): ...

class CloudCompute:
    def __init__(self, name: str, disk_size: int = 0, idle_timeout: int = None): ...

class BuildConfig:
    def __init__(self, image: str = None, requirements: list = None, dockerfile: str = None): ...
```
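A minimal app wires a `LightningFlow` to one or more `LightningWork` units. The sketch below is illustrative (the component names and printed message are placeholders); apps are typically started with the `lightning run app app.py` CLI:

```python
from lightning_app import LightningApp, LightningFlow, LightningWork

class TrainWork(LightningWork):
    def run(self):
        # Placeholder for a real training or serving job
        print("Running training job")

class RootFlow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.train = TrainWork()

    def run(self):
        # The flow orchestrates its works; each Work's run executes in
        # its own process (or on a cloud machine via CloudCompute).
        self.train.run()

app = LightningApp(RootFlow())
```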
### Storage and Utilities

Storage abstractions and utility modules for Lightning Apps, including file system operations, cloud storage integrations, and debugging tools.
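As a loosely hedged sketch of the storage abstractions, the example below declares a `lightning_app.storage.Path` attribute on a Work so the produced file can be tracked by the framework (the `Producer` name and file contents are illustrative):

```python
from lightning_app import LightningApp, LightningFlow, LightningWork
from lightning_app.storage import Path

class Producer(LightningWork):
    def __init__(self):
        super().__init__()
        # Declared as a storage Path so the file can be tracked and
        # transferred to other Works that reference it.
        self.checkpoint = Path("checkpoint.txt")

    def run(self):
        with open(self.checkpoint, "w") as f:
            f.write("model weights")

class Root(LightningFlow):
    def __init__(self):
        super().__init__()
        self.producer = Producer()

    def run(self):
        self.producer.run()

app = LightningApp(Root())
```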
### Common Types

```python
# Common type aliases used across the framework
from typing import Union, Optional, List, Dict, Any, Callable, Iterable, Sequence, Tuple
from pathlib import Path
from datetime import timedelta
from torch import Tensor
from torch.nn import Module
from torch.optim import Optimizer
from torch.utils.data import DataLoader
from lightning_fabric.utilities.types import _PATH
from pytorch_lightning.loggers import Logger
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.plugins import PrecisionPlugin
from pytorch_lightning.profilers import Profiler
# Lightning-specific type aliases
_PATH = Union[str, Path]
```