Create delightful software with Jupyter Notebooks
—
Run tests defined in notebook cells across your project in parallel. nbdev's testing system executes notebook cells marked as tests and provides detailed reporting on failures and performance.
Execute tests across all notebooks or specific notebook files.
def nbdev_test(path: str = None, flags: str = None, n_workers: int = None,
verbose: bool = True, force_flags: str = None,
do_print: bool = False, timing: bool = False):
"""
Run tests from notebooks in parallel.
Args:
path: Path to notebook(s) to test (default: all notebooks in nbs_path)
flags: Comma-separated flags marking cells to skip during testing
n_workers: Number of parallel workers (default: number of CPUs)
verbose: Print detailed test results
force_flags: Flags marking cells to always run regardless of skip flags
do_print: Print test completion messages
timing: Display timing information for test execution
Executes all notebook cells except those marked with skip flags,
running notebooks in parallel for efficiency.
"""
def test_nb(fn: str, skip_flags: list = None, force_flags: list = None,
do_print: bool = False, showerr: bool = True, basepath: str = None):
"""
Execute tests in a single notebook.
Args:
fn: Path to notebook file to test
skip_flags: List of flags marking cells to skip
force_flags: List of flags marking cells to always run
do_print: Print completion messages
showerr: Print errors to stderr
basepath: Path to add to sys.path for imports
Returns:
Tuple of (success: bool, execution_time: float)
"""Usage Examples:
from nbdev.test import nbdev_test, test_nb
# Run all tests in project
nbdev_test()
# Run tests with specific skip flags
nbdev_test(flags='slow,gpu')
# Run tests with timing information
nbdev_test(timing=True, verbose=True)
# Test specific notebook
success, exec_time = test_nb('notebooks/01_core.ipynb')
print(f"Test completed in {exec_time:.2f}s - {'PASSED' if success else 'FAILED'}")
# Test with custom flags
test_nb('example.ipynb', skip_flags=['slow'], force_flags=['critical'])

Control test execution using special cell directives in notebooks:
#|skip: Skip this cell during testing
#|slow: Mark as slow test (can be skipped with flags='slow')
#|cuda: Mark as requiring CUDA (can be skipped if no GPU)
#|eval: false: Never execute this cell
#|eval: true: Always execute this cell

You can create custom test flags for your project:
# In notebook cell with custom flag
#|my_custom_flag
def test_special_feature():
assert some_condition()
# Skip custom flagged tests
nbdev_test(flags='my_custom_flag')

nbdev sets specific environment variables during testing:
IN_TEST=1: Indicates code is running in test mode

Each notebook test runs in its own execution environment:
from nbdev.test import test_nb
import sys
# Tests run with clean sys.path
# Test failures don't affect other notebooks
# Each notebook gets fresh imports

Tests run in parallel for efficiency:
from nbdev.test import nbdev_test
# Use all CPU cores (default)
nbdev_test()
# Limit parallel workers
nbdev_test(n_workers=4)
# Disable parallel execution
nbdev_test(n_workers=1)

from nbdev.test import nbdev_test
# Detailed test reporting
nbdev_test(verbose=True)
# Minimal output
nbdev_test(verbose=False)
# Show timing information
nbdev_test(timing=True)

from nbdev.test import test_nb
# Control error display
success, time = test_nb('test.ipynb', showerr=True)
# Suppress error output to stderr
success, time = test_nb('test.ipynb', showerr=False)

nbdev testing integrates well with continuous integration:
# In GitHub Actions or other CI
nbdev_test --verbose --timing
# Skip slow tests in CI
nbdev_test --flags slow
# Run only fast, critical tests
nbdev_test --flags slow,gpu --force_flags critical

# Cell 1: Setup
import numpy as np
from mylib import core_function
# Cell 2: Test function
def test_core_function():
result = core_function([1, 2, 3])
assert len(result) == 3
assert all(isinstance(x, int) for x in result)
# Cell 3: Run test
test_core_function()
print("✓ Core function test passed")
# Cell 4: Slow test
#|slow
def test_performance():
# This test takes a long time
large_data = generate_large_dataset()
result = core_function(large_data)
assert validate_result(result)
# Cell 5: Skip in automated testing
#|skip
def manual_test():
# This requires manual inspection
plot_results()

Configure testing behavior in settings.ini:
tst_flags = notest slow

This automatically skips cells marked with notest or slow flags.
# Use descriptive test functions
def test_data_loading_handles_missing_files():
"""Test that data loading gracefully handles missing files."""
with pytest.raises(FileNotFoundError):
load_data('nonexistent.csv')
# Group related tests
def test_preprocessing_pipeline():
"""Test the complete preprocessing pipeline."""
raw_data = create_test_data()
processed = preprocess(raw_data)
assert processed.shape[0] == raw_data.shape[0]
assert 'processed_flag' in processed.columns
assert processed['processed_flag'].all()
# Use appropriate flags
#|slow
def test_large_dataset_processing():
"""Test processing with realistic large dataset."""
# This test uses significant memory/time
pass

Complete Testing Example:
from nbdev.test import nbdev_test, test_nb
from nbdev.config import get_config
# Get project configuration
config = get_config()
# Run all tests with timing
print(f"Running tests for {config.lib_name}")
nbdev_test(timing=True, verbose=True)
# Run quick tests only (skip slow ones)
print("Running quick tests only...")
nbdev_test(flags='slow', timing=True)

Install with Tessl CLI
npx tessl i tessl/pypi-nbdev