pytest plugin that provides unittest subTest() support and a subtests fixture for pure pytest tests.

Install with the tessl CLI:

    npx @tessl/cli install tessl/pypi-pytest-subtests@0.14.0

A pytest plugin that provides unittest's subTest() support and introduces a subtests fixture for pure pytest tests. It enables developers to run multiple related test cases within a single test function while maintaining individual failure reporting and isolation, making it particularly useful for parameterized testing scenarios where you want to see all failures rather than stopping at the first one.

Install from PyPI:

    pip install pytest-subtests

Import the package:

    import pytest_subtests
    from pytest_subtests import SubTests

For use in test functions, the subtests fixture is automatically available:
def test_example(subtests):
# subtests fixture is automatically injected
pass

The plugin registers automatically when pytest-subtests is installed - no manual registration needed.
def test_with_subtests(subtests):
for i in range(5):
with subtests.test(msg="custom message", i=i):
assert i % 2 == 0

import unittest
class TestExample(unittest.TestCase):
def test_with_subtests(self):
for i in range(5):
with self.subTest("custom message", i=i):
self.assertEqual(i % 2, 0)

Both approaches provide individual failure reporting - if some subtests pass and others fail, you'll see detailed output for each failing subtest while still getting information about which ones passed.
Main interface for creating subtests within pytest functions.
class SubTests:
"""Primary interface for creating subtests within pytest functions."""
@property
def item(self) -> pytest.Item:
"""Returns the current test item."""
def test(self, msg: str | None = None, **kwargs: Any) -> _SubTestContextManager:
"""
Creates a subtest context manager.
Args:
msg: Optional message for the subtest
**kwargs: Parameters for subtest identification
Returns:
Context manager for the subtest
"""Pytest fixture that provides a SubTests instance to test functions.
def subtests(request: SubRequest) -> Generator[SubTests, None, None]:
"""
Pytest fixture providing SubTests instance to test functions.
Args:
request: pytest sub-request object
Yields:
SubTests instance for creating subtests
"""Container for subtest context information.
class SubTestContext:
"""Container for subtest context information."""
msg: str | None
"""Optional message for the subtest."""
kwargs: dict[str, Any]
"""Parameters for subtest identification."""Custom test report for subtests with enhanced formatting.
class SubTestReport(TestReport):
"""Custom test report for subtests with enhanced formatting."""
context: SubTestContext
"""Subtest context information."""
@property
def head_line(self) -> str:
"""Returns formatted header line for the report."""
def sub_test_description(self) -> str:
"""
Generates human-readable subtest description.
Returns:
Human-readable description of the subtest
"""
def _to_json(self) -> dict:
"""Serializes the report to JSON format."""
@classmethod
def _from_json(cls, reportdict: dict[str, Any]) -> SubTestReport:
"""Creates SubTestReport from JSON data."""
@classmethod
def _from_test_report(cls, test_report: TestReport) -> SubTestReport:
"""Creates SubTestReport from a regular TestReport."""Context manager returned by SubTests.test() that handles subtest execution and reporting.
class _SubTestContextManager:
"""
Context manager for subtests, capturing exceptions and handling them
through the pytest machinery.
"""
def __enter__(self) -> None:
"""Enters the subtest context, sets up capturing and timing."""
def __exit__(
self,
exc_type: type[Exception] | None,
exc_val: Exception | None,
exc_tb: TracebackType | None
) -> bool:
"""
Exits the subtest context, processes any exceptions and generates reports.
Returns:
True to suppress the exception (subtest handling), False otherwise
"""The plugin adds command line options to control subtest behavior.
def pytest_addoption(parser: pytest.Parser) -> None:
"""
Adds command-line options for subtest behavior.
Args:
parser: pytest argument parser to add options to
Adds options:
--no-subtests-shortletter: Disables subtest output 'dots' in
non-verbose mode (EXPERIMENTAL)
"""The plugin integrates with pytest through several hooks for configuration and reporting.
def pytest_configure(config: pytest.Config) -> None:
    """
    Configure the plugin and patch ``TestCaseFunction`` for unittest compatibility.

    Args:
        config: the active pytest configuration object.

    Performs:
        - Patches ``TestCaseFunction.addSubTest`` so unittest ``subTest()``
          blocks are reported through pytest.
        - Sets ``failfast=False`` so subtests continue running after a failure.
        - Registers subtest status types with the terminal reporter.
        - Updates the terminal color mapping for subtest outcomes.
    """
def pytest_unconfigure() -> None:
    """
    Undo the plugin's modifications when the plugin is unconfigured.

    Removes:
        - The ``TestCaseFunction.addSubTest`` attribute added at configure time.
        - The ``TestCaseFunction.failfast`` attribute.
        - Restores the original ``TestCaseFunction.addSkip`` method.
    """
def pytest_report_teststatus(
report: pytest.TestReport,
config: pytest.Config
) -> tuple[str, str, str | Mapping[str, bool]] | None:
    """
    Customize test status reporting for subtest reports.

    Args:
        report: the test report being processed.
        config: the active pytest configuration.

    Returns:
        A ``(category, shortletter, verbose)`` tuple for subtest reports;
        ``None`` for non-subtest reports so other handlers can process them.

    Handles:
        - ``SubTestReport`` instances with custom formatting.
        - xfail/xpass status for subtests.
        - Custom short letters (``,`` ``-`` ``u`` ``y`` ``Y``) for subtest outcomes.
        - Respects the ``--no-subtests-shortletter`` option.
    """
def pytest_report_to_serializable(
report: pytest.TestReport
) -> dict[str, Any] | None:
    """
    Serialize ``SubTestReport`` objects for distributed testing.

    Args:
        report: the test report to serialize.

    Returns:
        A JSON-serializable dict when ``report`` is a ``SubTestReport``;
        ``None`` for any other report type so default handling applies.
    """
def pytest_report_from_serializable(
data: dict[str, Any]
) -> SubTestReport | None:
"""
Handles deserialization of SubTestReport objects from distributed testing.
Args:
data: serialized report data
Returns:
SubTestReport instance if data represents a SubTestReport, None otherwise
"""def test_multiple_values(subtests):
test_data = [
(2, 4), # pass
(3, 6), # pass
(4, 7), # fail - should be 8
(5, 10), # pass
]
for input_val, expected in test_data:
with subtests.test(input=input_val, expected=expected):
result = input_val * 2
assert result == expected

def test_user_data_validation(subtests):
users = [
{"name": "Alice", "age": 25, "email": "alice@example.com"},
{"name": "Bob", "age": -5, "email": "invalid-email"}, # multiple issues
{"name": "", "age": 30, "email": "charlie@example.com"}, # empty name
]
for i, user in enumerate(users):
with subtests.test(msg=f"User {i+1}", user_name=user.get("name", "unknown")):
assert user["name"], "Name cannot be empty"
assert user["age"] > 0, "Age must be positive"
assert "@" in user["email"], "Email must contain @"import unittest
class DataProcessingTests(unittest.TestCase):
def test_process_multiple_files(self):
files = ["data1.txt", "data2.txt", "corrupted.txt", "data4.txt"]
for filename in files:
with self.subTest(filename=filename):
result = process_file(filename)
self.assertIsNotNone(result)
self.assertGreater(len(result), 0)

The plugin handles exceptions within subtests gracefully:
Subtest failures are reported with clear identification:
____________________ test_example [custom message] (i=1) ____________________
____________________ test_example [custom message] (i=3) ____________________

The format includes the test function name, the subtest message in brackets, and the subtest parameters in parentheses.
The package includes complete type annotations (py.typed marker file) for full type checking support with mypy and other type checkers.
from typing import Any, Generator, Mapping
from types import TracebackType
from _pytest.fixtures import SubRequest
from _pytest.reports import TestReport
import pytest
# Core types used throughout the API
Generator[SubTests, None, None]
# Generator that yields SubTests instances
tuple[str, str, str | Mapping[str, bool]] | None
# Return type for pytest_report_teststatus hook
dict[str, Any]
# Generic dictionary type used in serialization and kwargs
type[Exception] | None
# Exception type annotation for context manager