CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-vedro

Pragmatic Testing Framework for Python with BDD-style syntax and pluggable architecture

49

1.08x
Quality

Pending

Does it follow best practices?

Impact

49%

1.08x

Average score across 10 eval scenarios

Security by Snyk

Pending

The risk profile of this skill

Overview
Eval results
Files

docs/configuration.md

Configuration and Plugins

Configuration system with plugin management, dependency injection, and extensible architecture.

Capabilities

Core Configuration

Main configuration class with registry system and plugin management.

class Config:
    """
    Main configuration class for the Vedro testing framework.

    Defines settings for framework behavior, plugin configuration,
    and dependency injection through the Registry system.
    """

    validate_plugins_configs: bool = True
    """Whether to validate plugin configurations to prevent errors."""

    class Registry:
        """
        Dependency injection registry for core components.

        Provides Factory and Singleton patterns for configuring
        framework components like finders, loaders, and runners.
        """

        # Core singletons: one shared instance across the run.
        Dispatcher: Singleton[Dispatcher]
        ScenarioCollector: Singleton[ScenarioCollector]

        # Core factories: a new instance per resolution.
        # NOTE(review): only the declared slots are visible here — the
        # concrete default implementations are assigned elsewhere.
        ModuleLoader: Factory[ModuleLoader]
        ScenarioFinder: Factory[ScenarioFinder]
        ScenarioLoader: Factory[ScenarioLoader]
        ScenarioOrderer: Factory[ScenarioOrderer]
        ScenarioDiscoverer: Factory[ScenarioDiscoverer]
        ScenarioScheduler: Factory[ScenarioScheduler]
        ScenarioRunner: Factory[ScenarioRunner]

    class Plugins:
        """
        Plugin configuration container.

        Contains nested configuration classes for each built-in plugin,
        allowing fine-grained control over plugin behavior.
        Each nested class is a stub here; concrete settings live in the
        corresponding PluginConfig subclasses.
        """

        # Output and reporting plugins
        class Director: ...
        class RichReporter: ...
        class SilentReporter: ...
        class PyCharmReporter: ...

        # Test execution plugins
        class Functioner: ...
        class TempKeeper: ...
        class Orderer: ...
        class LastFailed: ...
        class Repeater: ...
        class Rerunner: ...

        # Test control plugins
        class Deferrer: ...
        class Skipper: ...
        class Slicer: ...
        class Tagger: ...

        # Utility plugins
        class Seeder: ...
        class Artifacted: ...
        class AssertRewriter: ...
        class DryRunner: ...
        class Ensurer: ...
        class Interrupter: ...
        class SystemUpgrade: ...
        class TipAdviser: ...
        class Terminator: ...

def computed(fn: Callable) -> Callable:
    """
    Decorator for computed configuration properties.

    Marks a method as a computed property that should be
    evaluated dynamically during configuration resolution.

    Args:
        fn: Method to mark as computed

    Returns:
        The method with computed metadata
    """
    # The stub declared "-> Callable" but fell through and returned None,
    # which would replace any `@computed`-decorated attribute with None.
    # Tag the function with a marker attribute and hand it back unchanged.
    # NOTE(review): the exact marker attribute inspected by vedro's config
    # resolver is not visible in this excerpt — confirm against vedro.core.
    setattr(fn, "__vedro_computed__", True)
    return fn

Plugin System

Base classes and interfaces for creating custom plugins.

class Plugin:
    """
    Abstract base class for vedro plugins.

    Plugins extend framework functionality by subscribing to events
    and providing additional capabilities during test execution.
    """

    # Receives the plugin's own PluginConfig instance.
    def __init__(self, config: PluginConfig): ...
    # Called by the framework so the plugin can register its event listeners.
    def subscribe(self, dispatcher: Dispatcher) -> None: ...

class PluginConfig:
    """
    Base configuration class for plugins.

    Provides common configuration interface and enables
    plugin-specific settings management.
    """

    # Whether the plugin participates in the run.
    enabled: bool = True
    # Concrete Plugin subclass this config instantiates.
    # NOTE(review): the None default contradicts the Type[Plugin]
    # annotation — Optional[Type[Plugin]] would be more accurate;
    # confirm against vedro.core before changing.
    plugin: Type[Plugin] = None

Event Dispatcher

Central event system for plugin communication and framework extensibility.

class Dispatcher:
    """
    Event dispatcher for plugin communication.

    Manages event subscriptions and handles event broadcasting
    to all registered listeners throughout the framework.
    """

    # Register `listener` to be invoked for events of `event_type`.
    def listen(self, event_type: Type[Event], listener: Callable) -> None: ...
    # Broadcast `event` to all listeners registered for its type.
    def fire(self, event: Event) -> None: ...

class Subscriber:
    """
    Protocol interface for event subscribers.

    Defines the contract for objects that can subscribe to events
    through the dispatcher system.
    """

    # Implementations register their event listeners on `dispatcher`.
    def subscribe(self, dispatcher: Dispatcher) -> None: ...

Usage Examples

Basic Configuration

# vedro.cfg.py - Basic configuration file
import vedro
from vedro import Config

class CustomConfig(vedro.Config):
    """Custom configuration for project-specific settings."""

    # Enable/disable built-in plugins by subclassing their config classes
    # and overriding attributes.
    class Plugins(vedro.Config.Plugins):
        class RichReporter(vedro.Config.Plugins.RichReporter):
            enabled = True
            show_timings = True
            show_artifacts = True

        class LastFailed(vedro.Config.Plugins.LastFailed):
            enabled = True
            # File used to remember which scenarios failed last run.
            cache_file = ".vedro_last_failed"

        class Orderer(vedro.Config.Plugins.Orderer):
            enabled = True
            # Options: "declaration", "random", "reversed"
            orderer = "random"

        class Slicer(vedro.Config.Plugins.Slicer):
            enabled = True
            # Run subset of tests
            # slice = "1/4"  # Run first quarter

        class Tagger(vedro.Config.Plugins.Tagger):
            enabled = True
            # tags = ["smoke", "integration"]

Advanced Configuration with Custom Components

# vedro.cfg.py - Advanced configuration
import vedro
from vedro import Config, computed
from vedro.core import *
from pathlib import Path

class AdvancedConfig(vedro.Config):
    """Advanced configuration with custom components."""

    # Custom registry configuration: override the default component
    # factories declared on vedro.Config.Registry.
    class Registry(vedro.Config.Registry):
        # Custom scenario finder with additional filters.
        # Factory receives a zero-argument callable that builds the instance.
        ScenarioFinder = Factory[ScenarioFinder](lambda: ScenarioFileFinder(
            file_filter=AnyFilter([
                HiddenFilter(),
                DunderFilter(),
                ExtFilter(only=["py"]),
                CustomTestFilter()  # Custom filter
            ]),
            dir_filter=AnyFilter([
                HiddenFilter(),
                DunderFilter(),
                IgnoreDirectoryFilter(["__pycache__", ".git", "venv"])
            ])
        ))

        # Custom scenario orderer (a class is also an acceptable factory).
        ScenarioOrderer = Factory[ScenarioOrderer](CustomScenarioOrderer)

        # Custom runner with additional features.
        # Resolves the shared Dispatcher singleton from the base registry.
        ScenarioRunner = Factory[ScenarioRunner](lambda: EnhancedScenarioRunner(
            dispatcher=Config.Registry.Dispatcher(),
            interrupt_exceptions=(KeyboardInterrupt, SystemExit),
            timeout_seconds=300,  # 5 minute timeout per scenario
            retry_failed=True
        ))

    class Plugins(vedro.Config.Plugins):
        # Custom plugin configuration (CustomReporterConfig /
        # DatabaseCleanerConfig are user-defined PluginConfig subclasses).
        class CustomReporter(CustomReporterConfig):
            enabled = True
            output_format = "json"
            output_file = "test_results.json"
            include_artifacts = True

        class DatabaseCleaner(DatabaseCleanerConfig):
            enabled = True
            cleanup_after_each_scenario = True
            test_database_url = "sqlite:///test.db"

        # Modify built-in plugin behavior
        class Slicer(vedro.Config.Plugins.Slicer):
            enabled = True

            @computed
            def depends_on(cls):
                # Ensure Skipper runs before Slicer
                return [AdvancedConfig.Plugins.Skipper]

Custom Plugin Development

from vedro.core import Plugin, PluginConfig
from vedro.events import *
import json
import time

class PerformanceMonitorPlugin(Plugin):
    """Plugin to monitor test performance and generate reports.

    Records wall-clock timings for the whole session, every scenario,
    and every step; warns when the configured thresholds are exceeded;
    and, when enabled, writes a JSON report during cleanup.
    """

    def __init__(self, config: "PerformanceMonitorConfig"):
        super().__init__(config)
        self.config = config
        # All collected timing data.  Scenario entries carry their own
        # "steps" sub-dicts; the top-level "steps" key is kept for
        # structural compatibility but is not populated by this plugin.
        self.performance_data = {
            "scenarios": {},
            "steps": {},
            "session_start": None,
            "session_end": None
        }

    def subscribe(self, dispatcher: Dispatcher):
        """Subscribe to session, scenario, and step lifecycle events."""
        dispatcher.listen(StartupEvent, self.on_startup)
        dispatcher.listen(CleanupEvent, self.on_cleanup)

        dispatcher.listen(ScenarioRunEvent, self.on_scenario_start)
        # Passed and failed scenarios are timed identically.
        dispatcher.listen(ScenarioPassedEvent, self.on_scenario_end)
        dispatcher.listen(ScenarioFailedEvent, self.on_scenario_end)

        dispatcher.listen(StepRunEvent, self.on_step_start)
        dispatcher.listen(StepPassedEvent, self.on_step_end)
        dispatcher.listen(StepFailedEvent, self.on_step_end)

    def on_startup(self, event: StartupEvent):
        """Record session start time."""
        self.performance_data["session_start"] = time.time()
        print(f"Performance monitoring started for {len(event.scheduler.discovered)} scenarios")

    def on_scenario_start(self, event: ScenarioRunEvent):
        """Record scenario start time."""
        scenario_id = event.scenario_result.scenario.unique_id
        self.performance_data["scenarios"][scenario_id] = {
            "subject": event.scenario_result.scenario.subject,
            "start_time": time.time(),
            "steps": {}
        }

    def on_scenario_end(self, event):
        """Record scenario completion, duration, and final status.

        Handles both ScenarioPassedEvent and ScenarioFailedEvent,
        hence the untyped ``event`` parameter.
        """
        scenario_id = event.scenario_result.scenario.unique_id
        scenario_data = self.performance_data["scenarios"].get(scenario_id)
        if scenario_data is None:
            # No matching start event was recorded; nothing to time.
            return
        scenario_data["end_time"] = time.time()
        scenario_data["duration"] = scenario_data["end_time"] - scenario_data["start_time"]
        scenario_data["status"] = event.scenario_result.status.value

        # Warn about scenarios exceeding the configured threshold.
        if scenario_data["duration"] > self.config.slow_scenario_threshold:
            print(f"⚠️  SLOW SCENARIO: {scenario_data['subject']} took {scenario_data['duration']:.2f}s")

    def on_step_start(self, event: StepRunEvent):
        """Record step start time."""
        scenario_id = event.step_result.scenario_result.scenario.unique_id
        step_name = event.step_result.step_name

        if scenario_id in self.performance_data["scenarios"]:
            self.performance_data["scenarios"][scenario_id]["steps"][step_name] = {
                "start_time": time.time()
            }

    def on_step_end(self, event):
        """Record step completion, duration, and final status.

        Handles both StepPassedEvent and StepFailedEvent.
        """
        scenario_id = event.step_result.scenario_result.scenario.unique_id
        step_name = event.step_result.step_name

        scenario_data = self.performance_data["scenarios"].get(scenario_id)
        if scenario_data is None:
            return
        step_data = scenario_data["steps"].get(step_name)
        if step_data is None:
            return

        step_data["end_time"] = time.time()
        step_data["duration"] = step_data["end_time"] - step_data["start_time"]
        step_data["status"] = event.step_result.status.value

        # Warn about steps exceeding the configured threshold.
        if step_data["duration"] > self.config.slow_step_threshold:
            print(f"⚠️  SLOW STEP: {step_name} took {step_data['duration']:.2f}s")

    def on_cleanup(self, event: CleanupEvent):
        """Finalize session timing and emit the report and summary."""
        self.performance_data["session_end"] = time.time()
        # BUG FIX: if StartupEvent never fired (e.g. an interrupted run),
        # session_start is still None and the subtraction would raise a
        # TypeError.  Fall back to a zero-length session instead.
        session_start = self.performance_data["session_start"]
        if session_start is None:
            session_start = self.performance_data["session_end"]
        self.performance_data["total_duration"] = (
            self.performance_data["session_end"] - session_start
        )

        # Generate report
        if self.config.generate_report:
            self._generate_performance_report()

        # Print summary
        self._print_performance_summary()

    def _generate_performance_report(self):
        """Generate detailed JSON performance report at config.report_file."""
        report_data = {
            "session": {
                "start_time": self.performance_data["session_start"],
                "end_time": self.performance_data["session_end"],
                "total_duration": self.performance_data["total_duration"],
                "scenario_count": len(self.performance_data["scenarios"])
            },
            "scenarios": [],
            "summary": self._calculate_summary()
        }

        # Add scenario details, with per-step breakdowns.
        for scenario_id, scenario_data in self.performance_data["scenarios"].items():
            scenario_report = {
                "id": scenario_id,
                "subject": scenario_data["subject"],
                "duration": scenario_data.get("duration", 0),
                "status": scenario_data.get("status", "unknown"),
                "steps": [
                    {
                        "name": step_name,
                        "duration": step_data.get("duration", 0),
                        "status": step_data.get("status", "unknown")
                    }
                    for step_name, step_data in scenario_data["steps"].items()
                ]
            }
            report_data["scenarios"].append(scenario_report)

        # Write report to file
        with open(self.config.report_file, 'w') as f:
            json.dump(report_data, f, indent=2)

        print(f"📊 Performance report written to {self.config.report_file}")

    def _calculate_summary(self):
        """Calculate performance summary statistics.

        Returns a dict that always contains ``avg_duration``,
        ``max_duration``, ``min_duration``, and ``slow_scenarios``,
        even when no scenario finished.
        """
        durations = [
            data["duration"]
            for data in self.performance_data["scenarios"].values()
            if "duration" in data
        ]

        if not durations:
            # BUG FIX: "slow_scenarios" was missing from this branch, so
            # _print_performance_summary raised KeyError on empty runs.
            return {
                "avg_duration": 0,
                "max_duration": 0,
                "min_duration": 0,
                "slow_scenarios": 0,
            }

        return {
            "avg_duration": sum(durations) / len(durations),
            "max_duration": max(durations),
            "min_duration": min(durations),
            "slow_scenarios": len([d for d in durations if d > self.config.slow_scenario_threshold])
        }

    def _print_performance_summary(self):
        """Print performance summary to console."""
        summary = self._calculate_summary()

        print("\n" + "="*50)
        print("📊 PERFORMANCE SUMMARY")
        print("="*50)
        print(f"Total session time: {self.performance_data['total_duration']:.2f}s")
        print(f"Average scenario time: {summary['avg_duration']:.2f}s")
        print(f"Slowest scenario: {summary['max_duration']:.2f}s")
        print(f"Fastest scenario: {summary['min_duration']:.2f}s")
        print(f"Slow scenarios (>{self.config.slow_scenario_threshold}s): {summary['slow_scenarios']}")
        print("="*50)

class PerformanceMonitorConfig(PluginConfig):
    """Configuration for the Performance Monitor plugin."""

    # Concrete plugin class this config instantiates.
    plugin = PerformanceMonitorPlugin
    enabled = False  # Enable when needed

    # Performance thresholds: durations above these trigger console warnings.
    slow_scenario_threshold: float = 10.0  # seconds
    slow_step_threshold: float = 5.0       # seconds

    # Report generation: when True, a JSON report is written on cleanup.
    generate_report: bool = True
    report_file: str = "performance_report.json"

# Add to configuration
class Config(vedro.Config):
    # Register the custom plugin and override its defaults.
    class Plugins(vedro.Config.Plugins):
        class PerformanceMonitor(PerformanceMonitorConfig):
            enabled = True
            slow_scenario_threshold = 5.0
            report_file = "perf_report.json"

Dependency Injection Patterns

from vedro.core import Factory, Singleton, Container

class CustomConfig(vedro.Config):
    """Configuration demonstrating dependency injection patterns."""

    class Registry(vedro.Config.Registry):
        # Singleton - single instance shared across the application
        DatabaseConnection = Singleton[DatabaseConnection](
            lambda: DatabaseConnection("sqlite:///test.db")
        )

        # Factory - new instance created each time
        HttpClient = Factory[HttpClient](
            lambda: HttpClient(timeout=30, retries=3)
        )

        # Factory with dependencies: collaborators are resolved from the
        # registry at creation time, not at declaration time.
        ApiService = Factory[ApiService](lambda: ApiService(
            http_client=Config.Registry.HttpClient(),
            database=Config.Registry.DatabaseConnection()
        ))

        # Custom factory with configuration baked into the lambda.
        CustomReporter = Factory[CustomReporter](lambda: CustomReporter(
            output_dir=Path("test_reports"),
            format="json",
            include_screenshots=True
        ))

# Usage in plugins
class DatabaseTestPlugin(Plugin):
    """Example plugin resolving its collaborators from the DI registry."""

    def __init__(self, config):
        super().__init__(config)
        registry = Config.Registry
        # Singleton: every resolution yields the same shared connection.
        self.db = registry.DatabaseConnection()
        # Factory: each resolution yields a fresh HTTP client.
        self.http_client = registry.HttpClient()

Types

Configuration Types

Core types used in the configuration system:

from typing import Type, Sequence, Any, Callable
# NOTE(review): Sequence is imported but unused in this excerpt.

# Factory and container types.
# A factory is any zero-argument callable producing a component.
FactoryType = Callable[[], Any]
Container = Any  # Dependency injection container

# Plugin types: class objects, not instances.
PluginType = Type[Plugin]
ConfigType = Type[PluginConfig]

# Computed property support: the callable evaluated during resolution.
ComputedProperty = Callable[[], Any]

Advanced Patterns

Environment-based Configuration

Configure different settings for different environments:

import os
from pathlib import Path

class EnvironmentConfig(vedro.Config):
    """Environment-aware configuration."""

    @property
    def environment(self):
        # Defaults to "development" when TEST_ENV is unset.
        return os.environ.get("TEST_ENV", "development")

    class Plugins(vedro.Config.Plugins):
        class RichReporter(vedro.Config.Plugins.RichReporter):
            @computed
            def enabled(cls):
                # Disable rich output in CI
                return os.environ.get("CI") != "true"

        class SilentReporter(vedro.Config.Plugins.SilentReporter):
            @computed
            def enabled(cls):
                # Enable silent mode in CI (inverse of RichReporter above)
                return os.environ.get("CI") == "true"

        class LastFailed(vedro.Config.Plugins.LastFailed):
            @computed
            def cache_file(cls):
                # Different cache files per environment
                env = os.environ.get("TEST_ENV", "development")
                return f".vedro_last_failed_{env}"

Conditional Plugin Loading

Load plugins based on runtime conditions:

class ConditionalConfig(vedro.Config):
    """Configuration with conditional plugin loading."""

    class Plugins(vedro.Config.Plugins):
        # Only enable performance monitoring in development
        class PerformanceMonitor(PerformanceMonitorConfig):
            @computed
            def enabled(cls):
                return os.environ.get("TEST_ENV") == "development"

        # Enable database plugins only if database is available
        class DatabaseCleaner(DatabaseCleanerConfig):
            @computed
            def enabled(cls):
                return os.environ.get("DATABASE_URL") is not None

        # Enable screenshot capture only with GUI tests.
        # NOTE(review): `sys` is used below but not imported in this
        # snippet's import block — add `import sys` when copying it.
        class ScreenshotCapture(ScreenshotCaptureConfig):
            @computed
            def enabled(cls):
                return "--gui" in sys.argv

docs

artifacts-files.md

assertions.md

cli.md

configuration.md

context-cleanup.md

events.md

execution-control.md

index.md

parameterization.md

test-definition.md

tile.json