
# Configuration and Plugins

Configuration system with plugin management, dependency injection, and extensible architecture.

## Capabilities

### Core Configuration

Main configuration class with registry system and plugin management.

```python { .api }
class Config:
    """
    Main configuration class for the Vedro testing framework.

    Defines settings for framework behavior, plugin configuration,
    and dependency injection through the Registry system.
    """

    validate_plugins_configs: bool = True
    """Whether to validate plugin configurations to prevent errors."""

    class Registry:
        """
        Dependency injection registry for core components.

        Provides Factory and Singleton patterns for configuring
        framework components like finders, loaders, and runners.
        """

        # Core singletons
        Dispatcher: Singleton[Dispatcher]
        ScenarioCollector: Singleton[ScenarioCollector]

        # Core factories
        ModuleLoader: Factory[ModuleLoader]
        ScenarioFinder: Factory[ScenarioFinder]
        ScenarioLoader: Factory[ScenarioLoader]
        ScenarioOrderer: Factory[ScenarioOrderer]
        ScenarioDiscoverer: Factory[ScenarioDiscoverer]
        ScenarioScheduler: Factory[ScenarioScheduler]
        ScenarioRunner: Factory[ScenarioRunner]

    class Plugins:
        """
        Plugin configuration container.

        Contains nested configuration classes for each built-in plugin,
        allowing fine-grained control over plugin behavior.
        """

        # Output and reporting plugins
        class Director: ...
        class RichReporter: ...
        class SilentReporter: ...
        class PyCharmReporter: ...

        # Test execution plugins
        class Functioner: ...
        class TempKeeper: ...
        class Orderer: ...
        class LastFailed: ...
        class Repeater: ...
        class Rerunner: ...

        # Test control plugins
        class Deferrer: ...
        class Skipper: ...
        class Slicer: ...
        class Tagger: ...

        # Utility plugins
        class Seeder: ...
        class Artifacted: ...
        class AssertRewriter: ...
        class DryRunner: ...
        class Ensurer: ...
        class Interrupter: ...
        class SystemUpgrade: ...
        class TipAdviser: ...
        class Terminator: ...

def computed(fn: Callable) -> Callable:
    """
    Decorator for computed configuration properties.

    Marks a method as a computed property that should be
    evaluated dynamically during configuration resolution.

    Args:
        fn: Method to mark as computed

    Returns:
        The method with computed metadata
    """
```

### Plugin System

Base classes and interfaces for creating custom plugins.

```python { .api }
class Plugin:
    """
    Abstract base class for vedro plugins.

    Plugins extend framework functionality by subscribing to events
    and providing additional capabilities during test execution.
    """

    def __init__(self, config: PluginConfig): ...
    def subscribe(self, dispatcher: Dispatcher) -> None: ...

class PluginConfig:
    """
    Base configuration class for plugins.

    Provides common configuration interface and enables
    plugin-specific settings management.
    """

    enabled: bool = True
    plugin: Type[Plugin] = None
```

### Event Dispatcher

Central event system for plugin communication and framework extensibility.

```python { .api }
class Dispatcher:
    """
    Event dispatcher for plugin communication.

    Manages event subscriptions and handles event broadcasting
    to all registered listeners throughout the framework.
    """

    def listen(self, event_type: Type[Event], listener: Callable) -> None: ...
    def fire(self, event: Event) -> None: ...

class Subscriber:
    """
    Protocol interface for event subscribers.

    Defines the contract for objects that can subscribe to events
    through the dispatcher system.
    """

    def subscribe(self, dispatcher: Dispatcher) -> None: ...
```

## Usage Examples

### Basic Configuration

```python
# vedro.cfg.py - Basic configuration file
import vedro
from vedro import Config

class CustomConfig(vedro.Config):
    """Custom configuration for project-specific settings."""

    # Enable/disable built-in plugins
    class Plugins(vedro.Config.Plugins):
        class RichReporter(vedro.Config.Plugins.RichReporter):
            enabled = True
            show_timings = True
            show_artifacts = True

        class LastFailed(vedro.Config.Plugins.LastFailed):
            enabled = True
            cache_file = ".vedro_last_failed"

        class Orderer(vedro.Config.Plugins.Orderer):
            enabled = True
            # Options: "declaration", "random", "reversed"
            orderer = "random"

        class Slicer(vedro.Config.Plugins.Slicer):
            enabled = True
            # Run subset of tests
            # slice = "1/4" # Run first quarter

        class Tagger(vedro.Config.Plugins.Tagger):
            enabled = True
            # tags = ["smoke", "integration"]
```

### Advanced Configuration with Custom Components

```python
# vedro.cfg.py - Advanced configuration
import vedro
from vedro import Config, computed
from vedro.core import *
from pathlib import Path

class AdvancedConfig(vedro.Config):
    """Advanced configuration with custom components."""

    # Custom registry configuration
    class Registry(vedro.Config.Registry):
        # Custom scenario finder with additional filters
        ScenarioFinder = Factory[ScenarioFinder](lambda: ScenarioFileFinder(
            file_filter=AnyFilter([
                HiddenFilter(),
                DunderFilter(),
                ExtFilter(only=["py"]),
                CustomTestFilter()  # Custom filter
            ]),
            dir_filter=AnyFilter([
                HiddenFilter(),
                DunderFilter(),
                IgnoreDirectoryFilter(["__pycache__", ".git", "venv"])
            ])
        ))

        # Custom scenario orderer
        ScenarioOrderer = Factory[ScenarioOrderer](CustomScenarioOrderer)

        # Custom runner with additional features
        ScenarioRunner = Factory[ScenarioRunner](lambda: EnhancedScenarioRunner(
            dispatcher=Config.Registry.Dispatcher(),
            interrupt_exceptions=(KeyboardInterrupt, SystemExit),
            timeout_seconds=300,  # 5 minute timeout per scenario
            retry_failed=True
        ))

    class Plugins(vedro.Config.Plugins):
        # Custom plugin configuration
        class CustomReporter(CustomReporterConfig):
            enabled = True
            output_format = "json"
            output_file = "test_results.json"
            include_artifacts = True

        class DatabaseCleaner(DatabaseCleanerConfig):
            enabled = True
            cleanup_after_each_scenario = True
            test_database_url = "sqlite:///test.db"

        # Modify built-in plugin behavior
        class Slicer(vedro.Config.Plugins.Slicer):
            enabled = True

            @computed
            def depends_on(cls):
                # Ensure Skipper runs before Slicer
                return [AdvancedConfig.Plugins.Skipper]
```

### Custom Plugin Development

```python
from vedro.core import Plugin, PluginConfig
from vedro.events import *
import json
import time

class PerformanceMonitorPlugin(Plugin):
    """Plugin to monitor test performance and generate reports."""

    def __init__(self, config: "PerformanceMonitorConfig"):
        super().__init__(config)
        self.config = config
        self.performance_data = {
            "scenarios": {},
            "steps": {},
            "session_start": None,
            "session_end": None
        }

    def subscribe(self, dispatcher: Dispatcher):
        """Subscribe to relevant events."""
        dispatcher.listen(StartupEvent, self.on_startup)
        dispatcher.listen(CleanupEvent, self.on_cleanup)

        dispatcher.listen(ScenarioRunEvent, self.on_scenario_start)
        dispatcher.listen(ScenarioPassedEvent, self.on_scenario_end)
        dispatcher.listen(ScenarioFailedEvent, self.on_scenario_end)

        dispatcher.listen(StepRunEvent, self.on_step_start)
        dispatcher.listen(StepPassedEvent, self.on_step_end)
        dispatcher.listen(StepFailedEvent, self.on_step_end)

    def on_startup(self, event: StartupEvent):
        """Record session start time."""
        self.performance_data["session_start"] = time.time()
        print(f"Performance monitoring started for {len(event.scheduler.discovered)} scenarios")

    def on_scenario_start(self, event: ScenarioRunEvent):
        """Record scenario start time."""
        scenario_id = event.scenario_result.scenario.unique_id
        self.performance_data["scenarios"][scenario_id] = {
            "subject": event.scenario_result.scenario.subject,
            "start_time": time.time(),
            "steps": {}
        }

    def on_scenario_end(self, event):
        """Record scenario completion and duration."""
        scenario_id = event.scenario_result.scenario.unique_id
        if scenario_id in self.performance_data["scenarios"]:
            scenario_data = self.performance_data["scenarios"][scenario_id]
            scenario_data["end_time"] = time.time()
            scenario_data["duration"] = scenario_data["end_time"] - scenario_data["start_time"]
            scenario_data["status"] = event.scenario_result.status.value

            # Check for performance thresholds
            if scenario_data["duration"] > self.config.slow_scenario_threshold:
                print(f"⚠️ SLOW SCENARIO: {scenario_data['subject']} took {scenario_data['duration']:.2f}s")

    def on_step_start(self, event: StepRunEvent):
        """Record step start time."""
        scenario_id = event.step_result.scenario_result.scenario.unique_id
        step_name = event.step_result.step_name

        if scenario_id in self.performance_data["scenarios"]:
            self.performance_data["scenarios"][scenario_id]["steps"][step_name] = {
                "start_time": time.time()
            }

    def on_step_end(self, event):
        """Record step completion and duration."""
        scenario_id = event.step_result.scenario_result.scenario.unique_id
        step_name = event.step_result.step_name

        if (scenario_id in self.performance_data["scenarios"] and
                step_name in self.performance_data["scenarios"][scenario_id]["steps"]):

            step_data = self.performance_data["scenarios"][scenario_id]["steps"][step_name]
            step_data["end_time"] = time.time()
            step_data["duration"] = step_data["end_time"] - step_data["start_time"]
            step_data["status"] = event.step_result.status.value

            # Check for step performance thresholds
            if step_data["duration"] > self.config.slow_step_threshold:
                print(f"⚠️ SLOW STEP: {step_name} took {step_data['duration']:.2f}s")

    def on_cleanup(self, event: CleanupEvent):
        """Generate performance report."""
        self.performance_data["session_end"] = time.time()
        self.performance_data["total_duration"] = (
            self.performance_data["session_end"] - self.performance_data["session_start"]
        )

        # Generate report
        if self.config.generate_report:
            self._generate_performance_report()

        # Print summary
        self._print_performance_summary()

    def _generate_performance_report(self):
        """Generate detailed JSON performance report."""
        report_data = {
            "session": {
                "start_time": self.performance_data["session_start"],
                "end_time": self.performance_data["session_end"],
                "total_duration": self.performance_data["total_duration"],
                "scenario_count": len(self.performance_data["scenarios"])
            },
            "scenarios": [],
            "summary": self._calculate_summary()
        }

        # Add scenario details
        for scenario_id, scenario_data in self.performance_data["scenarios"].items():
            scenario_report = {
                "id": scenario_id,
                "subject": scenario_data["subject"],
                "duration": scenario_data.get("duration", 0),
                "status": scenario_data.get("status", "unknown"),
                "steps": []
            }

            # Add step details
            for step_name, step_data in scenario_data["steps"].items():
                step_report = {
                    "name": step_name,
                    "duration": step_data.get("duration", 0),
                    "status": step_data.get("status", "unknown")
                }
                scenario_report["steps"].append(step_report)

            report_data["scenarios"].append(scenario_report)

        # Write report to file
        with open(self.config.report_file, 'w') as f:
            json.dump(report_data, f, indent=2)

        print(f"📊 Performance report written to {self.config.report_file}")

    def _calculate_summary(self):
        """Calculate performance summary statistics."""
        durations = [
            data.get("duration", 0)
            for data in self.performance_data["scenarios"].values()
            if "duration" in data
        ]

        if not durations:
            return {"avg_duration": 0, "max_duration": 0, "min_duration": 0}

        return {
            "avg_duration": sum(durations) / len(durations),
            "max_duration": max(durations),
            "min_duration": min(durations),
            "slow_scenarios": len([d for d in durations if d > self.config.slow_scenario_threshold])
        }

    def _print_performance_summary(self):
        """Print performance summary to console."""
        summary = self._calculate_summary()

        print("\n" + "="*50)
        print("📊 PERFORMANCE SUMMARY")
        print("="*50)
        print(f"Total session time: {self.performance_data['total_duration']:.2f}s")
        print(f"Average scenario time: {summary['avg_duration']:.2f}s")
        print(f"Slowest scenario: {summary['max_duration']:.2f}s")
        print(f"Fastest scenario: {summary['min_duration']:.2f}s")
        print(f"Slow scenarios (>{self.config.slow_scenario_threshold}s): {summary['slow_scenarios']}")
        print("="*50)

class PerformanceMonitorConfig(PluginConfig):
    """Configuration for the Performance Monitor plugin."""

    plugin = PerformanceMonitorPlugin
    enabled = False  # Enable when needed

    # Performance thresholds
    slow_scenario_threshold: float = 10.0  # seconds
    slow_step_threshold: float = 5.0  # seconds

    # Report generation
    generate_report: bool = True
    report_file: str = "performance_report.json"

# Add to configuration
class Config(vedro.Config):
    class Plugins(vedro.Config.Plugins):
        class PerformanceMonitor(PerformanceMonitorConfig):
            enabled = True
            slow_scenario_threshold = 5.0
            report_file = "perf_report.json"
```

### Dependency Injection Patterns

```python
from vedro.core import Factory, Singleton, Container

class CustomConfig(vedro.Config):
    """Configuration demonstrating dependency injection patterns."""

    class Registry(vedro.Config.Registry):
        # Singleton - single instance shared across the application
        DatabaseConnection = Singleton[DatabaseConnection](
            lambda: DatabaseConnection("sqlite:///test.db")
        )

        # Factory - new instance created each time
        HttpClient = Factory[HttpClient](
            lambda: HttpClient(timeout=30, retries=3)
        )

        # Factory with dependencies
        ApiService = Factory[ApiService](lambda: ApiService(
            http_client=Config.Registry.HttpClient(),
            database=Config.Registry.DatabaseConnection()
        ))

        # Custom factory with configuration
        CustomReporter = Factory[CustomReporter](lambda: CustomReporter(
            output_dir=Path("test_reports"),
            format="json",
            include_screenshots=True
        ))

# Usage in plugins
class DatabaseTestPlugin(Plugin):
    def __init__(self, config):
        super().__init__(config)

        # Access shared database connection
        self.db = Config.Registry.DatabaseConnection()

        # Create new HTTP client instance
        self.http_client = Config.Registry.HttpClient()
```

## Types

### Configuration Types

Core types used in the configuration system:

```python { .api }
from typing import Type, Sequence, Any, Callable

# Factory and container types
FactoryType = Callable[[], Any]
Container = Any  # Dependency injection container

# Plugin types
PluginType = Type[Plugin]
ConfigType = Type[PluginConfig]

# Computed property support
ComputedProperty = Callable[[], Any]
```

## Advanced Patterns

### Environment-based Configuration

Configure different settings for different environments:

```python
import os
from pathlib import Path

class EnvironmentConfig(vedro.Config):
    """Environment-aware configuration."""

    @property
    def environment(self):
        return os.environ.get("TEST_ENV", "development")

    class Plugins(vedro.Config.Plugins):
        class RichReporter(vedro.Config.Plugins.RichReporter):
            @computed
            def enabled(cls):
                # Disable rich output in CI
                return os.environ.get("CI") != "true"

        class SilentReporter(vedro.Config.Plugins.SilentReporter):
            @computed
            def enabled(cls):
                # Enable silent mode in CI
                return os.environ.get("CI") == "true"

        class LastFailed(vedro.Config.Plugins.LastFailed):
            @computed
            def cache_file(cls):
                # Different cache files per environment
                env = os.environ.get("TEST_ENV", "development")
                return f".vedro_last_failed_{env}"
```

### Conditional Plugin Loading

Load plugins based on runtime conditions:

```python
class ConditionalConfig(vedro.Config):
    """Configuration with conditional plugin loading."""

    class Plugins(vedro.Config.Plugins):
        # Only enable performance monitoring in development
        class PerformanceMonitor(PerformanceMonitorConfig):
            @computed
            def enabled(cls):
                return os.environ.get("TEST_ENV") == "development"

        # Enable database plugins only if database is available
        class DatabaseCleaner(DatabaseCleanerConfig):
            @computed
            def enabled(cls):
                return os.environ.get("DATABASE_URL") is not None

        # Enable screenshot capture only with GUI tests
        class ScreenshotCapture(ScreenshotCaptureConfig):
            @computed
            def enabled(cls):
                return "--gui" in sys.argv
```