# Wait Time Functions

Wait time functions control the timing between task executions, enabling realistic simulation of user behavior patterns. Locust provides functions for random intervals, constant delays, pacing control, and throughput management.

## Capabilities

### Random Wait Times

Generate random wait times between minimum and maximum values to simulate natural user behavior variations.

```python { .api }
def between(min_wait, max_wait):
    """
    Return a function that generates random wait times between min and max.

    Args:
        min_wait (float): Minimum wait time in seconds
        max_wait (float): Maximum wait time in seconds

    Returns:
        callable: Function that returns random float between min_wait and max_wait

    Usage:
        wait_time = between(1, 5)  # Wait 1-5 seconds randomly
    """
```

### Constant Wait Times

Generate fixed wait times for predictable user behavior patterns.

```python { .api }
def constant(wait_time):
    """
    Return a function that generates constant wait times.

    Args:
        wait_time (float): Fixed wait time in seconds

    Returns:
        callable: Function that always returns the same wait time

    Usage:
        wait_time = constant(2)  # Always wait exactly 2 seconds
    """
```

### Constant Pacing

Maintain consistent pacing between task executions, accounting for task execution time.

```python { .api }
def constant_pacing(wait_time):
    """
    Return a function that maintains constant pacing between tasks.

    Ensures consistent intervals between task starts by subtracting
    the task execution time from the wait time. If task takes longer
    than the target pacing, no wait occurs.

    Args:
        wait_time (float): Target pacing interval in seconds

    Returns:
        callable: Function that returns adjusted wait time for consistent pacing

    Usage:
        wait_time = constant_pacing(2)  # Start new task every 2 seconds
    """
```

### Constant Throughput

Maintain target throughput (tasks per second) across all users in the test.

```python { .api }
def constant_throughput(task_runs_per_second):
    """
    Return a function that maintains constant throughput across all users.

    Calculates wait times to achieve target tasks per second globally.
    Wait time is distributed across all active users to maintain
    the specified throughput rate.

    Args:
        task_runs_per_second (float): Target tasks per second across all users

    Returns:
        callable: Function that returns wait time to maintain target throughput

    Usage:
        wait_time = constant_throughput(10.0)  # 10 tasks/second total
    """
```

## Usage Examples

### Random Wait Time Example

```python
from locust import HttpUser, task, between
import random

class VariableUser(HttpUser):
    # Random wait between 1-5 seconds
    wait_time = between(1, 5)

    @task
    def browse_page(self):
        # Simulate browsing different pages
        pages = ["/", "/about", "/products", "/contact"]
        page = random.choice(pages)
        self.client.get(page)

    @task(2)  # More frequent task
    def quick_action(self):
        # Quick API call
        self.client.get("/api/status")

# Can also use different ranges for different user classes
class FastUser(HttpUser):
    wait_time = between(0.5, 2)  # Fast user, short waits

class SlowUser(HttpUser):
    wait_time = between(5, 15)  # Slow user, long waits
```

### Constant Wait Time Example

```python
from locust import HttpUser, task, constant
import time

class SteadyUser(HttpUser):
    # Always wait exactly 2 seconds
    wait_time = constant(2)

    @task
    def regular_check(self):
        # Regular health check every 2 seconds
        self.client.get("/health")

    @task
    def data_sync(self):
        # Regular data synchronization
        self.client.post("/sync", json={"timestamp": time.time()})

class RapidFireUser(HttpUser):
    # Very short constant wait for stress testing
    wait_time = constant(0.1)

    @task
    def stress_endpoint(self):
        self.client.get("/api/fast-endpoint")
```

### Constant Pacing Example

```python
from locust import HttpUser, task, constant_pacing
import time

class PacedUser(HttpUser):
    # Start new task every 3 seconds regardless of execution time
    wait_time = constant_pacing(3)

    @task
    def fast_task(self):
        # This task completes quickly (e.g., 0.1s)
        # Next task will wait ~2.9s to maintain 3s pacing
        self.client.get("/api/quick")

    @task
    def slow_task(self):
        # This task takes longer (e.g., 2s)
        # Next task will wait ~1s to maintain 3s pacing
        self.client.get("/api/slow-operation")

    @task
    def very_slow_task(self):
        # If this task takes >3s, next task starts immediately
        # to maintain overall pacing as much as possible
        self.client.get("/api/very-slow-operation")

# Good for simulating regular intervals like cron jobs
class ScheduledUser(HttpUser):
    wait_time = constant_pacing(60)  # Every 60 seconds

    @task
    def scheduled_report(self):
        self.client.post("/generate-report")
```

### Constant Throughput Example

```python
from locust import HttpUser, task, constant_throughput
import random

class ThroughputUser(HttpUser):
    # Maintain 50 requests per second across ALL users
    wait_time = constant_throughput(50)

    @task
    def api_call(self):
        self.client.get("/api/endpoint")

# With 10 users, each user will average 5 requests/second
# With 100 users, each user will average 0.5 requests/second
# Total throughput stays at 50 requests/second

class LoadTestUser(HttpUser):
    # High throughput testing
    wait_time = constant_throughput(1000)  # 1000 requests/second total

    @task(3)
    def read_operation(self):
        self.client.get("/api/data")

    @task(1)
    def write_operation(self):
        self.client.post("/api/data", json={"value": random.randint(1, 100)})
```

### Dynamic Wait Time Example

```python
from locust import HttpUser, task, between
import random
import time

class DynamicUser(HttpUser):
    def wait_time(self):
        """Custom wait time function with dynamic behavior"""
        # Different wait times based on time of day simulation
        hour = (time.time() // 3600) % 24
        if 9 <= hour <= 17:  # Business hours
            return random.uniform(1, 3)  # Faster during business hours
        else:  # Off hours
            return random.uniform(5, 15)  # Slower during off hours

    @task
    def business_operation(self):
        self.client.get("/api/business-data")

class ConditionalWaitUser(HttpUser):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.error_count = 0

    def wait_time(self):
        """Increase wait time after errors to simulate backoff"""
        if self.error_count > 0:
            # Exponential backoff after errors
            wait = min(2 ** self.error_count, 30)  # Max 30 seconds
            self.error_count = max(0, self.error_count - 1)  # Decay error count
            return wait
        else:
            return random.uniform(1, 3)

    @task
    def api_with_error_handling(self):
        response = self.client.get("/api/unreliable")
        if response.status_code >= 500:
            self.error_count += 1
```

### Wait Time with TaskSets

```python
from locust import HttpUser, TaskSet, task, between, constant

class FastTaskSet(TaskSet):
    # TaskSets can have their own wait times
    wait_time = between(0.5, 1.5)

    @task
    def quick_task(self):
        self.client.get("/api/fast")

class SlowTaskSet(TaskSet):
    wait_time = between(5, 10)

    @task
    def slow_task(self):
        self.client.get("/api/slow")

class MixedUser(HttpUser):
    # User-level wait time (used when not in TaskSet)
    wait_time = constant(2)

    # Mix of TaskSets with different wait patterns
    tasks = [FastTaskSet, SlowTaskSet]
```

## Custom Wait Time Functions

```python
import random
import math
import time

def normal_distribution(mean, std_dev):
    """Wait time following normal distribution."""
    def wait_func():
        return max(0, random.normalvariate(mean, std_dev))
    return wait_func

def sine_wave_wait(base_time, amplitude, period):
    """Wait time that varies in sine wave pattern."""
    def wait_func():
        current_time = time.time()
        wave = math.sin(2 * math.pi * current_time / period)
        return base_time + amplitude * wave
    return wait_func

def load_profile_wait(profile_points):
    """Wait time based on predefined load profile."""
    def wait_func():
        current_time = time.time()
        start_time = getattr(wait_func, 'start_time', current_time)
        if not hasattr(wait_func, 'start_time'):
            wait_func.start_time = start_time

        elapsed = current_time - start_time
        # Interpolate between profile points based on elapsed time
        # ... implementation details ...
        return calculated_wait_time
    return wait_func

# Usage examples
class CustomWaitUser(HttpUser):
    # Normal distribution around 2 seconds
    wait_time = normal_distribution(2.0, 0.5)

    @task
    def normal_task(self):
        self.client.get("/api/endpoint")

class SineWaveUser(HttpUser):
    # Sine wave: 2±1 seconds over 60 second period
    wait_time = sine_wave_wait(2.0, 1.0, 60.0)

    @task
    def wave_task(self):
        self.client.get("/api/varying-load")
```

## Types

```python { .api }
from typing import Callable, Union
import random

# Wait time function type
WaitTimeFunction = Callable[[], float]

# Common wait time implementations
def between(min_wait: float, max_wait: float) -> WaitTimeFunction:
    """Return function generating random wait times between min and max."""
    def wait_func() -> float:
        return random.uniform(min_wait, max_wait)
    return wait_func

def constant(wait_time: float) -> WaitTimeFunction:
    """Return function generating constant wait times."""
    def wait_func() -> float:
        return wait_time
    return wait_func

def constant_pacing(wait_time: float) -> WaitTimeFunction:
    """Return function maintaining constant pacing between tasks."""
    # Implementation maintains consistent intervals between task starts

def constant_throughput(task_runs_per_second: float) -> WaitTimeFunction:
    """Return function maintaining constant throughput across all users."""
    # Implementation coordinates across users to maintain target throughput

# Custom wait time function signature
CustomWaitFunction = Callable[[], float]
```