Developer-friendly load testing framework for HTTP and other protocols with distributed testing capabilities.
—
Wait time functions control the timing between task executions, enabling realistic simulation of user behavior patterns. Locust provides functions for random intervals, constant delays, pacing control, and throughput management.
Generate random wait times between minimum and maximum values to simulate natural user behavior variations.
def between(min_wait, max_wait):
    """
    Return a function that generates random wait times between min and max.

    Args:
        min_wait (float): Minimum wait time in seconds
        max_wait (float): Maximum wait time in seconds

    Returns:
        callable: Function that returns random float between min_wait and max_wait

    Usage:
        wait_time = between(1, 5)  # Wait 1-5 seconds randomly
    """
    def wait_time_func(*args):
        # *args absorbs the User instance when Locust invokes this as a
        # bound method on a user class (wait_time is a class attribute).
        return random.uniform(min_wait, max_wait)
    return wait_time_func
# Generate fixed wait times for predictable user behavior patterns.
def constant(wait_time):
    """
    Return a function that generates constant wait times.

    Args:
        wait_time (float): Fixed wait time in seconds

    Returns:
        callable: Function that always returns the same wait time

    Usage:
        wait_time = constant(2)  # Always wait exactly 2 seconds
    """
    def wait_time_func(*args):
        # *args absorbs the User instance when Locust invokes this as a
        # bound method on a user class.
        return wait_time
    return wait_time_func
# Maintain consistent pacing between task executions, accounting for task execution time.
def constant_pacing(wait_time):
    """
    Return a function that maintains constant pacing between tasks.

    Ensures consistent intervals between task starts by subtracting
    the task execution time from the wait time. If a task takes longer
    than the target pacing, no wait occurs.

    Args:
        wait_time (float): Target pacing interval in seconds

    Returns:
        callable: Function that returns adjusted wait time for consistent pacing

    Usage:
        wait_time = constant_pacing(2)  # Start new task every 2 seconds
    """
    state = {"next_start": None}  # scheduled start time of the next task

    def wait_time_func(*args):
        now = time.time()
        if state["next_start"] is None:
            wait = 0.0  # first task starts immediately
        else:
            # Wait only for the remainder of the pacing interval; if the
            # task overran it, start the next one immediately (wait = 0).
            wait = max(0.0, state["next_start"] - now)
        state["next_start"] = now + wait + wait_time
        return wait

    return wait_time_func
# Maintain target throughput (tasks per second) across all users in the test.
def constant_throughput(task_runs_per_second):
    """
    Return a function that maintains constant throughput across all users.

    Calculates wait times to achieve the target number of task runs per
    second by pacing successive task starts 1 / task_runs_per_second
    seconds apart.

    NOTE(review): the surrounding text says throughput is distributed
    across all active users; this reference implementation paces each
    wait-time callable independently — confirm against the Locust docs.

    Args:
        task_runs_per_second (float): Target tasks per second (must be > 0)

    Returns:
        callable: Function that returns wait time to maintain target throughput

    Usage:
        wait_time = constant_throughput(10.0)  # 10 tasks/second total
    """
    interval = 1.0 / task_runs_per_second  # target gap between task starts
    state = {"next_start": None}  # scheduled start time of the next task

    def wait_time_func(*args):
        now = time.time()
        if state["next_start"] is None:
            wait = 0.0  # first task runs immediately
        else:
            # Wait only for the remainder of the interval; if the task
            # overran it, start the next one immediately.
            wait = max(0.0, state["next_start"] - now)
        state["next_start"] = now + wait + interval
        return wait

    return wait_time_func
"""from locust import HttpUser, task, between
import random


class VariableUser(HttpUser):
    # Random wait between 1-5 seconds
    wait_time = between(1, 5)

    @task
    def browse_page(self):
        # Simulate browsing different pages
        pages = ["/", "/about", "/products", "/contact"]
        page = random.choice(pages)
        self.client.get(page)

    @task(2)  # More frequent task (weight 2: picked twice as often as weight 1)
    def quick_action(self):
        # Quick API call
        self.client.get("/api/status")


# Can also use different ranges for different user classes
class FastUser(HttpUser):
    wait_time = between(0.5, 2)  # Fast user, short waits


class SlowUser(HttpUser):
    wait_time = between(5, 15)  # Slow user, long waits
from locust import HttpUser, task, constant
import time  # required: data_sync below calls time.time()


class SteadyUser(HttpUser):
    """User with a fixed 2-second wait between task executions."""

    # Always wait exactly 2 seconds
    wait_time = constant(2)

    @task
    def regular_check(self):
        # Regular health check every 2 seconds
        self.client.get("/health")

    @task
    def data_sync(self):
        # Regular data synchronization; timestamp is seconds since the epoch
        self.client.post("/sync", json={"timestamp": time.time()})


class RapidFireUser(HttpUser):
    """User with a very short constant wait, for stress testing."""

    wait_time = constant(0.1)

    @task
    def stress_endpoint(self):
        self.client.get("/api/fast-endpoint")
from locust import HttpUser, task, constant_pacing
import time


class PacedUser(HttpUser):
    # Start a new task every 3 seconds regardless of execution time
    wait_time = constant_pacing(3)

    @task
    def fast_task(self):
        # This task completes quickly (e.g., 0.1s)
        # Next task will wait ~2.9s to maintain 3s pacing
        self.client.get("/api/quick")

    @task
    def slow_task(self):
        # This task takes longer (e.g., 2s)
        # Next task will wait ~1s to maintain 3s pacing
        self.client.get("/api/slow-operation")

    @task
    def very_slow_task(self):
        # If this task takes >3s, next task starts immediately
        # to maintain overall pacing as much as possible
        self.client.get("/api/very-slow-operation")


# Good for simulating regular intervals like cron jobs
class ScheduledUser(HttpUser):
    wait_time = constant_pacing(60)  # Every 60 seconds

    @task
    def scheduled_report(self):
        self.client.post("/generate-report")
from locust import HttpUser, task, constant_throughput
import random  # required: write_operation below calls random.randint()


class ThroughputUser(HttpUser):
    """User paced so the test maintains a fixed overall request rate."""

    # Maintain 50 requests per second across ALL users
    wait_time = constant_throughput(50)

    @task
    def api_call(self):
        self.client.get("/api/endpoint")

    # With 10 users, each user will average 5 requests/second
    # With 100 users, each user will average 0.5 requests/second
    # Total throughput stays at 50 requests/second


class LoadTestUser(HttpUser):
    """High-throughput user mixing weighted read/write operations."""

    wait_time = constant_throughput(1000)  # 1000 requests/second total

    @task(3)
    def read_operation(self):
        self.client.get("/api/data")

    @task(1)
    def write_operation(self):
        self.client.post("/api/data", json={"value": random.randint(1, 100)})
from locust import HttpUser, task, between
import random
import time  # required: DynamicUser.wait_time below calls time.time()


class DynamicUser(HttpUser):
    def wait_time(self):
        """Custom wait time function with dynamic behavior."""
        # Different wait times based on time of day simulation.
        # NOTE(review): time.time() is epoch-based, so this is an
        # hour-of-epoch value, not local wall-clock hour — confirm intent.
        hour = (time.time() // 3600) % 24
        if 9 <= hour <= 17:  # Business hours
            return random.uniform(1, 3)  # Faster during business hours
        else:  # Off hours
            return random.uniform(5, 15)  # Slower during off hours

    @task
    def business_operation(self):
        self.client.get("/api/business-data")


class ConditionalWaitUser(HttpUser):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.error_count = 0  # consecutive-ish server-error counter

    def wait_time(self):
        """Increase wait time after errors to simulate backoff."""
        if self.error_count > 0:
            # Exponential backoff after errors
            wait = min(2 ** self.error_count, 30)  # Max 30 seconds
            self.error_count = max(0, self.error_count - 1)  # Decay error count
            return wait
        else:
            return random.uniform(1, 3)

    @task
    def api_with_error_handling(self):
        response = self.client.get("/api/unreliable")
        if response.status_code >= 500:
            self.error_count += 1
from locust import HttpUser, TaskSet, task, between, constant
class FastTaskSet(TaskSet):
    # TaskSets can have their own wait times
    wait_time = between(0.5, 1.5)

    @task
    def quick_task(self):
        self.client.get("/api/fast")


class SlowTaskSet(TaskSet):
    wait_time = between(5, 10)

    @task
    def slow_task(self):
        self.client.get("/api/slow")


class MixedUser(HttpUser):
    # User-level wait time (used when not in TaskSet)
    wait_time = constant(2)
    # Mix of TaskSets with different wait patterns
    tasks = [FastTaskSet, SlowTaskSet]
import random
import math
import time
def normal_distribution(mean, std_dev):
    """Build a wait-time callable whose samples follow a normal
    distribution, clamped at zero so waits are never negative."""
    def sampler():
        sample = random.normalvariate(mean, std_dev)
        return max(0, sample)
    return sampler
def sine_wave_wait(base_time, amplitude, period):
    """Build a wait-time callable that oscillates sinusoidally around
    base_time with the given amplitude over the given period (seconds)."""
    def oscillating_wait():
        now = time.time()
        phase = 2 * math.pi * now / period
        return base_time + amplitude * math.sin(phase)
    return oscillating_wait
def load_profile_wait(profile_points):
    """
    Return a wait-time callable driven by a predefined load profile.

    Args:
        profile_points: sequence of (elapsed_seconds, wait_seconds) pairs.
            Wait times are linearly interpolated between points; before the
            first point the first wait applies, after the last point the
            last wait applies.

    Returns:
        callable: zero-argument function returning the wait for the current
        elapsed time. The clock starts on the first call.

    Raises:
        ValueError: if profile_points is empty.
    """
    points = sorted((float(t), float(w)) for t, w in profile_points)
    if not points:
        raise ValueError("profile_points must contain at least one (time, wait) pair")

    start = {"t": None}  # wall-clock time of the first call

    def wait_func():
        now = time.time()
        if start["t"] is None:
            start["t"] = now
        elapsed = now - start["t"]
        # Clamp outside the profile's time range.
        if elapsed <= points[0][0]:
            return points[0][1]
        if elapsed >= points[-1][0]:
            return points[-1][1]
        # Linear interpolation between the two bracketing profile points.
        for (t0, w0), (t1, w1) in zip(points, points[1:]):
            if t0 <= elapsed <= t1:
                if t1 == t0:
                    return w1
                frac = (elapsed - t0) / (t1 - t0)
                return w0 + frac * (w1 - w0)
        return points[-1][1]  # defensive fallback; unreachable in practice

    return wait_func
# Usage examples
class CustomWaitUser(HttpUser):
    # Normal distribution around 2 seconds
    wait_time = normal_distribution(2.0, 0.5)

    @task
    def normal_task(self):
        self.client.get("/api/endpoint")


class SineWaveUser(HttpUser):
    # Sine wave: 2±1 seconds over 60 second period
    wait_time = sine_wave_wait(2.0, 1.0, 60.0)

    @task
    def wave_task(self):
        self.client.get("/api/varying-load")
from typing import Callable, Union
import random
import time  # required by the pacing/throughput implementations below

# Wait time function type
WaitTimeFunction = Callable[[], float]


# Common wait time implementations
def between(min_wait: float, max_wait: float) -> WaitTimeFunction:
    """Return function generating random wait times between min and max."""
    def wait_func() -> float:
        return random.uniform(min_wait, max_wait)
    return wait_func


def constant(wait_time: float) -> WaitTimeFunction:
    """Return function generating constant wait times."""
    def wait_func() -> float:
        return wait_time
    return wait_func


def constant_pacing(wait_time: float) -> WaitTimeFunction:
    """Return function maintaining constant pacing between tasks.

    Keeps successive task *starts* `wait_time` seconds apart by
    remembering when the next start is due and waiting only for the
    remainder; if a task overran the interval, the wait is zero.
    """
    state: dict = {"next_start": None}

    def wait_func() -> float:
        now = time.time()
        if state["next_start"] is None:
            wait = 0.0  # first task starts immediately
        else:
            wait = max(0.0, state["next_start"] - now)
        state["next_start"] = now + wait + wait_time
        return wait

    return wait_func


def constant_throughput(task_runs_per_second: float) -> WaitTimeFunction:
    """Return function maintaining constant throughput across all users.

    Equivalent to constant pacing at an interval of
    1 / task_runs_per_second seconds between task starts.
    """
    return constant_pacing(1.0 / task_runs_per_second)


# Custom wait time function signature
CustomWaitFunction = Callable[[], float]
Install with Tessl CLI
npx tessl i tessl/pypi-locust