django-prometheus test utilities — part of the Django middlewares that monitor your application with Prometheus.io.

Comprehensive test utilities for asserting Prometheus metric values and changes in test suites, enabling precise testing of monitoring functionality and metric behavior.

Functions for testing metric values and changes in unit tests and integration tests.
def assert_metric_equal(expected_value, metric_name: str, registry=REGISTRY, **labels):
    """
    Asserts that metric_name{**labels} == expected_value.

    Parameters:
    - expected_value: Expected metric value (int or float)
    - metric_name: str, name of the Prometheus metric
    - registry: Prometheus registry to query (default: REGISTRY — presumably
      prometheus_client's global registry; confirm against the import site)
    - **labels: Label filters as keyword arguments, e.g. method='GET'
    Raises:
    AssertionError: If metric value doesn't match expected value
    Example:
    assert_metric_equal(1, 'django_http_requests_total_by_method', method='GET')
    """
def assert_metric_not_equal(expected_value, metric_name: str, registry=REGISTRY, **labels):
    """
    Asserts that metric_name{**labels} != expected_value.

    Inverse of assert_metric_equal: useful to check that a metric moved away
    from a known value without pinning its exact new value.

    Parameters:
    - expected_value: Value that metric should NOT equal
    - metric_name: str, name of the Prometheus metric
    - registry: Prometheus registry to query (default: REGISTRY)
    - **labels: Label filters as keyword arguments
    Raises:
    AssertionError: If metric value equals expected value
    """
def assert_metric_diff(frozen_registry, expected_diff, metric_name: str, registry=REGISTRY, **labels):
    """
    Asserts that metric_name{**labels} changed by expected_diff between
    the frozen registry and current state.

    Typical usage:
    frozen = save_registry()
    doStuff()
    assert_metric_diff(frozen, 1, 'stuff_done_total')

    Parameters:
    - frozen_registry: Previously saved registry state from save_registry()
    - expected_diff: Expected change in metric value (int or float)
    - metric_name: str, name of the Prometheus metric
    - registry: Prometheus registry to query for the CURRENT value (default: REGISTRY)
    - **labels: Label filters as keyword arguments
    Raises:
    AssertionError: If metric change doesn't match expected difference
    """
def assert_metric_no_diff(frozen_registry, expected_diff, metric_name: str, registry=REGISTRY, **labels):
    """
    Asserts that metric_name{**labels} did NOT change by expected_diff
    between the frozen registry and current state.

    Parameters:
    - frozen_registry: Previously saved registry state from save_registry()
    - expected_diff: Difference that should NOT have occurred
    - metric_name: str, name of the Prometheus metric
    - registry: Prometheus registry to query for the CURRENT value (default: REGISTRY)
    - **labels: Label filters as keyword arguments
    Raises:
    AssertionError: If metric change matches the expected difference
    """
def assert_metric_compare(frozen_registry, predicate, metric_name: str, registry=REGISTRY, **labels):
    """
    Asserts that metric_name{**labels} changed according to a provided
    predicate function between the frozen registry and current state.

    Parameters:
    - frozen_registry: Previously saved registry state from save_registry()
    - predicate: Function that takes (old_value, new_value) and returns bool.
      NOTE(review): values may be None when the series is absent in one of the
      two states — predicates should handle None (see the examples below).
    - metric_name: str, name of the Prometheus metric
    - registry: Prometheus registry to query (default: REGISTRY)
    - **labels: Label filters as keyword arguments
    Raises:
    AssertionError: If predicate returns False for the metric change
    """
Functions for managing metric registry state during testing.
def save_registry(registry=REGISTRY):
    """
    Freezes a registry for later comparison.

    This lets a user test changes to a metric instead of testing
    the absolute value. Typical usage:

    registry = save_registry()
    doStuff()
    assert_metric_diff(registry, 1, 'stuff_done_total')

    Parameters:
    - registry: Prometheus registry to freeze (default: REGISTRY)
    Returns:
    Frozen registry state (deep copy of metric samples)
    """
Functions for retrieving metric values and information during testing.
def get_metric(metric_name: str, registry=REGISTRY, **labels):
    """
    Gets the current value of a single metric.

    Parameters:
    - metric_name: str, name of the Prometheus metric
    - registry: Prometheus registry to query (default: REGISTRY)
    - **labels: Label filters as keyword arguments
    Returns:
    Metric value (int/float), or None if the metric/label combination
    is not found (no exception is raised for missing metrics)
    """
def get_metrics_vector(metric_name: str, registry=REGISTRY):
    """
    Returns the values for all label combinations of a given metric.

    Parameters:
    - metric_name: str, name of the Prometheus metric
    - registry: Prometheus registry to query (default: REGISTRY)
    Returns:
    List of (labels_dict, value) tuples, one per label combination
    currently recorded for the metric
    """
def get_metric_from_frozen_registry(metric_name: str, frozen_registry, **labels):
    """
    Gets a single metric value from a previously frozen registry.

    Counterpart of get_metric() that reads from a save_registry() snapshot
    instead of a live registry.

    Parameters:
    - metric_name: str, name of the Prometheus metric
    - frozen_registry: Previously saved registry state
    - **labels: Label filters as keyword arguments
    Returns:
    Metric value (int/float) or None if metric not found
    """
def get_metric_vector_from_frozen_registry(metric_name: str, frozen_registry):
    """
    Gets all label combinations and values for a metric from a frozen registry.

    Counterpart of get_metrics_vector() that reads from a save_registry()
    snapshot instead of a live registry.

    Parameters:
    - metric_name: str, name of the Prometheus metric
    - frozen_registry: Previously saved registry state
    Returns:
    List of (labels_dict, value) tuples for all label combinations
    """
Helper functions for formatting metric information in test output.
def format_labels(labels: dict):
    """
    Format a set of labels to Prometheus representation.

    Parameters:
    - labels: dict, label key-value pairs
    Returns:
    str, formatted labels like '{method="GET",port="80"}'
    Example:
    >>> format_labels({'method': 'GET', 'port': '80'})
    '{method="GET",port="80"}'
    """
    # Keys keep dict insertion order; values are rendered with str(), so
    # non-string values become their string form. An empty dict yields '{}'.
    return "{%s}" % ",".join('%s="%s"' % (key, value) for key, value in labels.items())
def format_vector(vector):
    """
    Formats a list of (labels, value) tuples into a human-readable representation.

    Parameters:
    - vector: List of (labels_dict, value) tuples, as returned by
      get_metrics_vector()
    Returns:
    str, one 'labels = value' entry per line, labels rendered via
    format_labels()
    """
    return "\n".join(
        "%s = %s" % (format_labels(labels), value) for labels, value in vector
    )
import pytest
from django.test import TestCase
from django_prometheus.testutils import (
assert_metric_equal, assert_metric_diff, save_registry
)
class MetricsTestCase(TestCase):
    def test_request_counter(self):
        """Verify that an HTTP request increments the per-method counter."""
        # Snapshot the registry so we assert on the delta, not the absolute total.
        frozen = save_registry()
        self.client.get('/')
        # Exactly one GET since the snapshot was taken.
        assert_metric_diff(
            frozen,
            1,
            'django_http_requests_total_by_method',
            method='GET',
        )
        # In a fresh test run the absolute count is also one.
        assert_metric_equal(
            1,
            'django_http_requests_total_by_method',
            method='GET',
        )
from myapp.models import User
from django_prometheus.testutils import assert_metric_diff, save_registry
class ModelMetricsTestCase(TestCase):
    def test_model_operations(self):
        """Verify that model inserts and updates are tracked."""
        frozen = save_registry()
        # Creating a row should bump the per-model insert counter.
        User.objects.create(username='test_user')
        assert_metric_diff(
            frozen,
            1,
            'django_model_inserts_total',
            model='user',
        )
        # Saving an existing instance counts as an update, not an insert.
        user = User.objects.get(username='test_user')
        user.email = 'test@example.com'
        user.save()
        assert_metric_diff(
            frozen,
            1,
            'django_model_updates_total',
            model='user',
        )
from django.core.cache import cache
from django_prometheus.testutils import assert_metric_diff, save_registry
class CacheMetricsTestCase(TestCase):
    def test_cache_operations(self):
        """Verify cache hit/miss tracking."""
        frozen = save_registry()
        # Looking up a missing key counts as one get and one miss.
        value = cache.get('nonexistent_key')
        assert value is None
        assert_metric_diff(frozen, 1, 'django_cache_get_total', backend='locmem')
        assert_metric_diff(frozen, 1, 'django_cache_get_misses_total', backend='locmem')
        # Store a value and read it back: a second get, this time a hit.
        cache.set('test_key', 'test_value')
        value = cache.get('test_key')
        # Total gets: miss + hit
        assert_metric_diff(frozen, 2, 'django_cache_get_total', backend='locmem')
        assert_metric_diff(frozen, 1, 'django_cache_get_hits_total', backend='locmem')
from django_prometheus.testutils import assert_metric_compare, save_registry
class CustomMetricsTestCase(TestCase):
    def test_response_time_increase(self):
        """Verify that response times are recorded."""
        frozen = save_registry()
        # Hit an endpoint that should add to the latency metric.
        response = self.client.get('/slow-endpoint/')

        def response_time_increased(old_val, new_val):
            # Treat a missing sample (None) as zero on either side.
            return (new_val or 0) > (old_val or 0)

        assert_metric_compare(
            frozen,
            response_time_increased,
            'django_http_requests_latency_seconds_by_view_method',
            view='slow_endpoint',
            method='GET',
        )
from django_prometheus.testutils import get_metrics_vector, assert_metric_equal
class MultiLabelTestCase(TestCase):
    def test_multiple_status_codes(self):
        """Test metrics with multiple label combinations."""
        # Generate responses with different status codes.
        self.client.get('/')  # 200
        self.client.get('/nonexistent/')  # 404
        # Get all status code combinations recorded for the metric.
        vector = get_metrics_vector('django_http_responses_total_by_status')
        # Find specific status codes.
        status_200_count = None
        status_404_count = None
        for labels, value in vector:
            if labels.get('status') == '200':
                status_200_count = value
            elif labels.get('status') == '404':
                status_404_count = value
        # Guard against None so a missing series fails with AssertionError
        # instead of a confusing TypeError from `None >= 1`.
        assert status_200_count is not None and status_200_count >= 1
        assert status_404_count is not None and status_404_count >= 1
        # Or test specific combinations directly.
        assert_metric_equal(1, 'django_http_responses_total_by_status', status='200')
        assert_metric_equal(1, 'django_http_responses_total_by_status', status='404')
from django_prometheus.testutils import get_metric, assert_metric_equal
# Fix: this snippet also needs pytest, save_registry and assert_metric_diff,
# which its original import line did not provide (NameError at runtime).
import pytest

from django_prometheus.testutils import assert_metric_diff, get_metric, save_registry

class ErrorMetricsTestCase(TestCase):
    def test_nonexistent_metric(self):
        """Test behavior with nonexistent metrics."""
        # A metric that was never registered yields None, not an exception.
        value = get_metric('nonexistent_metric')
        assert value is None
        # Unknown label values on an existing metric also yield None.
        value = get_metric(
            'django_http_requests_total_by_method',
            method='NONEXISTENT'
        )
        assert value is None

    def test_exception_tracking(self):
        """Test that exceptions are tracked in metrics."""
        registry = save_registry()
        # The Django test client re-raises view exceptions by default.
        with pytest.raises(Exception):
            self.client.get('/error-endpoint/')
        # Check the exception was tracked by type.
        assert_metric_diff(
            registry, 1,
            'django_http_exceptions_total_by_type',
            type='ValueError'  # or whatever exception type your view raises
        )
from django_prometheus.testutils import save_registry, get_metric
import time
class PerformanceTestCase(TestCase):
def test_metric_collection_performance(self):
"""Test that metric collection doesn't significantly impact performance."""
# Measure time with metrics
start = time.time()
registry = save_registry()
for i in range(100):
self.client.get('/')
metrics_time = time.time() - start
# Verify metrics were collected
final_count = get_metric(
'django_http_requests_total_by_method',
method='GET'
)
assert final_count >= 100
# Performance assertion (adjust based on requirements)
assert metrics_time < 5.0 # Should complete in under 5 secondsclass IsolatedMetricsTestCase(TestCase):
def setUp(self):
"""Save registry state before each test."""
self.registry = save_registry()
def test_isolated_operation(self):
"""Each test starts with known metric state."""
# Test operations knowing the baseline
self.client.get('/')
assert_metric_diff(self.registry, 1, 'django_http_requests_total_by_method', method='GET')import asyncio
from django.test import TransactionTestCase
# Fix: this snippet uses save_registry and assert_metric_diff without importing them.
from django_prometheus.testutils import assert_metric_diff, save_registry

class AsyncMetricsTestCase(TransactionTestCase):
    async def test_async_operations(self):
        """Test metrics with async operations."""
        registry = save_registry()
        # NOTE(review): async_operation_1/2 are placeholders — define them on
        # this class so each increments custom_async_operations_total once.
        await asyncio.gather(
            self.async_operation_1(),
            self.async_operation_2(),
        )
        # Both operations together should account for a delta of 2.
        assert_metric_diff(registry, 2, 'custom_async_operations_total')
from django.test import override_settings
class IntegrationTestCase(TestCase):
@override_settings(
PROMETHEUS_METRIC_NAMESPACE='test',
PROMETHEUS_EXPORT_MIGRATIONS=True
)
def test_full_integration(self):
"""Test complete monitoring integration."""
registry = save_registry()
# Complex workflow that exercises multiple metric types
user = User.objects.create(username='integration_test') # Model metrics
self.client.force_login(user) # Auth metrics
response = self.client.get('/api/data/') # HTTP metrics
cache.set('integration_key', 'value') # Cache metrics
# Verify all metric types were updated
assert_metric_diff(registry, 1, 'test_model_inserts_total', model='user')
assert_metric_diff(registry, 1, 'test_http_requests_total_by_method', method='GET')
# ... other assertionsInstall with Tessl CLI
npx tessl i tessl/pypi-django-prometheus