Python web application framework for building reactive analytical web apps without JavaScript
—
Quality
Pending
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Background callbacks enable long-running operations to execute asynchronously without blocking the user interface. They support progress reporting, cancellation, and caching with Celery or diskcache backends.
Manage background task execution and state persistence.
class CeleryManager:
    """Background callback manager backed by Celery for distributed task processing."""

    def __init__(self, celery_app):
        """
        Create a manager that dispatches background callbacks to Celery workers.

        Parameters:
        - celery_app: Configured Celery application instance
        """
class DiskcacheManager:
def __init__(
self,
cache_by: str = "session",
cache: Any = None,
expire: int = 60
):
"""
Background callback manager using diskcache for simple task processing.
Parameters:
- cache_by: Cache key strategy ('session', 'user', or custom function)
- cache: Diskcache Cache instance (created if None)
- expire: Cache expiration time in seconds
"""Register callbacks for background execution with progress and cancellation support.
# Decorator specification for registering a background callback.
# NOTE: when `progress` outputs are declared, Dash injects a progress
# setter as the FIRST positional argument of the decorated function
# (see task_with_progress / process_large_file examples below).
@callback(
output: Union[Output, List[Output]],
inputs: Union[Input, List[Input]] = None,
state: Union[State, List[State]] = None,
background: bool = True,
manager: Union[CeleryManager, DiskcacheManager] = None,
running: List[Tuple[Output, Any]] = None,
progress: List[Output] = None,
progress_default: List[Any] = None,
cancel: List[Input] = None,
cache_by: List[Union[Input, State]] = None,
**kwargs
):
"""
Background callback decorator with progress reporting.
Parameters:
- background: Enable background execution (must be True)
- manager: Background callback manager instance
- running: List of (Output, value) pairs to update while running
- progress: List of Output components for progress reporting
- progress_default: Default values for progress outputs
- cancel: List of Input components that can cancel execution
- cache_by: Components to use for caching results
"""from dash import Dash, html, dcc, callback, Input, Output
from dash.long_callback import DiskcacheManager
import diskcache
import time

# Build the on-disk cache and the manager that will run background callbacks.
disk_cache = diskcache.Cache("./cache")
background_callback_manager = DiskcacheManager(disk_cache)

app = Dash(__name__)

app.layout = html.Div([
    html.H1("Background Callback Demo"),
    html.Button("Start Long Task", id="start-btn"),
    html.Div(id="result"),
    html.Div(id="status")
])


@callback(
    Output("result", "children"),
    Input("start-btn", "n_clicks"),
    background=True,
    manager=background_callback_manager,
    running=[
        # While the task runs: show a status message and disable the button.
        (Output("status", "children"), "Task is running..."),
        (Output("start-btn", "disabled"), True)
    ],
    prevent_initial_call=True
)
def long_running_task(n_clicks):
    """Simulate a slow operation; the UI stays responsive while it runs."""
    time.sleep(10)
    return f"Task completed! Button was clicked {n_clicks} times."
import time
app.layout = html.Div([
    html.Button("Start Task", id="start-btn"),
    # html.Progress is the standard progress element; dcc has no Progress
    # component. Its value/max are string DOM attributes.
    html.Progress(id="progress-bar", value="0", max="100"),
    html.Div(id="progress-text"),
    html.Div(id="result"),
    html.Button("Cancel", id="cancel-btn", disabled=True)
])


@callback(
    Output("result", "children"),
    Input("start-btn", "n_clicks"),
    background=True,
    manager=background_callback_manager,
    running=[
        (Output("progress-text", "children"), "Task running..."),
        (Output("start-btn", "disabled"), True),
        # Enable the cancel button only while the task is running.
        (Output("cancel-btn", "disabled"), False)
    ],
    progress=[
        Output("progress-bar", "value"),
        Output("progress-bar", "max")
    ],
    progress_default=["0", "100"],
    cancel=[Input("cancel-btn", "n_clicks")],
    prevent_initial_call=True
)
def task_with_progress(set_progress, n_clicks):
    """
    Long task that reports incremental progress.

    Because `progress` outputs are declared, Dash injects `set_progress`
    as the first positional argument; each call pushes the given values
    to the progress outputs. Values are strings because html.Progress
    value/max are string attributes.
    """
    total_steps = 100
    for i in range(total_steps):
        # Update progress
        set_progress((str(i), str(total_steps)))
        # Simulate work
        time.sleep(0.1)
    # Ensure the bar reads 100% on completion.
    set_progress((str(total_steps), str(total_steps)))
    return "Task completed successfully!"
from celery import Celery
from dash.long_callback import CeleryManager
# Setup Celery
celery_app = Celery(__name__, broker='redis://localhost:6379/0')
background_callback_manager = CeleryManager(celery_app)
@callback(
Output("result", "children"),
Input("start-btn", "n_clicks"),
background=True,
manager=background_callback_manager,
running=[(Output("status", "children"), "Processing with Celery...")],
prevent_initial_call=True
)
def celery_background_task(n_clicks):
# This runs in a Celery worker process
import requests
# Example: fetch data from multiple APIs
results = []
apis = ['https://api1.com/data', 'https://api2.com/data', 'https://api3.com/data']
for api in apis:
try:
response = requests.get(api, timeout=30)
results.append(response.json())
except Exception as e:
results.append(f"Error: {str(e)}")
return f"Fetched data from {len(results)} APIs"@callback(
Output("cached-result", "children"),
[Input("param1", "value"), Input("param2", "value")],
State("expensive-data", "data"),
background=True,
manager=background_callback_manager,
cache_by=[Input("param1", "value"), Input("param2", "value")],
running=[(Output("loading", "children"), "Computing...")],
prevent_initial_call=True
)
def expensive_computation(param1, param2, data):
# This expensive computation will be cached based on param1 and param2
# Results are reused for identical parameter combinations
time.sleep(5) # Simulate expensive computation
result = sum(item * param1 * param2 for item in data)
return f"Computed result: {result}"import pandas as pd
app.layout = html.Div([
    dcc.Upload(id="upload-data", children=html.Button("Upload CSV")),
    html.Button("Process File", id="process-btn", disabled=True),
    # html.Progress is the standard progress element; dcc has no Progress
    # component. Its value/max are string DOM attributes.
    html.Progress(id="progress", value="0", max="100"),
    html.Div(id="progress-info"),
    html.Div(id="processing-result")
])


@callback(
    Output("processing-result", "children"),
    Input("process-btn", "n_clicks"),
    State("upload-data", "contents"),
    background=True,
    manager=background_callback_manager,
    running=[
        (Output("process-btn", "disabled"), True),
        (Output("progress-info", "children"), "Processing file...")
    ],
    progress=[Output("progress", "value"), Output("progress", "max")],
    progress_default=["0", "100"],
    prevent_initial_call=True
)
def process_large_file(set_progress, n_clicks, file_contents):
    """
    Decode an uploaded CSV and process it in chunks, reporting progress.

    `set_progress` is injected by Dash as the first positional argument
    because `progress` outputs are declared on this callback.
    """
    if not file_contents:
        return "No file uploaded"
    # Decode and process file
    import base64
    import io
    # dcc.Upload contents look like "data:<mime>;base64,<payload>".
    content_string = file_contents.split(',')[1]
    decoded = base64.b64decode(content_string)
    df = pd.read_csv(io.StringIO(decoded.decode('utf-8')))
    total_rows = len(df)
    processed_rows = 0
    results = []
    # Process in ~100 chunks so the progress bar advances smoothly.
    chunk_size = max(1, total_rows // 100)
    for i in range(0, total_rows, chunk_size):
        chunk = df.iloc[i:i + chunk_size]
        # Simulate processing
        time.sleep(0.1)
        results.append(chunk.sum(numeric_only=True))
        processed_rows += len(chunk)
        # html.Progress value/max are string attributes.
        set_progress((str(processed_rows), str(total_rows)))
    return f"Processed {total_rows} rows successfully"
BackgroundCallbackManager = Union[CeleryManager, DiskcacheManager]
# Callable that receives a (current, total) progress tuple.
ProgressSetter = Callable[[Tuple[int, int]], None]
# Function that derives a cache key from callback arguments.
CacheByFunction = Callable[..., str]
# Signature of a user-defined background callback function.
BackgroundCallbackFunction = Callable[..., Any]Install with Tessl CLI
npx tessl i tessl/pypi-dash