or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

asset-management.md background-callbacks.md callback-system.md component-libraries.md core-application.md index.md multi-page-apps.md special-values.md

docs/background-callbacks.md

# Background Callbacks

Background callbacks enable long-running operations to execute asynchronously without blocking the user interface. They support progress reporting, cancellation, and caching with Celery or diskcache backends.

## Capabilities

### Background Callback Managers

Manage background task execution and state persistence.

```python { .api }
class CeleryManager:
    def __init__(self, celery_app):
        """
        Background callback manager using Celery for distributed task processing.

        Parameters:
        - celery_app: Configured Celery application instance
        """

class DiskcacheManager:
    def __init__(
        self,
        cache: Any = None,
        cache_by: str = "session",
        expire: int = 60
    ):
        """
        Background callback manager using diskcache for simple task processing.

        Parameters:
        - cache: Diskcache Cache instance (created if None)
        - cache_by: Cache key strategy ('session', 'user', or custom function)
        - expire: Cache expiration time in seconds
        """
```

### Background Callback Decorator

Register callbacks for background execution with progress and cancellation support.

```python { .api }
@callback(
    output: Union[Output, List[Output]],
    inputs: Union[Input, List[Input]] = None,
    state: Union[State, List[State]] = None,
    background: bool = True,
    manager: Union[CeleryManager, DiskcacheManager] = None,
    running: List[Tuple[Output, Any]] = None,
    progress: List[Output] = None,
    progress_default: List[Any] = None,
    cancel: List[Input] = None,
    cache_by: List[Union[Input, State]] = None,
    **kwargs
):
    """
    Background callback decorator with progress reporting.

    Parameters:
    - background: Enable background execution (must be True)
    - manager: Background callback manager instance
    - running: List of (Output, value) pairs to update while running
    - progress: List of Output components for progress reporting
    - progress_default: Default values for progress outputs
    - cancel: List of Input components that can cancel execution
    - cache_by: Components to use for caching results
    """
```

## Usage Examples

### Basic Background Callback with Diskcache

```python
from dash import Dash, html, dcc, callback, Input, Output
from dash.long_callback import DiskcacheManager
import diskcache
import time

# Setup cache and manager
cache = diskcache.Cache("./cache")
background_callback_manager = DiskcacheManager(cache)

app = Dash(__name__)

app.layout = html.Div([
    html.H1("Background Callback Demo"),
    html.Button("Start Long Task", id="start-btn"),
    html.Div(id="result"),
    html.Div(id="status")
])

@callback(
    Output("result", "children"),
    Input("start-btn", "n_clicks"),
    background=True,
    manager=background_callback_manager,
    running=[
        (Output("status", "children"), "Task is running..."),
        (Output("start-btn", "disabled"), True)
    ],
    prevent_initial_call=True
)
def long_running_task(n_clicks):
    # Simulate long-running operation
    time.sleep(10)
    return f"Task completed! Button was clicked {n_clicks} times."
```

### Background Callback with Progress Reporting

```python
import time

app.layout = html.Div([
    html.Button("Start Task", id="start-btn"),
    dcc.Progress(id="progress-bar", value=0),
    html.Div(id="progress-text"),
    html.Div(id="result"),
    html.Button("Cancel", id="cancel-btn", disabled=True)
])

@callback(
    Output("result", "children"),
    Input("start-btn", "n_clicks"),
    background=True,
    manager=background_callback_manager,
    running=[
        (Output("progress-text", "children"), "Task running..."),
        (Output("start-btn", "disabled"), True),
        (Output("cancel-btn", "disabled"), False)
    ],
    progress=[
        Output("progress-bar", "value"),
        Output("progress-bar", "max")
    ],
    progress_default=[0, 100],
    cancel=[Input("cancel-btn", "n_clicks")],
    prevent_initial_call=True
)
def task_with_progress(set_progress, n_clicks):
    total_steps = 100

    for i in range(total_steps):
        # Update progress
        set_progress((i, total_steps))

        # Simulate work
        time.sleep(0.1)

    # Final progress update
    set_progress((total_steps, total_steps))

    return "Task completed successfully!"
```

### Celery Background Callback

```python
from celery import Celery
from dash.long_callback import CeleryManager

# Setup Celery
celery_app = Celery(__name__, broker='redis://localhost:6379/0')
background_callback_manager = CeleryManager(celery_app)

@callback(
    Output("result", "children"),
    Input("start-btn", "n_clicks"),
    background=True,
    manager=background_callback_manager,
    running=[(Output("status", "children"), "Processing with Celery...")],
    prevent_initial_call=True
)
def celery_background_task(n_clicks):
    # This runs in a Celery worker process
    import requests

    # Example: fetch data from multiple APIs
    results = []
    apis = ['https://api1.com/data', 'https://api2.com/data', 'https://api3.com/data']

    for api in apis:
        try:
            response = requests.get(api, timeout=30)
            results.append(response.json())
        except Exception as e:
            results.append(f"Error: {str(e)}")

    return f"Fetched data from {len(results)} APIs"
```

### Cached Background Callback

```python
@callback(
    Output("cached-result", "children"),
    [Input("param1", "value"), Input("param2", "value")],
    State("expensive-data", "data"),
    background=True,
    manager=background_callback_manager,
    cache_by=[Input("param1", "value"), Input("param2", "value")],
    running=[(Output("loading", "children"), "Computing...")],
    prevent_initial_call=True
)
def expensive_computation(param1, param2, data):
    # This expensive computation will be cached based on param1 and param2
    # Results are reused for identical parameter combinations

    time.sleep(5)  # Simulate expensive computation

    result = sum(item * param1 * param2 for item in data)
    return f"Computed result: {result}"
```

### File Processing with Progress

```python
import pandas as pd

app.layout = html.Div([
    dcc.Upload(id="upload-data", children=html.Button("Upload CSV")),
    html.Button("Process File", id="process-btn", disabled=True),
    dcc.Progress(id="progress", value=0),
    html.Div(id="progress-info"),
    html.Div(id="processing-result")
])

@callback(
    Output("processing-result", "children"),
    Input("process-btn", "n_clicks"),
    State("upload-data", "contents"),
    background=True,
    manager=background_callback_manager,
    running=[
        (Output("process-btn", "disabled"), True),
        (Output("progress-info", "children"), "Processing file...")
    ],
    progress=[Output("progress", "value"), Output("progress", "max")],
    progress_default=[0, 100],
    prevent_initial_call=True
)
def process_large_file(set_progress, n_clicks, file_contents):
    if not file_contents:
        return "No file uploaded"

    # Decode and process file
    import base64
    import io

    content_string = file_contents.split(',')[1]
    decoded = base64.b64decode(content_string)
    df = pd.read_csv(io.StringIO(decoded.decode('utf-8')))

    total_rows = len(df)
    processed_rows = 0
    results = []

    # Process in chunks with progress updates
    chunk_size = max(1, total_rows // 100)

    for i in range(0, total_rows, chunk_size):
        chunk = df.iloc[i:i+chunk_size]

        # Simulate processing
        time.sleep(0.1)
        processed_chunk = chunk.sum(numeric_only=True)
        results.append(processed_chunk)

        processed_rows += len(chunk)
        set_progress((processed_rows, total_rows))

    return f"Processed {total_rows} rows successfully"
```

## Types

```python { .api }
BackgroundCallbackManager = Union[CeleryManager, DiskcacheManager]
ProgressSetter = Callable[[Tuple[int, int]], None]
CacheByFunction = Callable[..., str]
BackgroundCallbackFunction = Callable[..., Any]
```