# Predictions and Jobs

Synchronous and asynchronous prediction capabilities with comprehensive job management, including status tracking, result retrieval, and cancellation support.

## Capabilities

### Synchronous Predictions

Make synchronous predictions that block until completion and return results directly.

```python { .api }
def predict(
    self,
    *data,
    api_name: str | None = None,
    fn_index: int | None = None
) -> Any:
    """
    Make a synchronous prediction to the Gradio app.

    Parameters:
    - *data: Input data for the prediction; types depend on the endpoint
    - api_name: Name of the API endpoint (e.g., "/predict")
    - fn_index: Index of the function if api_name is not provided

    Returns:
    The prediction result from the Gradio app

    Raises:
    - AppError: If the Gradio app returns an error
    - AuthenticationError: If authentication fails
    """
```

### Asynchronous Jobs

Submit asynchronous jobs for non-blocking prediction execution; the returned Job object supports status tracking, result retrieval, and cancellation.

```python { .api }
def submit(
    self,
    *data,
    api_name: str | None = None,
    fn_index: int | None = None,
    result_callbacks: list[Callable] | None = None
) -> Job:
    """
    Submit an asynchronous job to the Gradio app.

    Parameters:
    - *data: Input data for the prediction
    - api_name: Name of the API endpoint
    - fn_index: Index of the function if api_name is not provided
    - result_callbacks: Functions to call when the job completes

    Returns:
    Job object for tracking the asynchronous prediction
    """
```

### Job Management

The Job object provides status tracking, result retrieval, output streaming, and cancellation for asynchronous predictions.

```python { .api }
class Job:
    def result(self, timeout: float | None = None) -> Any:
        """
        Get the final result of the job (blocking).

        Parameters:
        - timeout: Maximum time to wait for completion, in seconds

        Returns:
        The final result of the job

        Raises:
        - TimeoutError: If the timeout is reached before completion
        """

    def status(self) -> StatusUpdate:
        """
        Get the current status of the job.

        Returns:
        StatusUpdate dictionary containing:
        - msg: Status message
        - progress_data: Progress information if available
        - success: Whether the job completed successfully
        - time: Timestamp information
        """

    def cancel(self) -> bool:
        """
        Cancel the running job.

        Returns:
        True if cancellation was successful, False otherwise
        """

    def outputs(self) -> list[tuple | Any]:
        """
        Get all outputs generated so far by the job.

        Returns:
        List of output values generated during job execution
        """

    def __iter__(self) -> Job:
        """
        Make Job iterable for streaming results.

        Returns:
        Iterator over job outputs as they become available
        """

    def __next__(self) -> tuple | Any:
        """
        Get the next output from the job iterator.

        Returns:
        Next output value

        Raises:
        - StopIteration: When no more outputs are available
        """

    async def __aiter__(self) -> AsyncGenerator[Update, None]:
        """
        Make Job async iterable for streaming updates.

        Returns:
        Async generator yielding job updates as they become available
        """
```

### Job Status and Updates

Status tracking and update structures for monitoring job progress.

```python { .api }
StatusUpdate = dict[str, Any]  # Contains msg, progress_data, success, time fields
```

## Usage Examples

### Basic Predictions

```python
from gradio_client import Client

client = Client("abidlabs/whisper-large-v2")

# Synchronous prediction
result = client.predict("audio_file.wav", api_name="/predict")
print(result)

# Specify by function index instead of name
result = client.predict("input_data", fn_index=0)
print(result)
```
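
The predict() signature above lists AppError and AuthenticationError as possible failures. The sketch below shows one way to catch them, assuming both classes are importable from gradio_client.exceptions; see error-handling.md for the authoritative exception reference.

```python
from gradio_client import Client
# Assumption: both exception classes live in gradio_client.exceptions;
# confirm the import path against error-handling.md.
from gradio_client.exceptions import AppError, AuthenticationError

client = Client("abidlabs/whisper-large-v2")

try:
    result = client.predict("audio_file.wav", api_name="/predict")
    print(result)
except AuthenticationError as exc:
    # Credentials were missing or rejected by the Space
    print(f"Authentication failed: {exc}")
except AppError as exc:
    # The Gradio app raised an error while handling the request
    print(f"App returned an error: {exc}")
```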

### Asynchronous Jobs

```python
from gradio_client import Client

client = Client("abidlabs/whisper-large-v2")

# Submit asynchronous job
job = client.submit("audio_file.wav", api_name="/predict")

# Check status
status = job.status()
print(f"Status: {status}")

# Get result (blocks until completion)
result = job.result()
print(f"Result: {result}")

# Get result with timeout
try:
    result = job.result(timeout=30.0)
except TimeoutError:
    print("Job timed out")
    job.cancel()
```

### Job Monitoring and Control

```python
from gradio_client import Client
import time

client = Client("abidlabs/long-running-process")

# Submit job
job = client.submit("large_input.data", api_name="/process")

# Monitor progress until the status reports success or failure
while True:
    status = job.status()
    print(f"Status: {status.get('msg', 'Unknown')}")

    if status.get('success') is not None:
        break

    time.sleep(1)

# Get all outputs generated so far
all_outputs = job.outputs()
print(f"Generated {len(all_outputs)} outputs")

# Cancel if the job did not finish successfully
if not status.get('success', False):
    cancelled = job.cancel()
    print(f"Cancellation successful: {cancelled}")
```

### Streaming Results

```python
from gradio_client import Client

client = Client("abidlabs/streaming-app")

# Submit job and iterate over results
job = client.submit("streaming_input", api_name="/stream")

for output in job:
    print(f"Received: {output}")
    # Process each output as it arrives

# Final result
final_result = job.result()
print(f"Final: {final_result}")
```
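
The Job class also defines __aiter__ (see Job Management above), so updates can be consumed from async code. This is a minimal sketch based on that signature, assuming async iteration yields the documented Update objects.

```python
import asyncio

from gradio_client import Client

client = Client("abidlabs/streaming-app")

async def consume(job):
    # Assumption: async iteration yields Update objects as documented
    # in the __aiter__ signature above; adapt handling to the real payload.
    async for update in job:
        print(f"Update: {update}")

job = client.submit("streaming_input", api_name="/stream")
asyncio.run(consume(job))
```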

### Callback Functions

```python
from gradio_client import Client

def on_result(result):
    print(f"Job completed with result: {result}")

def on_error(error):
    print(f"Job failed with error: {error}")

client = Client("abidlabs/whisper")

# Submit with result callbacks
job = client.submit(
    "audio.wav",
    api_name="/predict",
    result_callbacks=[on_result]
)

# Continue with other work while job runs asynchronously
```