A Python library for easily interacting with trained machine learning models. It provides synchronous and asynchronous prediction capabilities with comprehensive job management, including status tracking, result retrieval, and cancellation support. Synchronous predictions block until completion and return their results directly.
def predict(
    self,
    *data,
    api_name: str | None = None,
    fn_index: int | None = None,
) -> Any:
    """
    Make a synchronous prediction to the Gradio app.

    Blocks until the prediction completes and returns the result directly.

    Parameters:
        *data: Input data for the prediction; types depend on the endpoint.
        api_name: Name of the API endpoint (e.g., "/predict").
        fn_index: Index of the function if api_name is not provided.

    Returns:
        The prediction result from the Gradio app.

    Raises:
        AppError: If the Gradio app returns an error.
        AuthenticationError: If authentication fails.
    """
    ...


# Submit asynchronous jobs for non-blocking prediction execution with
# comprehensive job management.
def submit(
    self,
    *data,
    api_name: str | None = None,
    fn_index: int | None = None,
    result_callbacks: list[Callable] | None = None,
) -> Job:
    """
    Submit an asynchronous job to the Gradio app.

    Returns immediately; the prediction runs in the background and can be
    tracked, awaited, or cancelled through the returned Job object.

    Parameters:
        *data: Input data for the prediction.
        api_name: Name of the API endpoint.
        fn_index: Index of the function if api_name is not provided.
        result_callbacks: Functions to call when the job completes.

    Returns:
        Job object for tracking the asynchronous prediction.
    """
    ...


# Comprehensive job management with status tracking, result retrieval,
# and cancellation capabilities.
class Job:
    """Handle to an asynchronous prediction submitted to a Gradio app."""

    def result(self, timeout: float | None = None) -> Any:
        """
        Get the final result of the job (blocking).

        Parameters:
            timeout: Maximum time to wait for completion, in seconds.

        Returns:
            The final result of the job.

        Raises:
            TimeoutError: If the timeout is reached before completion.
        """

    def status(self) -> StatusUpdate:
        """
        Get the current status of the job.

        Returns:
            StatusUpdate dictionary containing:
                - msg: Status message.
                - progress_data: Progress information, if available.
                - success: Whether the job completed successfully.
                - time: Timestamp information.
        """

    def cancel(self) -> bool:
        """
        Cancel the running job.

        Returns:
            True if cancellation was successful, False otherwise.
        """

    def outputs(self) -> list[tuple | Any]:
        """
        Get all outputs generated so far by the job.

        Returns:
            List of output values generated during job execution.
        """

    def __iter__(self) -> Job:
        """
        Make Job iterable for streaming results.

        Returns:
            Iterator over job outputs as they become available.
        """

    def __next__(self) -> tuple | Any:
        """
        Get the next output from the job iterator.

        Returns:
            The next output value.

        Raises:
            StopIteration: When no more outputs are available.
        """

    async def __aiter__(self) -> AsyncGenerator[Update, None]:
        """
        Make Job async-iterable for streaming updates.

        Returns:
            Async generator yielding job updates as they become available.
        """


# Status tracking and update structures for monitoring job progress.
# Status-update payload returned by Job.status(); contains the
# msg, progress_data, success, and time fields.
StatusUpdate = dict[str, Any]

# Example: synchronous prediction
from gradio_client import Client

client = Client("abidlabs/whisper-large-v2")

# Synchronous prediction
result = client.predict("audio_file.wav", api_name="/predict")
print(result)

# Specify by function index instead of name
result = client.predict("input_data", fn_index=0)
print(result)

from gradio_client import Client
client = Client("abidlabs/whisper-large-v2")

# Submit asynchronous job
job = client.submit("audio_file.wav", api_name="/predict")

# Check status
status = job.status()
print(f"Status: {status}")

# Get result (blocks until completion)
result = job.result()
print(f"Result: {result}")

# Get result with a timeout; cancel the job if it takes too long
try:
    result = job.result(timeout=30.0)
except TimeoutError:
    print("Job timed out")
    job.cancel()

from gradio_client import Client
import time

client = Client("abidlabs/long-running-process")

# Submit job
job = client.submit("large_input.data", api_name="/process")

# Monitor progress until the job reports a final success/failure state
while True:
    status = job.status()
    print(f"Status: {status.get('msg', 'Unknown')}")
    if status.get('success') is not None:
        break
    time.sleep(1)

# Get all outputs
all_outputs = job.outputs()
print(f"Generated {len(all_outputs)} outputs")

# Cancel if needed
if not status.get('success', False):
    cancelled = job.cancel()
    print(f"Cancellation successful: {cancelled}")

from gradio_client import Client
client = Client("abidlabs/streaming-app")

# Submit job and iterate over results as they stream in
job = client.submit("streaming_input", api_name="/stream")
for output in job:
    print(f"Received: {output}")
    # Process each output as it arrives

# Final result
final_result = job.result()
print(f"Final: {final_result}")

from gradio_client import Client
def on_result(result):
    print(f"Job completed with result: {result}")


def on_error(error):
    print(f"Job failed with error: {error}")


client = Client("abidlabs/whisper")

# Submit with result callbacks
job = client.submit(
    "audio.wav",
    api_name="/predict",
    result_callbacks=[on_result],
)
# Continue with other work while job runs asynchronously

# Install with Tessl CLI
npx tessl i tessl/pypi-gradio-client