CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-replicate

Python client for Replicate

Pending
Overview
Eval results
Files

docs/models-predictions.md

Models & Predictions

Comprehensive management of AI models, versions, and prediction lifecycle including creation, monitoring, cancellation, and output retrieval.

Capabilities

Model Management

Discover, retrieve, and manage AI models hosted on Replicate.

class Models:
    def get(self, name: str) -> Model:
        """
        Fetch a single model by its qualified name.

        Parameters:
        - name: Fully qualified identifier, e.g. "owner/name"

        Returns:
        A Model instance carrying metadata plus version access
        """

    def list(self, **params) -> Page[Model]:
        """
        Retrieve a page of available models.

        Returns:
        A Page containing Model instances
        """

    def search(self, query: str, **params) -> Page[Model]:
        """
        Find models matching a free-text query.

        Parameters:
        - query: Text to search for

        Returns:
        A Page of Model instances matching the query
        """

    def create(
        self,
        owner: str,
        name: str,
        *,
        hardware: str,
        visibility: Literal["public", "private"],
        description: Optional[str] = None,
        github_url: Optional[str] = None,
        paper_url: Optional[str] = None,
        license_url: Optional[str] = None,
        cover_image_url: Optional[str] = None
    ) -> Model:
        """
        Register a brand-new model under the given owner.

        Parameters:
        - owner: Username or organization that will own the model
        - name: Short name for the model
        - hardware: Hardware SKU the model will run on
        - visibility: Either "public" or "private"
        - description: Free-form description (optional)
        - github_url: Link to the source repository (optional)
        - paper_url: Link to an associated research paper (optional)
        - license_url: Link to the license text (optional)
        - cover_image_url: Link to a cover image (optional)

        Returns:
        The newly created Model
        """

    async def async_create(
        self,
        owner: str,
        name: str,
        **params
    ) -> Model:
        """
        Asynchronous counterpart of create.

        Accepts the same parameters as the create method.

        Returns:
        The newly created Model
        """

Model Objects

Models represent AI models with metadata, versions, and capabilities.

class Model:
    url: str
    """Web address where the model lives."""

    owner: str
    """Username or organization that owns the model."""

    name: str
    """Short name of the model."""

    description: Optional[str]
    """Free-form description, if one was provided."""

    visibility: Literal["public", "private"]
    """Whether the model is publicly visible or private."""

    github_url: Optional[str]
    """Link to the model's source repository, if any."""

    paper_url: Optional[str]
    """Link to an associated research paper, if any."""

    license_url: Optional[str]
    """Link to the model's license text, if any."""

    run_count: int
    """How many times the model has been run."""

    cover_image_url: Optional[str]
    """Link to the model's cover image, if any."""

    default_example: Optional[Prediction]
    """A sample prediction showcasing the model, if one is set."""

    latest_version: Optional[Version]
    """Most recently pushed version, if one exists."""

    @property
    def id(self) -> str:
        """The fully qualified name, formatted as `owner/name`."""

    @property
    def versions(self) -> Versions:
        """Namespace for working with this model's versions."""

Model Versions

Specific versions of models with schemas and prediction capabilities.

class Version:
    id: str
    """Identifier that uniquely names this version."""

    created_at: str
    """Timestamp recording when the version was published."""

    cog_version: Optional[str]
    """Cog release that produced this version, if known."""

    openapi_schema: Optional[Dict[str, Any]]
    """OpenAPI description of the version's inputs and outputs."""

class Versions:
    def get(self, id: str) -> Version:
        """Fetch one version of the model by its identifier."""

    def list(self, **params) -> Page[Version]:
        """Enumerate every version published for the model."""

Prediction Management

Create, monitor, and manage prediction requests for model execution.

class Predictions:
    def create(
        self,
        model: Optional[str] = None,
        version: Optional[str] = None,
        input: Optional[Dict[str, Any]] = None,
        *,
        stream: Optional[bool] = None,
        webhook: Optional[str] = None,
        webhook_events_filter: Optional[List[str]] = None,
        **params
    ) -> Prediction:
        """
        Submit a new prediction request.

        Parameters:
        - model: Qualified model name, "owner/name"
        - version: Explicit version ID; omit to use the latest
        - input: Keyword inputs forwarded to the model
        - stream: Request streaming output
        - webhook: URL to notify when the prediction finishes
        - webhook_events_filter: Which events should fire the webhook

        Returns:
        A Prediction whose progress and output can be tracked
        """

    def get(self, id: str) -> Prediction:
        """
        Look up an existing prediction.

        Parameters:
        - id: Identifier of the prediction

        Returns:
        The Prediction, including its latest status and output
        """

    def list(self, **params) -> Page[Prediction]:
        """
        Retrieve a page of predictions.

        Returns:
        A Page containing Prediction objects
        """

    def cancel(self, id: str) -> Prediction:
        """
        Stop a prediction that is still in flight.

        Parameters:
        - id: Identifier of the prediction

        Returns:
        The Prediction reflecting its canceled state
        """

Prediction Objects

Predictions represent model execution requests with status, input, output, and metadata.

class Prediction:
    id: str
    """Identifier that uniquely names this prediction."""

    model: str
    """Qualified model name in the form `owner/name`."""

    version: str
    """ID of the model version that ran the prediction."""

    status: Literal["starting", "processing", "succeeded", "failed", "canceled"]
    """Current lifecycle state of the prediction."""

    input: Optional[Dict[str, Any]]
    """Keyword inputs the prediction was created with."""

    output: Optional[Any]
    """Result produced by the model, once available."""

    logs: Optional[str]
    """Log text emitted while the prediction ran."""

    error: Optional[str]
    """Failure message, populated only when something went wrong."""

    metrics: Optional[Dict[str, Any]]
    """Metrics reported for the run."""

    created_at: Optional[str]
    """Timestamp recording when the prediction was created."""

    started_at: Optional[str]
    """Timestamp recording when execution began."""

    completed_at: Optional[str]
    """Timestamp recording when execution ended, if it has."""

    urls: Optional[Dict[str, str]]
    """Related API endpoints for this prediction (get, cancel)."""

    def wait(self, **params) -> "Prediction":
        """Block until the prediction reaches a terminal state."""

    def cancel(self) -> "Prediction":
        """Ask the API to stop this prediction."""

    def reload(self) -> "Prediction":
        """Refresh this prediction's fields from the API."""

    def stream(self) -> Iterator[ServerSentEvent]:
        """Iterate over streamed output events (requires streaming)."""

    @dataclass
    class Progress:
        """How far along a prediction is."""
        percentage: float

Usage Examples

Basic Model Execution

import replicate

# Simple synchronous run: blocks until the model finishes and returns its output.
output = replicate.run(
    "stability-ai/stable-diffusion-3",
    input={"prompt": "An astronaut riding a rainbow unicorn"}
)

# Handle file output: file-like results expose read(); other models may
# return plain values (str, list), in which case this branch is skipped.
if hasattr(output, 'read'):
    with open("output.png", "wb") as f:
        f.write(output.read())

Background Predictions

import replicate

# Create background prediction: resolve the model, then pin an exact version.
model = replicate.models.get("kvfrans/clipdraw")
version = model.versions.get("5797a99edc939ea0e9242d5e8c9cb3bc7d125b1eac21bda852e5cb79ede2cd9b")

# NOTE(review): a Version object is passed here although Predictions.create
# documents `version` as Optional[str] — presumably the client accepts both;
# verify against the client implementation.
prediction = replicate.predictions.create(
    version=version,
    input={"prompt": "Watercolor painting of an underwater submarine"}
)

# Inspect the prediction's current state without blocking.
print(f"Status: {prediction.status}")
print(f"Logs: {prediction.logs}")

# Block until the prediction reaches a terminal state.
prediction.wait()
print(f"Final status: {prediction.status}")

# Access output only if the run produced one (it is None on failure).
if prediction.output:
    with open("output.png", "wb") as f:
        f.write(prediction.output.read())

Webhooks

import replicate

# Create prediction with webhook: the given URL is notified for the
# "completed" event, so the caller does not need to poll for status.
prediction = replicate.predictions.create(
    model="ai-forever/kandinsky-2.2",
    input={"prompt": "Watercolor painting of an underwater submarine"},
    webhook="https://example.com/your-webhook",
    webhook_events_filter=["completed"]
)

Model Pipelines

import replicate

# Chain models together: feed one model's output into another.
laionide = replicate.models.get("afiaka87/laionide-v4")
swinir = replicate.models.get("jingyunliang/swinir")

# Generate image.
# NOTE(review): Version.predict() is not part of the Version interface
# documented above — confirm this method exists on the client in use.
image = laionide.latest_version.predict(prompt="avocado armchair")

# Upscale image, using the first model's result as input.
upscaled_image = swinir.latest_version.predict(image=image)

Listing and Searching

import replicate

# List your predictions (returned as a Page with a .results sequence).
predictions = replicate.predictions.list()
for prediction in predictions.results:
    print(f"ID: {prediction.id}, Status: {prediction.status}")

# Paginate through results.
# NOTE(review): the cursor is passed positionally although Predictions.list
# is documented above as list(self, **params) — verify the client accepts
# a positional cursor argument.
if predictions.next:
    next_page = replicate.predictions.list(predictions.next)

# Search for models matching a free-text query.
results = replicate.models.search("text-to-image")
for model in results.results:
    print(f"{model.owner}/{model.name}: {model.description}")

Cancellation

import replicate

# Create and cancel prediction.
prediction = replicate.predictions.create(
    model="kvfrans/clipdraw",
    input={"prompt": "Complex artwork"}
)

# Cancel only while the prediction is still in a non-terminal state
# ("starting" or "processing" per the Prediction.status literal).
if prediction.status in ["starting", "processing"]:
    prediction.cancel()
    print("Prediction canceled")

Install with Tessl CLI

npx tessl i tessl/pypi-replicate

docs

client.md

collections-training.md

deployments-webhooks.md

exceptions.md

files.md

index.md

models-predictions.md

tile.json