CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-mistralai

Python Client SDK for the Mistral AI API with chat completions, embeddings, fine-tuning, and agent capabilities.

Pending
Overview
Eval results
Files

docs/batch.md

Batch Processing

Submit and manage batch processing jobs for handling large volumes of requests efficiently. Batch processing is ideal for processing many requests asynchronously at lower cost.

Capabilities

Create Batch Job

Submit a batch processing job with multiple requests.

def create(
    input_files: List[str],
    endpoint: str,
    completion_window: str,
    metadata: Optional[dict] = None,
    **kwargs
) -> BatchJobOut:
    """
    Submit a new batch processing job.

    Parameters:
    - input_files: IDs of previously uploaded files holding the batch requests
    - endpoint: target API endpoint to process ("/v1/chat/completions", "/v1/embeddings")
    - completion_window: time allowed for the batch to finish ("24h")
    - metadata: optional free-form metadata attached to the job

    Returns:
    BatchJobOut describing the newly created job
    """

def list(
    limit: Optional[int] = None,
    after: Optional[str] = None,
    **kwargs
) -> BatchJobsOut:
    """
    Enumerate existing batch processing jobs.

    Parameters:
    - limit: cap on the number of jobs returned
    - after: pagination cursor pointing past the last job already seen

    Returns:
    BatchJobsOut containing the requested page of jobs
    """

def get(batch_id: str, **kwargs) -> BatchJobOut:
    """
    Fetch a single batch processing job by its identifier.

    Parameters:
    - batch_id: unique identifier of the batch job

    Returns:
    BatchJobOut with the job's detailed, current state
    """

def cancel(batch_id: str, **kwargs) -> BatchJobOut:
    """
    Request cancellation of a batch processing job.

    Parameters:
    - batch_id: unique identifier of the batch to cancel

    Returns:
    The updated batch job reflecting the cancellation
    """

Usage Examples

Prepare Batch Requests

import json

# Build 100 chat-completion request payloads, one per topic index.
batch_requests = [
    {
        "custom_id": f"request-{i}",
        "method": "POST",
        "url": "/v1/chat/completions",
        "body": {
            "model": "mistral-small-latest",
            "messages": [
                {"role": "user", "content": f"Summarize topic number {i}"}
            ],
            "max_tokens": 100
        }
    }
    for i in range(100)
]

# Persist as JSONL: one JSON document per line, the format batch uploads expect.
with open("batch_requests.jsonl", "w") as f:
    f.writelines(json.dumps(request) + "\n" for request in batch_requests)

Submit Batch Job

from mistralai import Mistral

# NOTE(review): placeholder key — read from the environment in real code.
client = Mistral(api_key="your-api-key")

# Upload the JSONL requests file; purpose="batch" marks it for batch processing.
upload_result = client.files.upload(
    file="batch_requests.jsonl",
    purpose="batch"
)

# Create the batch job, referencing the uploaded file by its server-side ID.
batch_job = client.batch.create(
    input_files=[upload_result.id],
    endpoint="/v1/chat/completions",
    completion_window="24h",
    metadata={"description": "Batch summarization job"}
)

print(f"Created batch job: {batch_job.id}")
print(f"Status: {batch_job.status}")

Monitor Batch Progress

import time

# Poll the batch job until it reaches a terminal state.
while True:
    batch_status = client.batch.get(batch_job.id)
    print(f"Batch status: {batch_status.status}")
    print(f"Completed: {batch_status.request_counts.completed}")
    print(f"Failed: {batch_status.request_counts.failed}")
    print(f"Total: {batch_status.request_counts.total}")

    # "expired" is a terminal state too (a batch can expire before completion);
    # without it here the loop would poll an expired batch forever.
    if batch_status.status in ["completed", "failed", "cancelled", "expired"]:
        break

    time.sleep(60)  # Check every minute

# Download results only on success, and only if an output file was produced.
if batch_status.status == "completed" and batch_status.output_file_id:
    results = client.files.download(batch_status.output_file_id)

    # Save results as raw bytes — download() presumably returns bytes; verify against the SDK.
    with open("batch_results.jsonl", "wb") as f:
        f.write(results)

    print("Batch processing completed successfully!")

Process Results

import json

# Walk the JSONL results file and report each request's outcome.
with open("batch_results.jsonl", "r") as f:
    for raw_line in f:
        record = json.loads(raw_line)
        custom_id = record["custom_id"]
        response = record["response"]

        # Failed requests carry the error payload in the response body.
        if response["status_code"] != 200:
            error = response["body"]
            print(f"{custom_id}: Error - {error}")
            continue

        # Successful requests: print the first 100 characters of the completion.
        content = response["body"]["choices"][0]["message"]["content"]
        print(f"{custom_id}: {content[:100]}...")

Types

Batch Job Types

class BatchJobOut:
    """A batch processing job as returned by create/get/cancel."""

    id: str  # unique job identifier
    object: str  # object type tag
    endpoint: str  # API endpoint the batch targets
    errors: Optional[dict]  # validation/processing errors, if any
    # NOTE(review): singular file ID here, while create() accepts a list of
    # input_files — confirm how multiple inputs are represented.
    input_file_id: str
    completion_window: str  # e.g. "24h"
    status: str  # one of the documented status values (validating .. cancelled)
    output_file_id: Optional[str]  # results file ID, set on completion
    error_file_id: Optional[str]  # per-request error file ID, if any
    # Lifecycle timestamps — presumably Unix epoch seconds; TODO confirm units.
    created_at: int
    in_progress_at: Optional[int]
    expires_at: int
    finalizing_at: Optional[int]
    completed_at: Optional[int]
    failed_at: Optional[int]
    expired_at: Optional[int]
    cancelling_at: Optional[int]
    cancelled_at: Optional[int]
    request_counts: BatchJobRequestCounts  # total/completed/failed counters
    metadata: Optional[dict]  # caller-supplied metadata from create()

class BatchJobsOut:
    """A page of batch jobs as returned by list()."""

    object: str  # object type tag
    data: List[BatchJobOut]  # jobs in this page
    first_id: Optional[str]  # ID of the first job in the page
    last_id: Optional[str]  # ID of the last job — presumably usable as the `after` cursor; verify
    has_more: bool  # True when further pages exist

class BatchJobRequestCounts:
    """Progress counters for the individual requests inside a batch."""

    total: int  # total requests submitted in the batch
    completed: int  # requests finished successfully
    failed: int  # requests that errored

Batch Input Format

class BatchJobIn:
    """Input payload for creating a batch job (mirrors create()'s parameters)."""

    input_files: List[str]  # IDs of uploaded files containing the requests
    endpoint: str  # target endpoint path
    completion_window: str  # e.g. "24h"
    metadata: Optional[dict]  # optional caller metadata

Status Values

  • validating: Batch is being validated
  • in_progress: Batch is being processed
  • finalizing: Batch processing is finishing
  • completed: Batch completed successfully
  • failed: Batch failed to process
  • expired: Batch expired before completion
  • cancelling: Batch is being cancelled
  • cancelled: Batch was cancelled

Error Handling

class BatchError:
    """An error raised while validating or processing a batch."""

    code: Optional[str]  # machine-readable error code
    message: Optional[str]  # human-readable description
    param: Optional[str]  # offending parameter, if applicable
    line: Optional[int]  # presumably the line number in the input JSONL file; verify

Install with Tessl CLI

npx tessl i tessl/pypi-mistralai

docs

agents.md

audio.md

batch.md

beta.md

chat-completions.md

classification.md

embeddings.md

files.md

fim.md

fine-tuning.md

index.md

models.md

ocr.md

tile.json