Python client for Together's Cloud Platform providing comprehensive AI model APIs
File upload, listing, retrieval, and deletion operations for fine-tuning datasets, batch processing workflows, and data management. Supports various file formats and provides comprehensive file metadata and content access.
Upload files for fine-tuning and batch processing operations.
def upload(
    file: str,
    purpose: Optional[str] = None
) -> FileResponse:
    """
    Upload a file for use with fine-tuning or batch processing.

    Args:
        file: Path to the file to upload
        purpose: File purpose ('fine-tune', 'batch-api', etc.)

    Returns:
        FileResponse with file metadata
    """

# List all uploaded files with metadata and filtering options.
def list() -> FileList:
    """
    List all uploaded files.

    Returns:
        FileList containing file objects
    """

# Retrieve metadata and content for specific files.
def retrieve(id: str) -> FileObject:
    """
    Retrieve file metadata by ID.

    Args:
        id: File identifier

    Returns:
        FileObject with file metadata
    """
def retrieve_content(
    id: str,
    output: Optional[str] = None
) -> str:
    """
    Retrieve file content by ID.

    Args:
        id: File identifier
        output: Optional output file path (when given, content is also
            written to this local path — see the download example below)

    Returns:
        File content as string
    """

# Delete uploaded files to manage storage and cleanup.
def delete(id: str) -> FileDeleteResponse:
    """
    Delete a file by ID.

    Args:
        id: File identifier

    Returns:
        FileDeleteResponse confirming deletion
    """

# All file operations support asynchronous execution for concurrent processing.
# Async variants mirror the synchronous interface one-for-one
# (available on the AsyncTogether client shown later).
async def upload(file: str, purpose: Optional[str] = None) -> FileResponse: ...
async def list() -> FileList: ...
async def retrieve(id: str) -> FileObject: ...
async def retrieve_content(id: str, output: Optional[str] = None) -> str: ...
async def delete(id: str) -> FileDeleteResponse: ...

from together import Together
client = Together()

# Upload a fine-tuning dataset
response = client.files.upload(
    file="training_data.jsonl",
    purpose="fine-tune",
)

# Echo back the metadata the service returned for the new file.
print(f"Uploaded file ID: {response.id}")
print(f"File name: {response.filename}")
print(f"File size: {response.bytes} bytes")
print(f"Purpose: {response.purpose}")

file_list = client.files.list()
print(f"Total files: {len(file_list.data)}")

# Walk every stored file and show its metadata.
for file_obj in file_list.data:
    print(f"ID: {file_obj.id}")
    print(f"Name: {file_obj.filename}")
    print(f"Size: {file_obj.bytes} bytes")
    print(f"Purpose: {file_obj.purpose}")
    print(f"Created: {file_obj.created_at}")
    print("---")

file_id = "file-d0d318cb-b7d9-493a-bd70-1cfe089d3815"
# Get file metadata
file_obj = client.files.retrieve(file_id)
print(f"File: {file_obj.filename}")
print(f"Status: {file_obj.status}")
print(f"Size: {file_obj.bytes} bytes")

# Get file content
content = client.files.retrieve_content(file_id)
print(f"Content preview: {content[:200]}...")

file_id = "file-d0d318cb-b7d9-493a-bd70-1cfe089d3815"
# Download to local file
content = client.files.retrieve_content(
    id=file_id,
    output="downloaded_file.jsonl",
)
print(f"File downloaded to: downloaded_file.jsonl")
print(f"Content size: {len(content)} characters")

import json
import os
def prepare_fine_tuning_data(
    conversations: list,
    output_file: str,
    model: str = "meta-llama/Llama-3.2-3B-Instruct",
):
    """Prepare conversation data for fine-tuning.

    Serializes each conversation as one JSON object per line (JSONL),
    the format used by the upload examples in this file.

    Args:
        conversations: Items each holding a "messages" list of chat turns.
        output_file: Path of the JSONL file to write.
        model: Model identifier stamped onto every training example.
            (Previously hard-coded; the default preserves old behavior.)

    Returns:
        The path of the written file (same as ``output_file``).

    Raises:
        KeyError: If a conversation lacks a "messages" key.
    """
    # Explicit UTF-8: the default `open()` encoding is platform-dependent,
    # which could corrupt non-ASCII training text on some systems.
    with open(output_file, 'w', encoding="utf-8") as f:
        for conversation in conversations:
            training_example = {
                "messages": conversation["messages"],
                "model": model,
            }
            f.write(json.dumps(training_example) + '\n')
    return output_file
def upload_and_track_file(client: Together, file_path: str, purpose: str = "fine-tune"):
    """Upload a local file and return a tracking dict.

    The returned dict maps tracking fields (file_id, filename, size_bytes,
    purpose, status, local_path) to values taken from the upload response
    plus the original local path.
    """
    response = client.files.upload(file=file_path, purpose=purpose)
    return {
        "file_id": response.id,
        "filename": response.filename,
        "size_bytes": response.bytes,
        "purpose": response.purpose,
        "status": response.status,
        "local_path": file_path,
    }
# Example usage: two tiny single-turn conversations as training material.
sample_conversations = [
    {
        "messages": [
            {"role": "user", "content": "What is Python?"},
            {"role": "assistant", "content": "Python is a high-level programming language..."},
        ]
    },
    {
        "messages": [
            {"role": "user", "content": "Explain machine learning"},
            {"role": "assistant", "content": "Machine learning is a subset of AI..."},
        ]
    },
]

# Prepare and upload training data
data_file = prepare_fine_tuning_data(sample_conversations, "training_data.jsonl")
file_info = upload_and_track_file(client, data_file)
print(f"Training file uploaded: {file_info['file_id']}")
print(f"Ready for fine-tuning with file: {file_info['filename']}")

def upload_multiple_files(client: Together, file_paths: list, purpose: str = "batch-api"):
    """Upload multiple files and return their IDs.

    Missing local paths are reported and skipped; only successful uploads
    appear in the returned list of tracking dicts.
    """
    uploaded_files = []
    for file_path in file_paths:
        # Guard clause: skip nonexistent paths rather than failing the batch.
        if not os.path.exists(file_path):
            print(f"File not found: {file_path}")
            continue
        response = client.files.upload(file=file_path, purpose=purpose)
        uploaded_files.append({
            "file_id": response.id,
            "filename": response.filename,
            "local_path": file_path,
            "size": response.bytes,
        })
        print(f"Uploaded: {response.filename} ({response.id})")
    return uploaded_files
# Upload batch processing files
batch_files = [
    "batch_input_1.jsonl",
    "batch_input_2.jsonl",
    "batch_input_3.jsonl",
]
uploaded = upload_multiple_files(client, batch_files, "batch-api")
print(f"Successfully uploaded {len(uploaded)} files for batch processing")

def cleanup_old_files(client: Together, days_old: int = 30):
    """Delete files older than specified days.

    Deletion is best-effort: failures are printed and skipped. Returns the
    filenames that were successfully deleted. Assumes ``created_at`` is a
    Unix timestamp in seconds (consistent with the comparison below).
    """
    import time

    # Files created before this Unix timestamp are considered stale.
    cutoff_time = int(time.time()) - days_old * 24 * 60 * 60
    deleted_files = []
    for file_obj in client.files.list().data:
        if file_obj.created_at >= cutoff_time:
            continue
        try:
            client.files.delete(file_obj.id)
            deleted_files.append(file_obj.filename)
            print(f"Deleted: {file_obj.filename}")
        except Exception as e:
            print(f"Failed to delete {file_obj.filename}: {e}")
    return deleted_files
# Clean up files older than 30 days
deleted = cleanup_old_files(client, days_old=30)
print(f"Cleaned up {len(deleted)} old files")

import asyncio
from together import AsyncTogether
async def async_file_operations():
    """Upload several local files concurrently and report the results.

    Returns:
        List of successful upload responses; failed uploads are printed
        with the path that failed and skipped.
    """
    client = AsyncTogether()

    file_paths = ["data1.jsonl", "data2.jsonl", "data3.jsonl"]
    # Keep the filtered path list so each gather result can be matched to
    # its path. BUG FIX: the original indexed `file_paths[i]` while tasks
    # were built from a filtered subset, so once any path was missing the
    # error messages named the wrong file.
    existing_paths = [path for path in file_paths if os.path.exists(path)]
    upload_tasks = [
        client.files.upload(file=path, purpose="fine-tune")
        for path in existing_paths
    ]
    # return_exceptions=True: collect per-task failures instead of raising.
    uploaded_files = await asyncio.gather(*upload_tasks, return_exceptions=True)

    # Process results, pairing each result with the path it came from.
    successful_uploads = []
    for path, result in zip(existing_paths, uploaded_files):
        if isinstance(result, Exception):
            print(f"Failed to upload {path}: {result}")
        else:
            successful_uploads.append(result)
            print(f"Uploaded: {result.filename} ({result.id})")
    return successful_uploads
# Run async file operations
uploaded_files = asyncio.run(async_file_operations())

class FileRequest:
    """Request payload for a file upload."""
    file: str                           # local path of the file to upload
    purpose: Optional[str] = None       # e.g. 'fine-tune', 'batch-api'

class FileResponse:
    """Metadata returned by a successful upload."""
    id: str                             # unique file identifier
    object: str                         # object type tag
    bytes: int                          # file size in bytes
    created_at: int                     # creation time (Unix timestamp, per cleanup example)
    filename: str
    purpose: str
    status: str
    status_details: Optional[str] = None
class FileObject:
    """Metadata for a stored file, as returned by retrieve() and list().

    Shares the same field set as FileResponse.
    """
    id: str                             # unique file identifier
    object: str                         # object type tag
    bytes: int                          # file size in bytes
    created_at: int                     # creation time (Unix timestamp, per cleanup example)
    filename: str
    purpose: str
    status: str
    status_details: Optional[str] = None
class FileList:
    """Result of files.list(): all files visible to the account."""
    object: str                 # object type tag
    data: List[FileObject]      # one entry per stored file
class FileDeleteResponse:
    """Confirmation returned by files.delete()."""
    id: str         # identifier of the deleted file
    object: str     # object type tag
    deleted: bool   # True when deletion succeeded

class FilePurpose:
    """Accepted values for the upload ``purpose`` field."""
    FINE_TUNE = "fine-tune"
    BATCH_API = "batch-api"
    ASSISTANTS = "assistants"
    VISION = "vision"
class FileType:
    """Common file-format extensions used with uploads."""
    JSONL = "jsonl"
    JSON = "json"
    TXT = "txt"
    CSV = "csv"

# Install with Tessl CLI
npx tessl i tessl/pypi-together