Comprehensive Python client library for Google Cloud Vertex AI, offering machine learning tools, generative AI models, and MLOps capabilities
npx @tessl/cli install tessl/pypi-google-cloud-aiplatform@1.111.0

A comprehensive Python client library for Google Cloud Vertex AI, offering access to Google's integrated suite of machine learning tools and services. The library enables developers to build, deploy, and manage ML models using both AutoML and custom code approaches, supporting the entire machine learning development lifecycle from data preparation to model deployment and monitoring.
pip install google-cloud-aiplatform

For traditional Vertex AI resource management:
import google.cloud.aiplatform as aiplatform
# Initialize the SDK
aiplatform.init(project='your-project-id', location='us-central1')

For modern generative AI functionality:
import vertexai
from vertexai.generative_models import GenerativeModel
from vertexai.language_models import TextGenerationModel
from vertexai.vision_models import ImageGenerationModel
# Initialize Vertex AI
vertexai.init(project='your-project-id', location='us-central1')

A complete workflow with the traditional SDK, from dataset creation through online prediction:

import google.cloud.aiplatform as aiplatform
# Initialize
aiplatform.init(project='my-project', location='us-central1')
# Create a tabular dataset
dataset = aiplatform.TabularDataset.create(
display_name="my-dataset",
gcs_source="gs://my-bucket/dataset.csv"
)
# Train an AutoML model
job = aiplatform.AutoMLTabularTrainingJob(
display_name="my-training-job",
optimization_prediction_type="classification"
)
model = job.run(
dataset=dataset,
target_column="label"
)
# Deploy for predictions
endpoint = model.deploy(
deployed_model_display_name="my-model",
machine_type="n1-standard-4"
)
# Make predictions
predictions = endpoint.predict(instances=[[1.0, 2.0, 3.0]])

A basic generative AI workflow with the vertexai SDK:

import vertexai
from vertexai.generative_models import GenerativeModel
# Initialize
vertexai.init(project='my-project', location='us-central1')
# Use Gemini model
model = GenerativeModel('gemini-1.5-pro')
response = model.generate_content('Explain quantum computing')
print(response.text)
# Chat conversation
chat = model.start_chat()
response = chat.send_message('Hello!')
print(response.text)

The library is organized into two main APIs that serve different use cases:
Core SDK (google.cloud.aiplatform): a resource-based API that follows standard Google Cloud client patterns for managing datasets, training jobs, models, endpoints, and other Vertex AI resources.
Vertex AI SDK (vertexai): a modern, streamlined API for generative AI, covering text generation, chat, multimodal content, and image models.
This dual-API design allows developers to choose the right abstraction level for their needs while maintaining compatibility between both approaches.
Comprehensive dataset creation, management, and preparation for various ML tasks including tabular, image, text, video, and time series data.
class TabularDataset:
    @classmethod
    def create(cls, display_name: str, gcs_source: Union[str, Sequence[str]], **kwargs) -> 'TabularDataset': ...
def import_data(self, gcs_source: Union[str, Sequence[str]], **kwargs) -> None: ...
class ImageDataset:
    @classmethod
    def create(cls, display_name: str, gcs_source: str, import_schema_uri: str, **kwargs) -> 'ImageDataset': ...
class TextDataset:
    @classmethod
    def create(cls, display_name: str, gcs_source: Union[str, Sequence[str]], **kwargs) -> 'TextDataset': ...
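A minimal usage sketch for image datasets, assuming a JSONL import file in Cloud Storage and the single-label classification import schema; all paths and names are placeholders.

import google.cloud.aiplatform as aiplatform

aiplatform.init(project='my-project', location='us-central1')

# Create an image classification dataset; the GCS path below is a placeholder
image_dataset = aiplatform.ImageDataset.create(
    display_name="my-image-dataset",
    gcs_source="gs://my-bucket/image-import.jsonl",
    import_schema_uri=aiplatform.schema.dataset.ioformat.image.single_label_classification,
)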
AutoML and custom training capabilities for various ML tasks with comprehensive job management and monitoring.

class AutoMLTabularTrainingJob:
def __init__(self, display_name: str, optimization_prediction_type: str, **kwargs): ...
def run(self, dataset: TabularDataset, target_column: str, **kwargs) -> Model: ...
class CustomTrainingJob:
def __init__(self, display_name: str, script_path: str, container_uri: str, **kwargs): ...
    def run(self, dataset: Optional[Dataset] = None, **kwargs) -> Model: ...
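A sketch of a custom training job based on the signatures above; the script path, container images, and machine configuration are illustrative assumptions.

import google.cloud.aiplatform as aiplatform

aiplatform.init(project='my-project', location='us-central1', staging_bucket='gs://my-bucket')

# Train with a user-provided script; all URIs below are placeholders
job = aiplatform.CustomTrainingJob(
    display_name="my-custom-training",
    script_path="trainer/task.py",
    container_uri="us-docker.pkg.dev/vertex-ai/training/pytorch-gpu.1-13:latest",
    model_serving_container_image_uri="us-docker.pkg.dev/vertex-ai/prediction/pytorch-gpu.1-13:latest",
)

model = job.run(
    replica_count=1,
    machine_type="n1-standard-8",
)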
Model versioning, deployment, and serving with comprehensive endpoint management and resource optimization.

class Model:
    def deploy(self, endpoint: Optional[Endpoint] = None, deployed_model_display_name: Optional[str] = None, **kwargs) -> Endpoint: ...
    @classmethod
    def upload(cls, display_name: str, artifact_uri: str, serving_container_image_uri: str, **kwargs) -> 'Model': ...
class Endpoint:
    @classmethod
    def create(cls, display_name: str, **kwargs) -> 'Endpoint': ...
    def predict(self, instances: List[Dict], **kwargs) -> Prediction: ...
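A sketch of uploading externally trained model artifacts and deploying them behind a dedicated endpoint; the artifact path, serving container, and scaling bounds are placeholders.

import google.cloud.aiplatform as aiplatform

aiplatform.init(project='my-project', location='us-central1')

# Register model artifacts produced outside Vertex AI (paths are placeholders)
model = aiplatform.Model.upload(
    display_name="my-uploaded-model",
    artifact_uri="gs://my-bucket/model-artifacts/",
    serving_container_image_uri="us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-3:latest",
)

# Deploy to a new endpoint with simple autoscaling bounds
endpoint = aiplatform.Endpoint.create(display_name="my-endpoint")
model.deploy(
    endpoint=endpoint,
    machine_type="n1-standard-4",
    min_replica_count=1,
    max_replica_count=2,
)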
Modern generative AI capabilities including text generation, chat, multimodal interactions, and function calling.

class GenerativeModel:
def __init__(self, model_name: str, generation_config: Optional[GenerationConfig] = None, **kwargs): ...
def generate_content(self, contents: ContentsType, stream: bool = False, **kwargs) -> GenerationResponse: ...
def start_chat(self, history: Optional[List[Content]] = None, **kwargs) -> ChatSession: ...
class TextGenerationModel:
    @classmethod
    def from_pretrained(cls, model_name: str) -> 'TextGenerationModel': ...
    def predict(self, prompt: str, **kwargs) -> TextGenerationResponse: ...
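A sketch of streaming generation with an explicit generation config; the model name and parameter values are illustrative.

import vertexai
from vertexai.generative_models import GenerativeModel, GenerationConfig

vertexai.init(project='my-project', location='us-central1')

model = GenerativeModel(
    'gemini-1.5-flash',
    generation_config=GenerationConfig(temperature=0.2, max_output_tokens=512),
)

# Stream the response chunk by chunk instead of waiting for the full completion
for chunk in model.generate_content('Summarize the history of the transistor', stream=True):
    print(chunk.text, end='')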
Comprehensive vision AI capabilities including image generation, analysis, and multimodal understanding.

class ImageGenerationModel:
    @classmethod
    def from_pretrained(cls, model_name: str) -> 'ImageGenerationModel': ...
def generate_images(self, prompt: str, number_of_images: int = 1, **kwargs) -> ImageGenerationResponse: ...
class ImageCaptioningModel:
    def get_captions(self, image: Image, number_of_results: int = 1, **kwargs) -> List[str]: ...
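A sketch of generating and saving an image with an Imagen-family model; the model name, prompt, and output path are assumptions.

import vertexai
from vertexai.vision_models import ImageGenerationModel

vertexai.init(project='my-project', location='us-central1')

# Model name is a placeholder; use whichever Imagen version is available to your project
model = ImageGenerationModel.from_pretrained("imagegeneration@006")
response = model.generate_images(
    prompt="A watercolor painting of a lighthouse at dusk",
    number_of_images=1,
)
response.images[0].save(location="lighthouse.png")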
Workflow orchestration, scheduling, and complex ML pipeline management with Kubeflow Pipelines integration.

class PipelineJob:
    @classmethod
    def create(cls, display_name: str, template_path: str, **kwargs) -> 'PipelineJob': ...
def run(self, service_account: Optional[str] = None, **kwargs) -> None: ...
class PipelineJobSchedule:
    @classmethod
    def create(cls, pipeline_job: PipelineJob, display_name: str, cron: str, **kwargs) -> 'PipelineJobSchedule': ...
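A minimal sketch following the signatures above; the compiled template path, pipeline display names, and cron expression are placeholders.

import google.cloud.aiplatform as aiplatform

aiplatform.init(project='my-project', location='us-central1')

# Create and run a pipeline from a compiled KFP template (the GCS path is a placeholder)
pipeline_job = aiplatform.PipelineJob.create(
    display_name="my-pipeline",
    template_path="gs://my-bucket/pipelines/pipeline.json",
)
pipeline_job.run()

# Schedule the same pipeline to run daily at 02:00
schedule = aiplatform.PipelineJobSchedule.create(
    pipeline_job=pipeline_job,
    display_name="nightly-run",
    cron="0 2 * * *",
)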
Enterprise feature management with online and offline serving, feature versioning, and monitoring.

class Featurestore:
    @classmethod
    def create(cls, featurestore_id: str, **kwargs) -> 'Featurestore': ...
def create_entity_type(self, entity_type_id: str, **kwargs) -> EntityType: ...
class Feature:
    @classmethod
    def create(cls, feature_id: str, value_type: str, entity_type: EntityType, **kwargs) -> 'Feature': ...
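A minimal sketch following the signatures above; the featurestore, entity type, and feature identifiers are placeholders.

import google.cloud.aiplatform as aiplatform

aiplatform.init(project='my-project', location='us-central1')

# Provision a featurestore, an entity type, and a single INT64 feature
fs = aiplatform.Featurestore.create(featurestore_id="my_featurestore")
users = fs.create_entity_type(entity_type_id="users")
age = aiplatform.Feature.create(
    feature_id="age",
    value_type="INT64",
    entity_type=users,
)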
Comprehensive experiment management, metrics logging, and artifact tracking with integration to popular ML frameworks.

def init(project: str, location: str, **kwargs) -> None: ...
def start_run(run: str, resume: bool = False, **kwargs) -> None: ...
def log_params(params: Dict[str, Union[str, int, float]]) -> None: ...
def log_metrics(metrics: Dict[str, Union[int, float]]) -> None: ...
def log_model(model, artifact_id: Optional[str] = None, **kwargs) -> None: ...
class Experiment:
    @classmethod
    def create(cls, experiment_id: str, **kwargs) -> 'Experiment': ...
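A sketch of tracking a run with the module-level helpers above; the experiment name, run name, and logged values are placeholders.

import google.cloud.aiplatform as aiplatform

# Associate this session with an experiment, then record a tracked run
aiplatform.init(project='my-project', location='us-central1', experiment='my-experiment')

aiplatform.start_run(run='run-1')
aiplatform.log_params({'learning_rate': 0.01, 'epochs': 10})
aiplatform.log_metrics({'accuracy': 0.94, 'loss': 0.18})
aiplatform.end_run()  # close the run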
High-performance vector similarity search with approximate nearest neighbor capabilities for embedding-based applications.

class MatchingEngineIndex:
    @classmethod
    def create(cls, display_name: str, contents_delta_uri: str, **kwargs) -> 'MatchingEngineIndex': ...
def update_embeddings(self, contents_delta_uri: str, **kwargs) -> None: ...
class MatchingEngineIndexEndpoint:
    @classmethod
    def create(cls, display_name: str, public_endpoint_enabled: bool = False, **kwargs) -> 'MatchingEngineIndexEndpoint': ...
    def match(self, deployed_index_id: str, queries: List[List[float]], **kwargs) -> List[List[MatchNeighbor]]: ...
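A sketch following the signatures above; the embeddings path, deployed index ID, and query vector are placeholders, and the deploy_index call and num_neighbors argument are assumed from the underlying SDK.

import google.cloud.aiplatform as aiplatform

aiplatform.init(project='my-project', location='us-central1')

# Build an index from precomputed embeddings (the GCS path is a placeholder)
index = aiplatform.MatchingEngineIndex.create(
    display_name="my-embeddings-index",
    contents_delta_uri="gs://my-bucket/embeddings/",
)

endpoint = aiplatform.MatchingEngineIndexEndpoint.create(
    display_name="my-index-endpoint",
    public_endpoint_enabled=True,
)
endpoint.deploy_index(index=index, deployed_index_id="deployed_index_1")

# Query the five nearest neighbors of a single embedding vector
neighbors = endpoint.match(
    deployed_index_id="deployed_index_1",
    queries=[[0.1, 0.2, 0.3]],
    num_neighbors=5,
)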
Large-scale batch inference and data processing with distributed computing integration and resource optimization.

class BatchPredictionJob:
    @classmethod
    def create(cls, job_display_name: str, model_name: str, instances_format: str, **kwargs) -> 'BatchPredictionJob': ...
    @classmethod
    def create_from_job_spec(cls, job_spec: Dict, **kwargs) -> 'BatchPredictionJob': ...
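A sketch following the signatures above; the model resource name, input and output paths, and the gcs_source and gcs_destination_prefix keyword arguments are placeholders or assumptions.

import google.cloud.aiplatform as aiplatform

aiplatform.init(project='my-project', location='us-central1')

# Run batch inference over JSONL instances stored in Cloud Storage
batch_job = aiplatform.BatchPredictionJob.create(
    job_display_name="my-batch-job",
    model_name="projects/my-project/locations/us-central1/models/1234567890",
    instances_format="jsonl",
    gcs_source="gs://my-bucket/batch-inputs/instances.jsonl",
    gcs_destination_prefix="gs://my-bucket/batch-outputs/",
)
batch_job.wait()  # block until the job finishes, then inspect its state
print(batch_job.state)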
# Common types used across the API

# Resource management
class Prediction:
predictions: List[Dict]
deployed_model_id: str
model_version_id: str
model_resource_name: str
explanations: Optional[List[Explanation]]
# Generative AI types
ContentsType = Union[
str,
Image,
Part,
List[Union[str, Image, Part]],
List[Content]
]
GenerationConfigType = Union[GenerationConfig, Dict[str, Any]]
SafetySettingsType = Union[SafetySetting, List[SafetySetting], Dict[str, Any]]
# Training and evaluation
class TrainingJob:
resource_name: str
display_name: str
state: JobState
create_time: datetime
start_time: Optional[datetime]
end_time: Optional[datetime]
error: Optional[Status]
# Enums
class JobState(Enum):
JOB_STATE_UNSPECIFIED = 0
JOB_STATE_QUEUED = 1
JOB_STATE_PENDING = 2
JOB_STATE_RUNNING = 3
JOB_STATE_SUCCEEDED = 4
JOB_STATE_FAILED = 5
JOB_STATE_CANCELLING = 6
JOB_STATE_CANCELLED = 7
JOB_STATE_PAUSED = 8
JOB_STATE_EXPIRED = 9