Client library for the Google Ads API providing comprehensive access to advertising management, reporting, and analytics capabilities.
Quality: Pending — does it follow best practices?
Impact: Pending — no eval scenarios have been run.
High-volume batch processing including batch job management, offline user data jobs, and asynchronous operation handling for large-scale account management. The Google Ads API provides robust batch processing capabilities for handling thousands of operations efficiently.
Batch job management operations for processing large numbers of mutate operations asynchronously with comprehensive error handling and progress tracking.
def create_batch_job(
self,
request: Optional[CreateBatchJobRequest] = None,
customer_id: Optional[str] = None
) -> BatchJob:
"""
Create a new batch job that will queue mutate operations for asynchronous execution.

NOTE(review): doc-page stub -- the method body is omitted and indentation is
flattened in this extract.
Args:
request: Full CreateBatchJobRequest message; presumably mutually exclusive
with the flattened customer_id keyword -- confirm against client conventions.
customer_id: ID of the customer that will own the batch job. Required when
no request object is supplied.
Returns:
The created BatchJob resource; its resource_name is the handle used by all
other BatchJobService calls.
"""
def mutate_batch_job(
self,
request: Optional[MutateBatchJobRequest] = None,
customer_id: Optional[str] = None,
operation: Optional[BatchJobOperation] = None
) -> MutateBatchJobResponse:
"""
Apply a single BatchJobOperation (see its create/update/remove fields) to a batch job.

NOTE(review): doc-page stub -- body omitted, indentation flattened.
Args:
request: Full MutateBatchJobRequest message; presumably mutually exclusive
with the flattened keywords -- confirm against client conventions.
customer_id: ID of the customer whose batch job is being mutated.
operation: The BatchJobOperation describing the mutation to perform.
Returns:
MutateBatchJobResponse containing the result of the mutation.
"""
def get_batch_job(
self,
request: Optional[GetBatchJobRequest] = None,
resource_name: Optional[str] = None
) -> BatchJob:
"""
Fetch the current state of a batch job (status, progress counters, timestamps).

NOTE(review): doc-page stub -- body omitted, indentation flattened.
Args:
request: Full GetBatchJobRequest message; presumably mutually exclusive
with resource_name -- confirm against client conventions.
resource_name: Resource name of the batch job to retrieve.
Returns:
The BatchJob resource, including status and processed/total operation counts
used by polling loops.
"""
def list_batch_job_results(
    self,
    request: Optional[ListBatchJobResultsRequest] = None,
    resource_name: Optional[str] = None,
    page_size: Optional[int] = None,
    page_token: Optional[str] = None,
    response_content_type: Optional[ResponseContentTypeEnum.ResponseContentType] = None
) -> ListBatchJobResultsResponse:
    """List the per-operation results of a completed batch job.

    Fix over the extracted stub: the parameter list was missing the comma
    after ``page_token``, which made the signature a syntax error.

    Args:
        request: Full ListBatchJobResultsRequest message; presumably mutually
            exclusive with the flattened keywords -- confirm client conventions.
        resource_name: Resource name of the batch job.
        page_size: Number of results per page.
        page_token: Token from a previous response's next_page_token; callers
            must follow it to retrieve all pages.
        response_content_type: Whether results carry resource names only or
            full mutated resources.

    Returns:
        ListBatchJobResultsResponse with one BatchJobResult per operation and
        a next_page_token for pagination.
    """
def add_batch_job_operations(
self,
request: Optional[AddBatchJobOperationsRequest] = None,
resource_name: Optional[str] = None,
sequence_token: Optional[str] = None,
mutate_operations: Optional[Sequence[MutateOperation]] = None
) -> AddBatchJobOperationsResponse:
"""
Upload a chunk of mutate operations to a pending batch job.

NOTE(review): doc-page stub -- body omitted, indentation flattened.
Args:
request: Full AddBatchJobOperationsRequest; presumably mutually exclusive
with the flattened keywords -- confirm against client conventions.
resource_name: Resource name of the batch job to add operations to.
sequence_token: Token chaining successive uploads: pass the
next_sequence_token returned by the previous call (or the job's
next_add_sequence_token) so chunks are applied in order.
mutate_operations: The operations to append in this chunk.
Returns:
AddBatchJobOperationsResponse carrying next_sequence_token for the
following upload and the running total of operations.
"""
def run_batch_job(
    self,
    request: Optional[RunBatchJobRequest] = None,
    resource_name: Optional[str] = None
) -> Operation:
    """Kick off asynchronous execution of a fully-uploaded batch job.

    Fix over the extracted stub: the section-header prose that followed was
    fused onto the closing docstring delimiter; it is restored below as a
    separate comment.

    Args:
        request: Full RunBatchJobRequest message; presumably mutually
            exclusive with resource_name -- confirm client conventions.
        resource_name: Resource name of the batch job to run.

    Returns:
        A long-running Operation; poll the job (get_batch_job) or the
        operation for completion.
    """

# Offline user data job operations for processing large-scale customer match
# uploads, user list updates, and audience data synchronization.
def create_offline_user_data_job(
self,
request: Optional[CreateOfflineUserDataJobRequest] = None,
customer_id: Optional[str] = None,
job: Optional[OfflineUserDataJob] = None
) -> CreateOfflineUserDataJobResponse:
"""
Create an offline user data job (customer match or store sales upload).

NOTE(review): doc-page stub -- body omitted, indentation flattened.
Args:
request: Full CreateOfflineUserDataJobRequest; presumably mutually exclusive
with the flattened keywords -- confirm against client conventions.
customer_id: ID of the customer that will own the job.
job: The OfflineUserDataJob configuration (type_ plus the matching
customer_match_user_list_metadata or store_sales_metadata).
Returns:
CreateOfflineUserDataJobResponse whose resource_name identifies the new job.
"""
def get_offline_user_data_job(
self,
request: Optional[GetOfflineUserDataJobRequest] = None,
resource_name: Optional[str] = None
) -> OfflineUserDataJob:
"""
Fetch the current state of an offline user data job (status, failure reason, metadata).

NOTE(review): doc-page stub -- body omitted, indentation flattened.
Args:
request: Full GetOfflineUserDataJobRequest; presumably mutually exclusive
with resource_name -- confirm against client conventions.
resource_name: Resource name of the job to retrieve.
Returns:
The OfflineUserDataJob resource, used by polling loops to detect SUCCESS
or FAILED terminal states.
"""
def add_offline_user_data_job_operations(
self,
request: Optional[AddOfflineUserDataJobOperationsRequest] = None,
resource_name: Optional[str] = None,
enable_partial_failure: Optional[bool] = None,
enable_warnings: Optional[bool] = None,
operations: Optional[Sequence[OfflineUserDataJobOperation]] = None
) -> AddOfflineUserDataJobOperationsResponse:
"""
Append user-data operations (create/remove/remove_all) to a pending offline user data job.

NOTE(review): doc-page stub -- body omitted, indentation flattened.
Args:
request: Full AddOfflineUserDataJobOperationsRequest; presumably mutually
exclusive with the flattened keywords -- confirm against client conventions.
resource_name: Resource name of the offline user data job.
enable_partial_failure: When True, valid operations are kept even if some
operations in the batch fail (failures reported in partial_failure_error).
enable_warnings: When True, non-fatal issues are reported in the response's
warning field.
operations: The OfflineUserDataJobOperation messages to add.
Returns:
AddOfflineUserDataJobOperationsResponse with partial-failure and warning
status details.
"""
def run_offline_user_data_job(
    self,
    request: Optional[RunOfflineUserDataJobRequest] = None,
    resource_name: Optional[str] = None,
    validate_only: bool = False
) -> Operation:
    """Start asynchronous execution of an offline user data job.

    Fix over the extracted stub: an import statement was fused onto the
    closing docstring delimiter; it is restored on its own line below.

    Args:
        request: Full RunOfflineUserDataJobRequest; presumably mutually
            exclusive with the flattened keywords -- confirm client conventions.
        resource_name: Resource name of the job to run.
        validate_only: When True, the request is validated but not executed.

    Returns:
        A long-running Operation; poll the job (get_offline_user_data_job)
        for SUCCESS/FAILED.
    """

from google.ads.googleads.client import GoogleAdsClient
from google.ads.googleads.errors import GoogleAdsException
import time
def execute_batch_operations(client, customer_id, operations_list):
    """Execute a large number of operations using batch processing.

    Creates a batch job, uploads the operations in chunks, starts the job,
    polls until it reaches a terminal state, then downloads and summarizes
    the per-operation results.

    Fixes over the original: the result download now follows
    next_page_token (previously only the first page was read), and the
    polling call is wrapped in the same GoogleAdsException handling as
    every other API call in this function.

    Args:
        client: An initialized GoogleAdsClient.
        customer_id: ID of the customer the operations apply to.
        operations_list: Service-level operation messages (CampaignOperation,
            AdGroupOperation, ...) to be wrapped in MutateOperations.

    Returns:
        A list of BatchJobResult messages on success, or None if any stage
        failed.
    """
    batch_job_service = client.get_service("BatchJobService")

    # Step 1: Create batch job
    try:
        batch_job = batch_job_service.create_batch_job(customer_id=customer_id)
        batch_job_resource_name = batch_job.resource_name
        print(f"Created batch job: {batch_job_resource_name}")
    except GoogleAdsException as ex:
        print(f"Failed to create batch job: {ex.error.code().name}")
        return None

    # Step 2: Add operations to batch job
    try:
        # Wrap each service-level operation in a MutateOperation, routing it
        # to the matching oneof field by duck-typing the operation message.
        mutate_operations = []
        for operation in operations_list:
            mutate_operation = client.get_type("MutateOperation")
            if hasattr(operation, 'campaign'):
                mutate_operation.campaign_operation = operation
            elif hasattr(operation, 'ad_group'):
                mutate_operation.ad_group_operation = operation
            elif hasattr(operation, 'ad_group_ad'):
                mutate_operation.ad_group_ad_operation = operation
            elif hasattr(operation, 'ad_group_criterion'):
                mutate_operation.ad_group_criterion_operation = operation
            mutate_operations.append(mutate_operation)

        # Upload in chunks (the API caps a single upload at 10,000 operations).
        # Each response returns the sequence token the next upload must send.
        chunk_size = 10000
        sequence_token = ""
        for i in range(0, len(mutate_operations), chunk_size):
            chunk = mutate_operations[i:i + chunk_size]
            response = batch_job_service.add_batch_job_operations(
                resource_name=batch_job_resource_name,
                sequence_token=sequence_token,
                mutate_operations=chunk
            )
            sequence_token = response.next_sequence_token
            print(f"Added chunk {i//chunk_size + 1}: {len(chunk)} operations")
    except GoogleAdsException as ex:
        print(f"Failed to add operations: {ex.error.code().name}")
        return None

    # Step 3: Run the batch job
    try:
        operation = batch_job_service.run_batch_job(
            resource_name=batch_job_resource_name
        )
        print(f"Started batch job execution: {operation.name}")
    except GoogleAdsException as ex:
        print(f"Failed to run batch job: {ex.error.code().name}")
        return None

    # Step 4: Poll for completion
    while True:
        try:
            batch_job = batch_job_service.get_batch_job(
                resource_name=batch_job_resource_name
            )
        except GoogleAdsException as ex:
            print(f"Failed to poll batch job: {ex.error.code().name}")
            return None
        status = batch_job.status
        print(f"Batch job status: {status.name}")
        if status == client.enums.BatchJobStatusEnum.DONE:
            print("Batch job completed successfully!")
            break
        elif status == client.enums.BatchJobStatusEnum.FAILED:
            print("Batch job failed!")
            return None
        elif status in [client.enums.BatchJobStatusEnum.PENDING,
                        client.enums.BatchJobStatusEnum.RUNNING]:
            print("Batch job still processing... waiting 30 seconds")
            time.sleep(30)
        else:
            print(f"Unknown status: {status}")
            return None

    # Step 5: Retrieve results, following pagination so large jobs report
    # every operation, not just the first page.
    try:
        all_results = []
        page_token = None
        while True:
            results_response = batch_job_service.list_batch_job_results(
                resource_name=batch_job_resource_name,
                page_token=page_token
            )
            all_results.extend(results_response.results)
            page_token = results_response.next_page_token
            if not page_token:
                break
        successful_operations = 0
        failed_operations = 0
        for result in all_results:
            # A non-zero status code marks a failed operation.
            if result.status and result.status.code != 0:
                failed_operations += 1
                print(f"Operation failed: {result.status.message}")
            else:
                successful_operations += 1
        print(f"Results: {successful_operations} successful, {failed_operations} failed")
        return all_results
    except GoogleAdsException as ex:
        print(f"Failed to retrieve results: {ex.error.code().name}")
        return None
# Example usage - batch create campaigns
def create_campaigns_batch(client, customer_id, campaign_data_list):
    """Create multiple campaigns using batch processing.

    Args:
        client: An initialized GoogleAdsClient.
        customer_id: Target customer ID.
        campaign_data_list: Dicts with 'name', 'channel_type',
            'budget_resource_name' and optional 'bidding_strategy' /
            'target_cpa_micros' keys.

    Returns:
        Whatever execute_batch_operations returns (results list or None).
    """
    operations = []
    for campaign_data in campaign_data_list:
        operation = client.get_type("CampaignOperation")
        campaign = operation.create
        campaign.name = campaign_data['name']
        campaign.advertising_channel_type = campaign_data['channel_type']
        # New campaigns start PAUSED so they cannot spend before review.
        campaign.status = client.enums.CampaignStatusEnum.PAUSED
        campaign.campaign_budget = campaign_data['budget_resource_name']
        # Set bidding strategy
        if campaign_data.get('bidding_strategy') == 'manual_cpc':
            campaign.manual_cpc.enhanced_cpc_enabled = True
        elif campaign_data.get('bidding_strategy') == 'target_cpa':
            campaign.target_cpa.target_cpa_micros = campaign_data.get('target_cpa_micros', 5000000)
        operations.append(operation)
    return execute_batch_operations(client, customer_id, operations)


def batch_keyword_operations(client, customer_id, keyword_operations_data):
    """Perform batch keyword operations (add, update, remove).

    Fixes over the original: this def was fused onto the previous function's
    return statement, and the create branch assigned
    keyword_data.get('cpc_bid_micros') unconditionally -- assigning None to a
    proto int field raises TypeError, so the bid is now set only when present.

    Args:
        client: An initialized GoogleAdsClient.
        customer_id: Target customer ID.
        keyword_operations_data: Dicts with an 'action' key ('create',
            'update' or 'remove') plus the fields that action needs.

    Returns:
        Whatever execute_batch_operations returns (results list or None).
    """
    operations = []
    for keyword_data in keyword_operations_data:
        operation = client.get_type("AdGroupCriterionOperation")
        if keyword_data['action'] == 'create':
            criterion = operation.create
            criterion.ad_group = keyword_data['ad_group_resource_name']
            criterion.status = client.enums.AdGroupCriterionStatusEnum.ENABLED
            criterion.type_ = client.enums.CriterionTypeEnum.KEYWORD
            criterion.keyword.text = keyword_data['keyword_text']
            criterion.keyword.match_type = keyword_data['match_type']
            if keyword_data.get('cpc_bid_micros') is not None:
                criterion.cpc_bid_micros = keyword_data['cpc_bid_micros']
        elif keyword_data['action'] == 'update':
            criterion = operation.update
            criterion.resource_name = keyword_data['resource_name']
            criterion.cpc_bid_micros = keyword_data['new_cpc_bid_micros']
            # NOTE(review): newer google-ads clients build masks via
            # google.api_core protobuf_helpers.field_mask -- confirm that
            # get_type("FieldMask", paths=...) is supported by this version.
            operation.update_mask = client.get_type("FieldMask", paths=["cpc_bid_micros"])
        elif keyword_data['action'] == 'remove':
            operation.remove = keyword_data['resource_name']
        operations.append(operation)
    return execute_batch_operations(client, customer_id, operations)
# Example usage
# NOTE(review): this doc-page snippet assumes `client` and `customer_id`
# already exist in scope; it is illustrative, not standalone.
keyword_operations = [
    {
        'action': 'create',
        'ad_group_resource_name': 'customers/123/adGroups/456',
        'keyword_text': 'running shoes',
        'match_type': client.enums.KeywordMatchTypeEnum.EXACT,
        'cpc_bid_micros': 2000000
    },
    {
        'action': 'update',
        'resource_name': 'customers/123/adGroupCriteria/789',
        'new_cpc_bid_micros': 2500000
    },
    {
        'action': 'remove',
        'resource_name': 'customers/123/adGroupCriteria/101112'
    }
]
batch_keyword_operations(client, customer_id, keyword_operations)

# hashlib is used below to hash user identifiers before upload; this import
# was fused onto the call above in the extracted page.
import hashlib
def upload_customer_match_emails(client, customer_id, user_list_resource_name, email_addresses):
    """Upload email addresses to a customer match user list.

    Creates a CUSTOMER_MATCH_USER_LIST offline user data job, uploads
    SHA-256-hashed emails in chunks, runs the job, and polls until it
    reaches a terminal state.

    Fix over the original: the status-polling call is now wrapped in the
    same GoogleAdsException handling used by every other API call here.

    Args:
        client: An initialized GoogleAdsClient.
        customer_id: ID of the customer owning the user list.
        user_list_resource_name: Resource name of the target user list.
        email_addresses: Plain-text emails; they are lower-cased, stripped
            and SHA-256 hashed before upload.

    Returns:
        The job's resource name, or None if a stage failed.
    """
    offline_user_data_job_service = client.get_service("OfflineUserDataJobService")

    # Step 1: Create offline user data job
    job = client.get_type("OfflineUserDataJob")
    job.type_ = client.enums.OfflineUserDataJobTypeEnum.CUSTOMER_MATCH_USER_LIST
    job.customer_match_user_list_metadata.user_list = user_list_resource_name
    try:
        create_job_response = offline_user_data_job_service.create_offline_user_data_job(
            customer_id=customer_id,
            job=job
        )
        job_resource_name = create_job_response.resource_name
        print(f"Created offline user data job: {job_resource_name}")
    except GoogleAdsException as ex:
        print(f"Failed to create job: {ex.error.code().name}")
        return None

    # Step 2: Add user data operations
    try:
        operations = []
        for email in email_addresses:
            operation = client.get_type("OfflineUserDataJobOperation")
            user_data = operation.create
            # Hash the email address for privacy (lower-case + strip is the
            # normalization customer match expects for emails).
            hashed_email = hashlib.sha256(email.lower().strip().encode()).hexdigest()
            user_identifier = client.get_type("UserIdentifier")
            user_identifier.hashed_email = hashed_email
            user_data.user_identifiers.append(user_identifier)
            operations.append(operation)

        # Add operations in chunks
        chunk_size = 100000  # Maximum per request
        for i in range(0, len(operations), chunk_size):
            chunk = operations[i:i + chunk_size]
            offline_user_data_job_service.add_offline_user_data_job_operations(
                resource_name=job_resource_name,
                enable_partial_failure=True,
                operations=chunk
            )
            print(f"Added chunk {i//chunk_size + 1}: {len(chunk)} email operations")
    except GoogleAdsException as ex:
        print(f"Failed to add operations: {ex.error.code().name}")
        return None

    # Step 3: Run the job
    try:
        operation = offline_user_data_job_service.run_offline_user_data_job(
            resource_name=job_resource_name
        )
        print(f"Started user data job: {operation.name}")
    except GoogleAdsException as ex:
        print(f"Failed to run job: {ex.error.code().name}")
        return None

    # Step 4: Monitor job progress
    while True:
        try:
            job_status = offline_user_data_job_service.get_offline_user_data_job(
                resource_name=job_resource_name
            )
        except GoogleAdsException as ex:
            print(f"Failed to poll job: {ex.error.code().name}")
            return None
        status = job_status.status
        print(f"Job status: {status.name}")
        if status == client.enums.OfflineUserDataJobStatusEnum.SUCCESS:
            print("User data job completed successfully!")
            print(f"Matched users: {job_status.customer_match_user_list_metadata.user_list_size_for_search}")
            break
        elif status == client.enums.OfflineUserDataJobStatusEnum.FAILED:
            print(f"User data job failed: {job_status.failure_reason}")
            break
        elif status in [client.enums.OfflineUserDataJobStatusEnum.PENDING,
                        client.enums.OfflineUserDataJobStatusEnum.RUNNING]:
            print("Job still processing... waiting 60 seconds")
            time.sleep(60)
        else:
            print(f"Unknown status: {status}")
            break
    return job_resource_name
# Example usage
customer_emails = [
    "john.doe@example.com",
    "jane.smith@example.com",
    "customer@company.com"
    # ... thousands more emails
]
upload_customer_match_emails(client, customer_id, user_list_resource_name, customer_emails)


def upload_store_sales_data(client, customer_id, conversion_action_resource_name, store_sales_data):
    """Upload store sales data for conversion tracking.

    Creates a STORE_SALES_UPLOAD_FIRST_PARTY offline user data job, builds
    one UserData operation per transaction, and starts the job (this
    function does not wait for completion).

    Fix over the original: emails are stripped before hashing, matching the
    normalization used by upload_customer_match_emails.

    Args:
        client: An initialized GoogleAdsClient.
        customer_id: ID of the customer owning the conversion action.
        conversion_action_resource_name: Conversion action transactions are
            attributed to.
        store_sales_data: Dicts with 'transaction_date_time' and 'amount'
            plus optional 'email', 'phone', 'currency_code', 'order_id'.

    Returns:
        The job's resource name.
    """
    offline_user_data_job_service = client.get_service("OfflineUserDataJobService")

    # Create store sales upload job
    job = client.get_type("OfflineUserDataJob")
    job.type_ = client.enums.OfflineUserDataJobTypeEnum.STORE_SALES_UPLOAD_FIRST_PARTY

    # Configure store sales metadata
    store_sales_metadata = job.store_sales_metadata
    store_sales_metadata.loyalty_fraction = 0.8  # 80% of sales are from loyalty customers
    store_sales_metadata.transaction_upload_fraction = 1.0  # Upload 100% of transactions

    create_job_response = offline_user_data_job_service.create_offline_user_data_job(
        customer_id=customer_id,
        job=job
    )
    job_resource_name = create_job_response.resource_name

    # Add store sales operations
    operations = []
    for sale_data in store_sales_data:
        operation = client.get_type("OfflineUserDataJobOperation")
        user_data = operation.create
        # Add user identifiers (hashed for privacy)
        if 'email' in sale_data:
            hashed_email = hashlib.sha256(sale_data['email'].lower().strip().encode()).hexdigest()
            user_identifier = client.get_type("UserIdentifier")
            user_identifier.hashed_email = hashed_email
            user_data.user_identifiers.append(user_identifier)
        if 'phone' in sale_data:
            # NOTE(review): phone numbers should be E.164-normalized before
            # hashing -- confirm upstream formatting.
            hashed_phone = hashlib.sha256(sale_data['phone'].encode()).hexdigest()
            user_identifier = client.get_type("UserIdentifier")
            user_identifier.hashed_phone_number = hashed_phone
            user_data.user_identifiers.append(user_identifier)
        # Add transaction data
        transaction_attribute = client.get_type("TransactionAttribute")
        transaction_attribute.transaction_date_time = sale_data['transaction_date_time']
        transaction_attribute.transaction_amount_micros = int(sale_data['amount'] * 1_000_000)
        transaction_attribute.currency_code = sale_data.get('currency_code', 'USD')
        transaction_attribute.conversion_action = conversion_action_resource_name
        if 'order_id' in sale_data:
            transaction_attribute.order_id = sale_data['order_id']
        user_data.transaction_attribute = transaction_attribute
        operations.append(operation)

    # Add operations to job
    offline_user_data_job_service.add_offline_user_data_job_operations(
        resource_name=job_resource_name,
        enable_partial_failure=True,
        operations=operations
    )
    # Run the job
    offline_user_data_job_service.run_offline_user_data_job(
        resource_name=job_resource_name
    )
    return job_resource_name


def monitor_batch_job_progress(client, batch_job_resource_name, poll_interval=30):
    """Monitor batch job progress and provide status updates.

    Polls get_batch_job every poll_interval seconds, printing status,
    percent complete and elapsed time until a terminal state is reached.

    Args:
        client: An initialized GoogleAdsClient.
        batch_job_resource_name: Resource name of the batch job to watch.
        poll_interval: Seconds between polls.

    Returns:
        True when the job reaches DONE, False on FAILED or on an API error.
    """
    batch_job_service = client.get_service("BatchJobService")
    start_time = time.time()
    while True:
        try:
            batch_job = batch_job_service.get_batch_job(
                resource_name=batch_job_resource_name
            )
            status = batch_job.status
            elapsed_time = time.time() - start_time
            progress_info = {
                'status': status.name,
                'elapsed_minutes': elapsed_time / 60,
                'total_operations': batch_job.total_operations,
                'processed_operations': batch_job.processed_operations,
                'progress_percentage': 0
            }
            if batch_job.total_operations > 0:
                progress_info['progress_percentage'] = (
                    batch_job.processed_operations / batch_job.total_operations * 100
                )
            print(f"Status: {progress_info['status']} | "
                  f"Progress: {progress_info['progress_percentage']:.1f}% | "
                  f"Elapsed: {progress_info['elapsed_minutes']:.1f} min")
            if status == client.enums.BatchJobStatusEnum.DONE:
                print("✅ Batch job completed successfully!")
                return True
            elif status == client.enums.BatchJobStatusEnum.FAILED:
                print("❌ Batch job failed!")
                return False
            time.sleep(poll_interval)
        except GoogleAdsException as ex:
            print(f"Error monitoring batch job: {ex.error.code().name}")
            return False


def analyze_batch_job_results(client, batch_job_resource_name):
    """Analyze results from a completed batch job.

    Fixes over the original: follows next_page_token so jobs larger than one
    results page are fully counted, and guards the percentage math against
    an empty result set (previously a ZeroDivisionError).

    Args:
        client: An initialized GoogleAdsClient.
        batch_job_resource_name: Resource name of the completed batch job.

    Returns:
        Dict with 'total', 'successful', 'failed' counts and an 'errors'
        breakdown keyed by status code.
    """
    batch_job_service = client.get_service("BatchJobService")
    total_operations = 0
    successful_operations = 0
    failed_operations = 0
    error_summary = {}

    # Walk every results page.
    page_token = None
    while True:
        results_response = batch_job_service.list_batch_job_results(
            resource_name=batch_job_resource_name,
            page_token=page_token
        )
        for result in results_response.results:
            total_operations += 1
            if result.status and result.status.code != 0:
                failed_operations += 1
                error_code = result.status.code
                error_message = result.status.message
                if error_code not in error_summary:
                    error_summary[error_code] = {
                        'count': 0,
                        'message': error_message
                    }
                error_summary[error_code]['count'] += 1
            else:
                successful_operations += 1
        page_token = results_response.next_page_token
        if not page_token:
            break

    # Print summary (denominator guarded so an empty job prints 0.0%).
    denominator = total_operations or 1
    print(f"\n📊 Batch Job Results Summary:")
    print(f"Total Operations: {total_operations}")
    print(f"Successful: {successful_operations} ({successful_operations/denominator*100:.1f}%)")
    print(f"Failed: {failed_operations} ({failed_operations/denominator*100:.1f}%)")
    if error_summary:
        print(f"\n❌ Error Breakdown:")
        for error_code, error_info in error_summary.items():
            print(f"  Code {error_code}: {error_info['count']} occurrences")
            print(f"  Message: {error_info['message']}")
    return {
        'total': total_operations,
        'successful': successful_operations,
        'failed': failed_operations,
        'errors': error_summary
    }

# Batch job types
class BatchJob:
"""Resource representing an asynchronous batch of mutate operations (doc-page stub; fields flattened with no indentation)."""
resource_name: str
id: Optional[int]
# Token to pass as sequence_token on the next AddBatchJobOperations upload.
next_add_sequence_token: str
status: BatchJobStatusEnum.BatchJobStatus
long_running_operation: str
creation_date_time: str
start_date_time: str
completion_date_time: str
# Progress counters used by polling/monitoring helpers above.
total_operations: int
processed_operations: int
class BatchJobResult:
"""Outcome of a single operation within a batch job; a non-zero status.code marks failure (see example usage above)."""
operation_index: Optional[int]
mutate_operation_response: MutateOperationResponse
status: Status
class MutateOperation:
"""Wrapper carrying exactly one service-level operation per message -- presumably a proto oneof; confirm in the API reference. (Doc-page stub; fields flattened.)"""
ad_group_ad_label_operation: AdGroupAdLabelOperation
ad_group_ad_operation: AdGroupAdOperation
ad_group_asset_operation: AdGroupAssetOperation
ad_group_bid_modifier_operation: AdGroupBidModifierOperation
ad_group_criterion_customizer_operation: AdGroupCriterionCustomizerOperation
ad_group_criterion_label_operation: AdGroupCriterionLabelOperation
ad_group_criterion_operation: AdGroupCriterionOperation
ad_group_customizer_operation: AdGroupCustomizerOperation
ad_group_extension_setting_operation: AdGroupExtensionSettingOperation
ad_group_feed_operation: AdGroupFeedOperation
ad_group_label_operation: AdGroupLabelOperation
ad_group_operation: AdGroupOperation
ad_operation: AdOperation
ad_parameter_operation: AdParameterOperation
asset_group_asset_operation: AssetGroupAssetOperation
asset_group_listing_group_filter_operation: AssetGroupListingGroupFilterOperation
asset_group_operation: AssetGroupOperation
asset_group_signal_operation: AssetGroupSignalOperation
asset_operation: AssetOperation
asset_set_asset_operation: AssetSetAssetOperation
asset_set_operation: AssetSetOperation
audience_operation: AudienceOperation
bidding_data_exclusion_operation: BiddingDataExclusionOperation
bidding_seasonality_adjustment_operation: BiddingSeasonalityAdjustmentOperation
bidding_strategy_operation: BiddingStrategyOperation
campaign_asset_operation: CampaignAssetOperation
campaign_asset_set_operation: CampaignAssetSetOperation
campaign_bid_modifier_operation: CampaignBidModifierOperation
campaign_budget_operation: CampaignBudgetOperation
campaign_conversion_goal_operation: CampaignConversionGoalOperation
campaign_criterion_operation: CampaignCriterionOperation
campaign_customizer_operation: CampaignCustomizerOperation
campaign_draft_operation: CampaignDraftOperation
campaign_extension_setting_operation: CampaignExtensionSettingOperation
campaign_feed_operation: CampaignFeedOperation
campaign_group_operation: CampaignGroupOperation
campaign_label_operation: CampaignLabelOperation
campaign_operation: CampaignOperation
campaign_shared_set_operation: CampaignSharedSetOperation
conversion_action_operation: ConversionActionOperation
conversion_custom_variable_operation: ConversionCustomVariableOperation
conversion_goal_campaign_config_operation: ConversionGoalCampaignConfigOperation
conversion_value_rule_operation: ConversionValueRuleOperation
conversion_value_rule_set_operation: ConversionValueRuleSetOperation
custom_audience_operation: CustomAudienceOperation
custom_conversion_goal_operation: CustomConversionGoalOperation
custom_interest_operation: CustomInterestOperation
customer_asset_operation: CustomerAssetOperation
customer_conversion_goal_operation: CustomerConversionGoalOperation
customer_customizer_operation: CustomerCustomizerOperation
customer_extension_setting_operation: CustomerExtensionSettingOperation
customer_feed_operation: CustomerFeedOperation
customer_label_operation: CustomerLabelOperation
customer_negative_criterion_operation: CustomerNegativeCriterionOperation
customizer_attribute_operation: CustomizerAttributeOperation
extension_feed_item_operation: ExtensionFeedItemOperation
feed_item_operation: FeedItemOperation
feed_item_set_link_operation: FeedItemSetLinkOperation
feed_item_set_operation: FeedItemSetOperation
feed_item_target_operation: FeedItemTargetOperation
feed_mapping_operation: FeedMappingOperation
feed_operation: FeedOperation
keyword_plan_ad_group_keyword_operation: KeywordPlanAdGroupKeywordOperation
keyword_plan_ad_group_operation: KeywordPlanAdGroupOperation
keyword_plan_campaign_keyword_operation: KeywordPlanCampaignKeywordOperation
keyword_plan_campaign_operation: KeywordPlanCampaignOperation
keyword_plan_operation: KeywordPlanOperation
label_operation: LabelOperation
media_file_operation: MediaFileOperation
remarketing_action_operation: RemarketingActionOperation
shared_criterion_operation: SharedCriterionOperation
shared_set_operation: SharedSetOperation
user_list_operation: UserListOperation
# Offline user data job types
class OfflineUserDataJob:
"""Resource for asynchronous user-data uploads (customer match or store sales); type_ selects which metadata field applies. (Doc-page stub; fields flattened.)"""
resource_name: str
id: Optional[int]
external_id: Optional[int]
type_: OfflineUserDataJobTypeEnum.OfflineUserDataJobType
status: OfflineUserDataJobStatusEnum.OfflineUserDataJobStatus
# Populated when status is FAILED (see polling example above).
failure_reason: OfflineUserDataJobFailureReasonEnum.OfflineUserDataJobFailureReason
operation_filter: OfflineUserDataJobMatchRateRange
customer_match_user_list_metadata: CustomerMatchUserListMetadata
store_sales_metadata: StoreSalesMetadata
class CustomerMatchUserListMetadata:
"""Metadata for CUSTOMER_MATCH_USER_LIST jobs: the target list and its matched sizes. (Doc-page stub.)"""
user_list: Optional[str]
consent: Consent
user_list_size_for_search: Optional[int]
user_list_size_for_display: Optional[int]
class StoreSalesMetadata:
"""Metadata for store sales upload jobs; fractions are in [0, 1] per the example usage above. (Doc-page stub.)"""
loyalty_fraction: Optional[float]
transaction_upload_fraction: Optional[float]
custom_key: Optional[str]
third_party_metadata: StoreSalesThirdPartyMetadata
class OfflineUserDataJobOperation:
"""Single operation on an offline user data job: add, remove, or clear all user data. (Doc-page stub; fields flattened.)"""
create: UserData
remove: UserData
remove_all: bool
class UserData:
"""User record: identifiers plus optional transaction/user attributes. (Doc-page stub.)"""
user_identifiers: List[UserIdentifier]
transaction_attribute: TransactionAttribute
user_attribute: UserAttribute
class TransactionAttribute:
"""Transaction details attached to a UserData record for store sales uploads. (Doc-page stub.)"""
transaction_date_time: Optional[str]
# Amount in micros (currency units * 1,000,000 -- see example above).
transaction_amount_micros: Optional[int]
currency_code: Optional[str]
conversion_action: Optional[str]
order_id: Optional[str]
store_attribute: StoreAttribute
custom_value: Optional[str]
item_attribute: ItemAttribute
class UserIdentifier:
"""A single way to identify a user; hashed_* fields hold SHA-256 hex digests per the examples above. (Doc-page stub.)"""
user_identifier_source: UserIdentifierSourceEnum.UserIdentifierSource
hashed_email: Optional[str]
hashed_phone_number: Optional[str]
mobile_id: Optional[str]
third_party_user_id: Optional[str]
address_info: OfflineUserAddressInfo
# Operation types
class BatchJobOperation:
"""Mutation applied by MutateBatchJob: create/update a BatchJob, or remove by resource name. (Doc-page stub; fields flattened.)"""
update_mask: FieldMask
create: BatchJob
update: BatchJob
remove: str
# Request types
class AddBatchJobOperationsRequest:
"""Request to upload a chunk of MutateOperations; sequence_token chains successive uploads. (Doc-page stub.)"""
resource_name: str
sequence_token: str
mutate_operations: List[MutateOperation]
class RunBatchJobRequest:
"""Request to start executing a batch job. (Doc-page stub.)"""
resource_name: str
class ListBatchJobResultsRequest:
"""Paged request for a batch job's per-operation results. (Doc-page stub.)"""
resource_name: str
page_token: str
page_size: int
response_content_type: ResponseContentTypeEnum.ResponseContentType
class AddOfflineUserDataJobOperationsRequest:
"""Request to append user-data operations to an offline user data job. (Doc-page stub.)"""
resource_name: str
enable_partial_failure: bool
enable_warnings: bool
operations: List[OfflineUserDataJobOperation]
class RunOfflineUserDataJobRequest:
"""Request to start executing an offline user data job; validate_only dry-runs it. (Doc-page stub.)"""
resource_name: str
validate_only: bool
# Response types
class MutateBatchJobResponse:
result: MutateBatchJobResult
class MutateBatchJobResult:
resource_name: str
batch_job: BatchJob
class AddBatchJobOperationsResponse:
total_operations: int
next_sequence_token: str
class ListBatchJobResultsResponse:
results: List[BatchJobResult]
next_page_token: str
class CreateOfflineUserDataJobResponse:
resource_name: str
class AddOfflineUserDataJobOperationsResponse:
partial_failure_error: Status
warning: StatusInstall with Tessl CLI
npx tessl i tessl/pypi-google-ads