Python client library for the Google Cloud Retail API, enabling end-to-end personalized recommendation systems.

## Analytics Service

Analytics data export and metrics collection for business intelligence and reporting. The Analytics Service provides comprehensive data-export capabilities to extract insights from user behavior, product performance, and recommendation effectiveness. It exports analytics metrics and data for external analysis and business-intelligence systems.
class AnalyticsServiceClient:
    """Synchronous client for the Retail Analytics Service."""

    def export_analytics_metrics(self, request: "ExportAnalyticsMetricsRequest") -> "Operation":
        """Export analytics metrics data to an external destination.

        Starts a long-running operation; poll or wait on the returned
        Operation to obtain the final ExportAnalyticsMetricsResponse.

        Args:
            request: Contains the catalog resource name, the output
                configuration (GCS or BigQuery), and export filters.

        Returns:
            Operation: Resolves to ExportAnalyticsMetricsResponse with
            export details.

        Raises:
            InvalidArgument: If export parameters are invalid.
            PermissionDenied: If insufficient permissions for the export
                destination.
        """
class AnalyticsServiceAsyncClient:
    """Asynchronous client for the Retail Analytics Service."""

    async def export_analytics_metrics(self, request: "ExportAnalyticsMetricsRequest") -> "Operation":
        """Export analytics metrics data to an external destination.

        Starts a long-running operation; await the returned Operation (or
        its result) to obtain the final ExportAnalyticsMetricsResponse.

        Args:
            request: Contains the catalog resource name, the output
                configuration (GCS or BigQuery), and export filters.

        Returns:
            Operation: Resolves to ExportAnalyticsMetricsResponse with
            export details.

        Raises:
            InvalidArgument: If export parameters are invalid.
            PermissionDenied: If insufficient permissions for the export
                destination.
        """


# Comprehensive configuration for analytics data export operations.
class ExportAnalyticsMetricsRequest:
    """Request message for AnalyticsService.export_analytics_metrics."""

    # Forward references are quoted: OutputConfig is defined later in the file.
    catalog: str                    # Catalog resource name (required)
    output_config: "OutputConfig"   # Export destination configuration (required)
    filter: str                     # Filter expression selecting the data to export
class OutputConfig:
    """Destination configuration for an analytics export."""

    # Forward references are quoted: destination classes are defined below.
    destination: str                             # Export destination type
    gcs_destination: "GcsDestination"            # Google Cloud Storage destination
    bigquery_destination: "BigQueryDestination"  # BigQuery destination
class GcsDestination:
    """Google Cloud Storage destination for exported analytics files."""

    output_uri_prefix: str  # GCS URI prefix for output files (required)
class BigQueryDestination:
    """BigQuery destination for exported analytics data."""

    project_id: str  # BigQuery project ID (required)
    dataset_id: str  # BigQuery dataset ID (required)
    table_id: str    # BigQuery table ID (required)
    table_type: str  # Table type (TABLE, VIEW)


# Results and metadata from analytics export operations.
class ExportAnalyticsMetricsResponse:
    """Response produced when an analytics export operation completes."""

    # Annotations are quoted: List/Status come from external packages and
    # ExportErrorsConfig/OutputResult are defined later in the file.
    error_samples: "List[Status]"        # Sample of errors encountered during export
    errors_config: "ExportErrorsConfig"  # Error handling configuration used
    output_result: "OutputResult"        # Details about the exported data
class OutputResult:
    """Details of the data written by an export, per destination type."""

    # Annotations are quoted: List and the result classes resolve lazily.
    bigquery_result: "List[BigQueryOutputResult]"  # BigQuery export results
    gcs_result: "List[GcsOutputResult]"            # GCS export results
class BigQueryOutputResult:
    """Identifies a BigQuery table populated by an export."""

    dataset_id: str  # BigQuery dataset ID
    table_id: str    # BigQuery table ID
class GcsOutputResult:
    """Identifies a single file written to GCS by an export."""

    output_uri: str   # GCS URI of the exported file
    bytes_count: int  # Number of bytes exported


# Configuration for handling errors during export operations.
class ExportErrorsConfig:
    """Where per-record errors encountered during an export are written."""

    gcs_prefix: str  # GCS prefix for error files
class ExportMetadata:
    """Metadata attached to a running or finished export operation."""

    # Timestamp comes from an external package; quoted as a forward reference.
    create_time: "Timestamp"                  # Export operation start time
    update_time: "Timestamp"                  # Last update time
    partial_failure: bool                     # Whether export had partial failures
    request: "ExportAnalyticsMetricsRequest"  # Original export request


# Various analytics metrics and data types available for export.
# Analytics metric types that can be exported with export_analytics_metrics.
ANALYTICS_METRIC_SEARCH_ANALYTICS = "search-analytics" # Search behavior and performance
ANALYTICS_METRIC_USER_EVENTS = "user-events" # User interaction events
ANALYTICS_METRIC_PRODUCT_PERFORMANCE = "product-performance" # Product view/purchase metrics
ANALYTICS_METRIC_RECOMMENDATION_PERFORMANCE = "recommendation-performance" # Recommendation effectiveness
ANALYTICS_METRIC_REVENUE_ANALYTICS = "revenue-analytics" # Revenue and conversion metrics
ANALYTICS_METRIC_CATALOG_ANALYTICS = "catalog-analytics" # Catalog usage and coverage

# Example filter expressions for the `filter` field of
# ExportAnalyticsMetricsRequest.
# Time-based filters (RFC 3339 UTC timestamps).
FILTER_TIME_RANGE = 'event_time >= "2024-01-01T00:00:00Z" AND event_time <= "2024-01-31T23:59:59Z"'
# Event type filters.
FILTER_PURCHASE_EVENTS = 'event_type = "purchase-complete"'
FILTER_SEARCH_EVENTS = 'event_type = "search"'
FILTER_VIEW_EVENTS = 'event_type = "page-view"'
# Product filters.
FILTER_PRODUCT_CATEGORY = 'product_details.product.categories: ANY("Electronics")'
FILTER_PRICE_RANGE = 'product_details.product.price_info.price >= 100 AND product_details.product.price_info.price <= 1000'
# User filters.
FILTER_REGISTERED_USERS = 'user_info.user_id != ""'from google.cloud import retail
# Create the synchronous analytics client used by all examples below.
client = retail.AnalyticsServiceClient()

# Example 1: export user events to BigQuery for analysis.
request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=retail.OutputConfig(
        bigquery_destination=retail.BigQueryDestination(
            project_id="my-analytics-project",
            dataset_id="retail_analytics",
            table_id="user_events_2024",
        )
    ),
    filter='event_time >= "2024-01-01T00:00:00Z" AND event_time <= "2024-12-31T23:59:59Z"',
)

operation = client.export_analytics_metrics(request=request)
print(f"Export operation started: {operation.name}")

# Block until the long-running export finishes.
result = operation.result()
print("Export completed successfully")

if result.output_result.bigquery_result:
    for bq_result in result.output_result.bigquery_result:
        print(f"Data exported to: {bq_result.dataset_id}.{bq_result.table_id}")

if result.error_samples:
    print(f"Export had {len(result.error_samples)} errors")
    for error in result.error_samples[:3]:  # Show first 3 errors
        print(f"- {error.message}")

# Export purchase events and revenue data to GCS
# Example 2: export purchase events and revenue data to GCS.
output_config = retail.OutputConfig(
    gcs_destination=retail.GcsDestination(
        output_uri_prefix="gs://my-analytics-bucket/exports/purchase-data/"
    )
)

# Restrict the export to completed purchases in 2024.
purchase_filter = 'event_type = "purchase-complete" AND event_time >= "2024-01-01T00:00:00Z"'

request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=output_config,
    filter=purchase_filter,
)

operation = client.export_analytics_metrics(request=request)
print(f"Purchase data export operation: {operation.name}")

# Wait for the long-running export to finish.
result = operation.result()
print("Purchase data export completed")

if result.output_result.gcs_result:
    for gcs_result in result.output_result.gcs_result:
        print(f"File exported: {gcs_result.output_uri}")
        print(f"Size: {gcs_result.bytes_count} bytes")

# Export product performance metrics for electronics category
# Example 3: export product-performance metrics for the Electronics category.
product_performance_config = retail.OutputConfig(
    bigquery_destination=retail.BigQueryDestination(
        project_id="my-analytics-project",
        dataset_id="retail_analytics",
        table_id="product_performance_electronics",
    )
)

# Electronics products only, with view / add-to-cart / purchase events.
electronics_filter = '''
product_details.product.categories: ANY("Electronics") AND
event_time >= "2024-01-01T00:00:00Z" AND
event_type: ANY("page-view", "add-to-cart", "purchase-complete")
'''

request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=product_performance_config,
    filter=electronics_filter,
)

operation = client.export_analytics_metrics(request=request)
result = operation.result()
print("Product performance data exported")
print(f"Electronics performance data available in BigQuery")

# Export search analytics to understand search behavior and performance
# Example 4: export search analytics to understand search behavior.
search_analytics_config = retail.OutputConfig(
    bigquery_destination=retail.BigQueryDestination(
        project_id="my-analytics-project",
        dataset_id="retail_analytics",
        table_id="search_analytics_monthly",
    )
)

# January 2024 search events that carry a non-empty query string.
search_filter = '''
event_type = "search" AND
event_time >= "2024-01-01T00:00:00Z" AND
event_time <= "2024-01-31T23:59:59Z" AND
search_query != ""
'''

request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=search_analytics_config,
    filter=search_filter,
)

operation = client.export_analytics_metrics(request=request)
result = operation.result()
print("Search analytics exported for January 2024")

# Export recommendation performance metrics to analyze ML model effectiveness
# Example 5: export recommendation-performance metrics to GCS.
recommendation_config = retail.OutputConfig(
    gcs_destination=retail.GcsDestination(
        output_uri_prefix="gs://my-analytics-bucket/recommendations/performance/"
    )
)

# Events carrying an attribution token originated from recommendations.
recommendation_filter = '''
attribution_token != "" AND
event_time >= "2024-01-01T00:00:00Z" AND
event_type: ANY("page-view", "add-to-cart", "purchase-complete")
'''

request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=recommendation_config,
    filter=recommendation_filter,
)

operation = client.export_analytics_metrics(request=request)
result = operation.result()
print("Recommendation performance data exported")
for gcs_result in result.output_result.gcs_result:
    print(f"Recommendation metrics: {gcs_result.output_uri}")

# Export user behavior data for cohort analysis
# Example 6: export user behavior data for cohort analysis.
cohort_config = retail.OutputConfig(
    bigquery_destination=retail.BigQueryDestination(
        project_id="my-analytics-project",
        dataset_id="user_analytics",
        table_id="user_cohort_data",
    )
)

# Signed-in users only, covering the full user journey.
user_behavior_filter = '''
user_info.user_id != "" AND
event_time >= "2024-01-01T00:00:00Z" AND
event_type: ANY("page-view", "search", "add-to-cart", "purchase-complete")
'''

request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=cohort_config,
    filter=user_behavior_filter,
)

operation = client.export_analytics_metrics(request=request)
result = operation.result()
print("User behavior data exported for cohort analysis")

# Export revenue analytics segmented by product category
# Example 7: export revenue analytics segmented by product category.
revenue_config = retail.OutputConfig(
    bigquery_destination=retail.BigQueryDestination(
        project_id="my-analytics-project",
        dataset_id="revenue_analytics",
        table_id="category_revenue_2024",
    )
)

# Purchase events with positive revenue in the selected categories.
revenue_filter = '''
event_type = "purchase-complete" AND
purchase_transaction.revenue > 0 AND
event_time >= "2024-01-01T00:00:00Z" AND
product_details.product.categories: ANY("Electronics", "Clothing", "Books", "Home")
'''

request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=revenue_config,
    filter=revenue_filter,
)

operation = client.export_analytics_metrics(request=request)
result = operation.result()
print("Revenue analytics by category exported")

# After the export completes, the BigQuery table can be queried for insights:
print("Sample BigQuery analysis queries:")
print("""
-- Total revenue by category
SELECT
UNNEST(product_details.product.categories) as category,
SUM(purchase_transaction.revenue) as total_revenue,
COUNT(*) as transaction_count
FROM `my-analytics-project.revenue_analytics.category_revenue_2024`
GROUP BY category
ORDER BY total_revenue DESC;
-- Monthly revenue trends
SELECT
EXTRACT(MONTH FROM event_time) as month,
SUM(purchase_transaction.revenue) as monthly_revenue,
COUNT(DISTINCT user_info.user_id) as unique_customers
FROM `my-analytics-project.revenue_analytics.category_revenue_2024`
GROUP BY month
ORDER BY month;
""")

# Export with comprehensive error handling
# Example 8: export with comprehensive error handling and progress monitoring.
import time

# NOTE(review): errors_config is only used in the status message below; the
# visible ExportAnalyticsMetricsRequest has no field to attach it to —
# confirm against the installed library version.
errors_config = retail.ExportErrorsConfig(
    gcs_prefix="gs://my-analytics-bucket/export-errors/"
)

output_config = retail.OutputConfig(
    bigquery_destination=retail.BigQueryDestination(
        project_id="my-analytics-project",
        dataset_id="retail_analytics",
        table_id="user_events_with_errors",
    )
)

request = retail.ExportAnalyticsMetricsRequest(
    catalog="projects/my-project/locations/global/catalogs/default_catalog",
    output_config=output_config,
    filter='event_time >= "2024-01-01T00:00:00Z"',
)

operation = client.export_analytics_metrics(request=request)

# Poll the long-running operation instead of blocking on result() right away.
while not operation.done():
    print("Export in progress...")
    time.sleep(30)
result = operation.result()

# Report errors and partial failures sampled by the service.
if result.error_samples:
    print(f"Export completed with {len(result.error_samples)} errors:")
    for i, error in enumerate(result.error_samples[:5]):
        print(f"Error {i+1}: {error.message}")
    print(f"Error details saved to: {errors_config.gcs_prefix}")
else:
    print("Export completed successfully with no errors")

# Display export results for whichever destination was used.
if result.output_result.bigquery_result:
    for bq_result in result.output_result.bigquery_result:
        print(f"BigQuery table created: {bq_result.dataset_id}.{bq_result.table_id}")
if result.output_result.gcs_result:
    total_bytes = sum(gcs.bytes_count for gcs in result.output_result.gcs_result)
    print(f"Total data exported: {total_bytes:,} bytes")
    for gcs_result in result.output_result.gcs_result:
        print(f"File: {gcs_result.output_uri} ({gcs_result.bytes_count:,} bytes)")

# Install with Tessl CLI
npx tessl i tessl/pypi-google-cloud-retail