Python client library for managing BigQuery Data Transfer Service operations and scheduling data transfers from partner SaaS applications.
Complete lifecycle management of data transfer configurations, including creation, updates, deletion, and listing operations with support for various scheduling options and data source parameters.
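As a quick orientation before the per-method reference, here is a minimal sketch of constructing the client and building the parent resource path used by the calls below; the "my-project" and "us" values are placeholder assumptions, not real resources.
from google.cloud import bigquery_datatransfer

# The client authenticates with Application Default Credentials by default.
client = bigquery_datatransfer.DataTransferServiceClient()

# Build "projects/{project}/locations/{location}" with the generated path helper.
parent = client.common_location_path("my-project", "us")
print(parent)  # projects/my-project/locations/us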
Creates a new data transfer configuration.
def create_transfer_config(
self,
request: Optional[Union[CreateTransferConfigRequest, dict]] = None,
*,
parent: Optional[str] = None,
transfer_config: Optional[TransferConfig] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> TransferConfig:
"""
Creates a new data transfer configuration.
Args:
request: The request object containing transfer config details.
parent: Required. The BigQuery project id where the transfer configuration should be created, in the format projects/{project_id}/locations/{location_id}.
transfer_config: Required. Data transfer configuration to create.
retry: Designation of what errors should be retried.
timeout: The timeout for this request.
metadata: Strings which should be sent along with the request.
Returns:
TransferConfig: The created transfer configuration.
"""Updates a data transfer configuration.
def update_transfer_config(
self,
request: Optional[Union[UpdateTransferConfigRequest, dict]] = None,
*,
transfer_config: Optional[TransferConfig] = None,
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> TransferConfig:
"""
Updates a data transfer configuration.
Args:
request: The request object containing transfer config updates.
transfer_config: Required. Data transfer configuration to update.
update_mask: Required. Field mask specifying which fields to update.
retry: Designation of what errors should be retried.
timeout: The timeout for this request.
metadata: Strings which should be sent along with the request.
Returns:
TransferConfig: The updated transfer configuration.
"""Returns information about a transfer config.
def get_transfer_config(
self,
request: Optional[Union[GetTransferConfigRequest, dict]] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> TransferConfig:
"""
Returns information about a transfer config.
Args:
request: The request object containing transfer config name.
name: Required. The resource name of the transfer config to retrieve, in the format projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}.
retry: Designation of what errors should be retried.
timeout: The timeout for this request.
metadata: Strings which should be sent along with the request.
Returns:
TransferConfig: The requested transfer configuration.
"""Deletes a data transfer configuration.
def delete_transfer_config(
self,
request: Optional[Union[DeleteTransferConfigRequest, dict]] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
"""
Deletes a data transfer configuration.
Args:
request: The request object containing transfer config name.
name: Required. The resource name of the transfer config to delete, in the format projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}.
retry: Designation of what errors should be retried.
timeout: The timeout for this request.
metadata: Strings which should be sent along with the request.
"""Returns information about running and completed transfer configs.
def list_transfer_configs(
self,
request: Optional[Union[ListTransferConfigsRequest, dict]] = None,
*,
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListTransferConfigsPager:
"""
Returns information about running and completed transfer configs.
Args:
request: The request object containing parent location.
parent: Required. The BigQuery project id for which transfer configs should be returned, in the format projects/{project_id}/locations/{location_id}.
retry: Designation of what errors should be retried.
timeout: The timeout for this request.
metadata: Strings which should be sent along with the request.
Returns:
ListTransferConfigsPager: An iterable of transfer configurations; iterating the pager transparently fetches additional pages.
"""class CreateTransferConfigRequest:
"""
A request to create a data transfer configuration.
Attributes:
parent (str): Required. The BigQuery project id where the transfer configuration should be created.
Format: projects/{project_id}/locations/{location_id}
transfer_config (TransferConfig): Required. Data transfer configuration to create.
authorization_code (str): Optional authorization code provided by the transfer service.
version_info (str): Optional version info to identify the transfer configuration template.
service_account_name (str): Optional service account name.
"""
parent: str
transfer_config: TransferConfig
authorization_code: str
version_info: str
service_account_name: str
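As an alternative to the flattened parent/transfer_config arguments shown in the usage examples further down, the request message can be constructed explicitly and passed via request=. A minimal sketch, assuming placeholder project, location, dataset, and query values:
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()
# Build the request object explicitly rather than passing flattened arguments.
request = bigquery_datatransfer.CreateTransferConfigRequest(
    parent="projects/my-project/locations/us",
    transfer_config=bigquery_datatransfer.TransferConfig(
        display_name="Nightly load",
        data_source_id="scheduled_query",
        destination_dataset_id="my_dataset",
        schedule="every 24 hours",
        params={"query": "SELECT 1"},
    ),
)
response = client.create_transfer_config(request=request)
print(response.name)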
class UpdateTransferConfigRequest:
"""
A request to update a data transfer configuration.
Attributes:
transfer_config (TransferConfig): Required. Data transfer configuration to update.
authorization_code (str): Optional authorization code provided by the transfer service.
update_mask (FieldMask): Required. Field mask specifying the fields to be updated.
version_info (str): Optional version info to identify the transfer configuration template.
service_account_name (str): Optional service account name.
"""
transfer_config: TransferConfig
authorization_code: str
update_mask: field_mask_pb2.FieldMask
version_info: str
service_account_name: str
class GetTransferConfigRequest:
"""
A request to get information about a transfer config.
Attributes:
name (str): Required. The resource name of the transfer config to retrieve.
Format: projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}
"""
name: str
class DeleteTransferConfigRequest:
"""
A request to delete a data transfer configuration.
Attributes:
name (str): Required. The resource name of the transfer config to delete.
Format: projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}
"""
name: str
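Get and delete follow the same request-object pattern; a short sketch, where the transfer config resource name below is a placeholder assumption:
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()
# Placeholder resource name; substitute a real transfer config path.
name = "projects/my-project/locations/us/transferConfigs/1234"

config = client.get_transfer_config(
    request=bigquery_datatransfer.GetTransferConfigRequest(name=name)
)
print(config.display_name)

client.delete_transfer_config(
    request=bigquery_datatransfer.DeleteTransferConfigRequest(name=name)
)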
class ListTransferConfigsRequest:
"""
A request message for ListTransferConfigs.
Attributes:
parent (str): Required. The BigQuery project id for which transfer configs should be returned.
Format: projects/{project_id}/locations/{location_id}
data_source_ids (Sequence[str]): When specified, only configurations of requested data sources are returned.
page_token (str): Pagination token.
page_size (int): Page size. If not specified, the default is the maximum allowed value of 1,000 results.
"""
parent: str
data_source_ids: Sequence[str]
page_token: str
page_size: int
class ListTransferConfigsResponse:
"""
The returned list of pipelines in the project.
Attributes:
transfer_configs (Sequence[TransferConfig]): Output only. The stored pipeline transfer configurations.
next_page_token (str): Output only. Pagination token that can be passed as page_token to request the next page of results.
"""
transfer_configs: Sequence[TransferConfig]
next_page_token: str
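In practice the pager returned by list_transfer_configs handles this token automatically: iterating it fetches successive pages, and the raw responses (with their next_page_token) are available through its pages attribute. A brief sketch, assuming a placeholder parent path and data source id:
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()
request = bigquery_datatransfer.ListTransferConfigsRequest(
    parent="projects/my-project/locations/us",
    data_source_ids=["scheduled_query"],  # optional filter by data source
    page_size=100,
)
pager = client.list_transfer_configs(request=request)

# Iterate per page to inspect the raw responses and their tokens.
for page in pager.pages:
    print(f"next_page_token: {page.next_page_token!r}")
    for config in page.transfer_configs:
        print(config.name)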
from google.cloud import bigquery_datatransfer
from google.protobuf import struct_pb2
client = bigquery_datatransfer.DataTransferServiceClient()
# Create parameters as a struct
params = struct_pb2.Struct()
params.update({
"query": "SELECT * FROM `project.dataset.source_table` WHERE DATE(_PARTITIONTIME) = @run_date",
"destination_table_name_template": "destination_table_{run_date}",
"use_legacy_sql": False,
})
# Create transfer configuration
transfer_config = {
"display_name": "Daily ETL Transfer",
"data_source_id": "scheduled_query",
"destination_dataset_id": "my_dataset",
"schedule": "every day 08:00",
"params": params,
"email_preferences": {
"enable_failure_email": True
}
}
parent = f"projects/{project_id}/locations/{location}"
response = client.create_transfer_config(
parent=parent,
transfer_config=transfer_config
)
print(f"Created transfer config: {response.display_name}")
print(f"Config ID: {response.name}")from google.cloud import bigquery_datatransfer
from google.cloud import bigquery_datatransfer
from google.protobuf import field_mask_pb2
client = bigquery_datatransfer.DataTransferServiceClient()
# Define what fields to update
update_mask = field_mask_pb2.FieldMask()
update_mask.paths.extend(["display_name", "schedule", "disabled"])
# Updated configuration
transfer_config = {
"name": f"projects/{project_id}/locations/{location}/transferConfigs/{config_id}",
"display_name": "Updated Daily ETL Transfer",
"schedule": "every day 10:00",
"disabled": False
}
response = client.update_transfer_config(
transfer_config=transfer_config,
update_mask=update_mask
)
print(f"Updated transfer config: {response.display_name}")from google.cloud import bigquery_datatransfer
client = bigquery_datatransfer.DataTransferServiceClient()
# List all transfer configs
parent = f"projects/{project_id}/locations/{location}"
response = client.list_transfer_configs(parent=parent)
print("Transfer configurations:")
for config in response:
print(f" {config.display_name}")
print(f" ID: {config.name}")
print(f" Data Source: {config.data_source_id}")
print(f" Schedule: {config.schedule}")
print(f" Disabled: {config.disabled}")
print(f" State: {config.state}")
# List configs for specific data sources only
# data_source_ids is not a flattened argument, so pass it via the request.
response = client.list_transfer_configs(
    request={
        "parent": parent,
        "data_source_ids": ["scheduled_query", "google_ads"],
    }
)
print("Scheduled query and Google Ads configs:")
for config in response:
print(f" {config.display_name} ({config.data_source_id})")from google.cloud import bigquery_datatransfer
client = bigquery_datatransfer.DataTransferServiceClient()
# Get specific transfer config
config_name = f"projects/{project_id}/locations/{location}/transferConfigs/{config_id}"
config = client.get_transfer_config(name=config_name)
print(f"Transfer Config: {config.display_name}")
print(f"Data Source: {config.data_source_id}")
print(f"Destination Dataset: {config.destination_dataset_id}")
print(f"Schedule: {config.schedule}")
print(f"State: {config.state}")
print(f"User ID: {config.user_id}")
# Print parameters
print("Parameters:")
for key, value in config.params.items():
print(f" {key}: {value}")from google.cloud import bigquery_datatransfer
client = bigquery_datatransfer.DataTransferServiceClient()
# Delete transfer config
config_name = f"projects/{project_id}/locations/{location}/transferConfigs/{config_id}"
client.delete_transfer_config(name=config_name)
print(f"Deleted transfer config: {config_name}")Install with Tessl CLI
npx tessl i tessl/pypi-google-cloud-bigquery-datatransfer