Python client library for managing BigQuery Data Transfer Service operations and scheduling data transfers from partner SaaS applications.
---
Primary client interfaces for interacting with Google Cloud BigQuery Data Transfer Service. Both synchronous and asynchronous clients provide identical functionality with comprehensive CRUD operations for transfer configurations and monitoring capabilities.
The main synchronous client class providing access to all BigQuery Data Transfer Service operations.
class DataTransferServiceClient:
def __init__(
self,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Optional[Union[str, DataTransferServiceTransport]] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
):
"""
Instantiate the data transfer service client.
Args:
credentials: The authorization credentials to attach to requests.
transport: The transport to use for API calls.
client_options: Client options for configuring requests.
client_info: Information about the client library.
"""@property
def transport(self) -> DataTransferServiceTransport:
"""Returns the transport used by the client instance."""
@property
def api_endpoint(self) -> str:
"""Return the API endpoint used by the client instance."""
@property
def universe_domain(self) -> str:
"""Return the universe domain used by the client instance."""@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""
Create a client from service account info dict.
Args:
info: Service account info in Google format.
Returns:
DataTransferServiceClient: The constructed client.
"""
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""
Create a client from a service account json file.
Args:
filename: Path to service account json file.
Returns:
DataTransferServiceClient: The constructed client.
"""def __enter__(self) -> "DataTransferServiceClient":
"""Enter context manager."""
def __exit__(self, type, value, traceback):
"""Exit context manager."""The asynchronous version of the client providing the same functionality with async/await support.
class DataTransferServiceAsyncClient:
def __init__(
self,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Optional[Union[str, DataTransferServiceAsyncTransport]] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
):
"""
Instantiate the async data transfer service client.
Args:
credentials: The authorization credentials to attach to requests.
transport: The transport to use for API calls.
client_options: Client options for configuring requests.
client_info: Information about the client library.
"""All methods in DataTransferServiceAsyncClient mirror those in DataTransferServiceClient but return coroutines that must be awaited.
from google.cloud import bigquery_datatransfer

# Using default credentials
client = bigquery_datatransfer.DataTransferServiceClient()

# Using service account file
client = bigquery_datatransfer.DataTransferServiceClient.from_service_account_file(
    "path/to/service-account.json"
)

# Using service account info
service_account_info = {
    "type": "service_account",
    "project_id": "your-project",
    # ... other service account fields
}
client = bigquery_datatransfer.DataTransferServiceClient.from_service_account_info(
    service_account_info
)

import asyncio
from google.cloud import bigquery_datatransfer


async def main():
    """Demonstrate the async client: list data sources, then close the transport."""
    # Create async client
    client = bigquery_datatransfer.DataTransferServiceAsyncClient()

    # Use async methods
    parent = f"projects/{project_id}/locations/{location}"
    response = await client.list_data_sources(parent=parent)
    async for data_source in response:
        print(f"Data Source: {data_source.display_name}")

    # Clean up
    await client.transport.close()


# Run async function
asyncio.run(main())

from google.cloud import bigquery_datatransfer
# Automatic resource cleanup
with bigquery_datatransfer.DataTransferServiceClient() as client:
parent = f"projects/{project_id}/locations/{location}"
data_sources = client.list_data_sources(parent=parent)
for data_source in data_sources:
print(f"Data Source: {data_source.display_name}")Both client classes provide static methods for constructing and parsing resource paths:
@staticmethod
def data_source_path(project: str, location: str, data_source: str) -> str:
"""Return a fully-qualified data source string."""
@staticmethod
def parse_data_source_path(path: str) -> Dict[str, str]:
"""Parse a data source path into its component segments."""
@staticmethod
def transfer_config_path(project: str, location: str, transfer_config: str) -> str:
"""Return a fully-qualified transfer config string."""
@staticmethod
def parse_transfer_config_path(path: str) -> Dict[str, str]:
"""Parse a transfer config path into its component segments."""
@staticmethod
def run_path(project: str, location: str, transfer_config: str, run: str) -> str:
"""Return a fully-qualified run string."""
@staticmethod
def parse_run_path(path: str) -> Dict[str, str]:
"""Parse a run path into its component segments."""@staticmethod
def common_billing_account_path(billing_account: str) -> str:
"""Return a fully-qualified billing account string."""
@staticmethod
def common_folder_path(folder: str) -> str:
"""Return a fully-qualified folder string."""
@staticmethod
def common_organization_path(organization: str) -> str:
"""Return a fully-qualified organization string."""
@staticmethod
def common_project_path(project: str) -> str:
"""Return a fully-qualified project string."""
@staticmethod
def common_location_path(project: str, location: str) -> str:
"""Return a fully-qualified location string."""Install with Tessl CLI
npx tessl i tessl/pypi-google-cloud-bigquery-datatransfer