or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

data-sources.mddata-types.mdindex.mdscheduling.mdservice-clients.mdtransfer-configs.mdtransfer-runs.md

transfer-configs.mddocs/

0

# Transfer Configuration Management

1

2

Complete lifecycle management of data transfer configurations, including creation, updates, deletion, and listing operations with support for various scheduling options and data source parameters.

3

4

## Capabilities

5

6

### Create Transfer Configuration

7

8

Creates a new data transfer configuration.

9

10

```python { .api }

11

def create_transfer_config(

12

self,

13

request: Optional[Union[CreateTransferConfigRequest, dict]] = None,

14

*,

15

parent: Optional[str] = None,

16

transfer_config: Optional[TransferConfig] = None,

17

retry: OptionalRetry = gapic_v1.method.DEFAULT,

18

timeout: Union[float, object] = gapic_v1.method.DEFAULT,

19

metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),

20

) -> TransferConfig:

21

"""

22

Creates a new data transfer configuration.

23

24

Args:

25

request: The request object containing transfer config details.

26

parent: Required. The BigQuery project id where the transfer configuration should be created.

27

transfer_config: Required. Data transfer configuration to create.

28

retry: Designation of what errors should be retried.

29

timeout: The timeout for this request.

30

metadata: Strings which should be sent along with the request.

31

32

Returns:

33

TransferConfig: The created transfer configuration.

34

"""

35

```

36

37

### Update Transfer Configuration

38

39

Updates a data transfer configuration.

40

41

```python { .api }

42

def update_transfer_config(

43

self,

44

request: Optional[Union[UpdateTransferConfigRequest, dict]] = None,

45

*,

46

transfer_config: Optional[TransferConfig] = None,

47

update_mask: Optional[field_mask_pb2.FieldMask] = None,

48

retry: OptionalRetry = gapic_v1.method.DEFAULT,

49

timeout: Union[float, object] = gapic_v1.method.DEFAULT,

50

metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),

51

) -> TransferConfig:

52

"""

53

Updates a data transfer configuration.

54

55

Args:

56

request: The request object containing transfer config updates.

57

transfer_config: Required. Data transfer configuration to update.

58

update_mask: Required. Field mask specifying which fields to update.

59

retry: Designation of what errors should be retried.

60

timeout: The timeout for this request.

61

metadata: Strings which should be sent along with the request.

62

63

Returns:

64

TransferConfig: The updated transfer configuration.

65

"""

66

```

67

68

### Get Transfer Configuration

69

70

Returns information about a transfer config.

71

72

```python { .api }

73

def get_transfer_config(

74

self,

75

request: Optional[Union[GetTransferConfigRequest, dict]] = None,

76

*,

77

name: Optional[str] = None,

78

retry: OptionalRetry = gapic_v1.method.DEFAULT,

79

timeout: Union[float, object] = gapic_v1.method.DEFAULT,

80

metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),

81

) -> TransferConfig:

82

"""

83

Returns information about a transfer config.

84

85

Args:

86

request: The request object containing transfer config name.

87

name: Required. The name of the transfer configuration to retrieve.

88

retry: Designation of what errors should be retried.

89

timeout: The timeout for this request.

90

metadata: Strings which should be sent along with the request.

91

92

Returns:

93

TransferConfig: The requested transfer configuration.

94

"""

95

```

96

97

### Delete Transfer Configuration

98

99

Deletes a data transfer configuration.

100

101

```python { .api }

102

def delete_transfer_config(

103

self,

104

request: Optional[Union[DeleteTransferConfigRequest, dict]] = None,

105

*,

106

name: Optional[str] = None,

107

retry: OptionalRetry = gapic_v1.method.DEFAULT,

108

timeout: Union[float, object] = gapic_v1.method.DEFAULT,

109

metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),

110

) -> None:

111

"""

112

Deletes a data transfer configuration.

113

114

Args:

115

request: The request object containing transfer config name.

116

name: Required. The name of the transfer configuration to delete.

117

retry: Designation of what errors should be retried.

118

timeout: The timeout for this request.

119

metadata: Strings which should be sent along with the request.

120

"""

121

```

122

123

### List Transfer Configurations

124

125

Returns information about running and completed transfer configs.

126

127

```python { .api }

128

def list_transfer_configs(

129

self,

130

request: Optional[Union[ListTransferConfigsRequest, dict]] = None,

131

*,

132

parent: Optional[str] = None,

133

retry: OptionalRetry = gapic_v1.method.DEFAULT,

134

timeout: Union[float, object] = gapic_v1.method.DEFAULT,

135

metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),

136

) -> pagers.ListTransferConfigsPager:

137

"""

138

Returns information about running and completed transfer configs.

139

140

Args:

141

request: The request object containing parent location.

142

parent: Required. The BigQuery project id for which transfer configs should be returned.

143

retry: Designation of what errors should be retried.

144

timeout: The timeout for this request.

145

metadata: Strings which should be sent along with the request.

146

147

Returns:

148

ListTransferConfigsPager: An iterable pager of transfer configurations; iterating over it automatically fetches subsequent result pages.

149

"""

150

```

151

152

## Request Types

153

154

```python { .api }

155

class CreateTransferConfigRequest:

156

"""

157

A request to create a data transfer configuration.

158

159

Attributes:

160

parent (str): Required. The BigQuery project id where the transfer configuration should be created.

161

Format: projects/{project_id}/locations/{location_id}

162

transfer_config (TransferConfig): Required. Data transfer configuration to create.

163

authorization_code (str): Optional authorization code provided by the transfer service.

164

version_info (str): Optional version info to identify the transfer configuration template.

165

service_account_name (str): Optional service account name.

166

"""

167

parent: str

168

transfer_config: TransferConfig

169

authorization_code: str

170

version_info: str

171

service_account_name: str

172

173

class UpdateTransferConfigRequest:

174

"""

175

A request to update a data transfer configuration.

176

177

Attributes:

178

transfer_config (TransferConfig): Required. Data transfer configuration to update.

179

authorization_code (str): Optional authorization code provided by the transfer service.

180

update_mask (FieldMask): Required. Field mask specifying the fields to be updated.

181

version_info (str): Optional version info to identify the transfer configuration template.

182

service_account_name (str): Optional service account name.

183

"""

184

transfer_config: TransferConfig

185

authorization_code: str

186

update_mask: field_mask_pb2.FieldMask

187

version_info: str

188

service_account_name: str

189

190

class GetTransferConfigRequest:

191

"""

192

A request to get information about a transfer config.

193

194

Attributes:

195

name (str): Required. The name of the transfer configuration to retrieve.

196

Format: projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}

197

"""

198

name: str

199

200

class DeleteTransferConfigRequest:

201

"""

202

A request to delete a data transfer configuration.

203

204

Attributes:

205

name (str): Required. The name of the transfer configuration to delete.

206

Format: projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}

207

"""

208

name: str

209

210

class ListTransferConfigsRequest:

211

"""

212

A request message for ListTransferConfigs.

213

214

Attributes:

215

parent (str): Required. The BigQuery project id for which transfer configs should be returned.

216

Format: projects/{project_id}/locations/{location_id}

217

data_source_ids (Sequence[str]): When specified, only configurations of requested data sources are returned.

218

page_token (str): Pagination token.

219

page_size (int): Page size. The default page size is the maximum value of 1000 results.

220

"""

221

parent: str

222

data_source_ids: Sequence[str]

223

page_token: str

224

page_size: int

225

```

226

227

## Response Types

228

229

```python { .api }

230

class ListTransferConfigsResponse:

231

"""

232

The returned list of pipelines in the project.

233

234

Attributes:

235

transfer_configs (Sequence[TransferConfig]): Output only. The stored pipeline transfer configurations.

236

next_page_token (str): Output only. The next-pagination token.

237

"""

238

transfer_configs: Sequence[TransferConfig]

239

next_page_token: str

240

```

241

242

## Usage Examples

243

244

### Create a Transfer Configuration

245

246

```python

247

from google.cloud import bigquery_datatransfer

248

from google.protobuf import struct_pb2

249

250

client = bigquery_datatransfer.DataTransferServiceClient()

251

252

# Create parameters as a struct

253

params = struct_pb2.Struct()

254

params.update({

255

"query": "SELECT * FROM `project.dataset.source_table` WHERE DATE(_PARTITIONTIME) = @run_date",

256

"destination_table_name_template": "destination_table_{run_date}",

257

"use_legacy_sql": False,

258

})

259

260

# Create transfer configuration

261

transfer_config = {

262

"display_name": "Daily ETL Transfer",

263

"data_source_id": "scheduled_query",

264

"destination_dataset_id": "my_dataset",

265

"schedule": "every day 08:00",

266

"params": params,

267

"email_preferences": {

268

"enable_failure_email": True

269

}

270

}

271

272

parent = f"projects/{project_id}/locations/{location}"

273

response = client.create_transfer_config(

274

parent=parent,

275

transfer_config=transfer_config

276

)

277

278

print(f"Created transfer config: {response.display_name}")

279

print(f"Config ID: {response.name}")

280

```

281

282

### Update a Transfer Configuration

283

284

```python

285

from google.cloud import bigquery_datatransfer

286

from google.protobuf import field_mask_pb2

287

288

client = bigquery_datatransfer.DataTransferServiceClient()

289

290

# Define what fields to update

291

update_mask = field_mask_pb2.FieldMask()

292

update_mask.paths.extend(["display_name", "schedule", "disabled"])

293

294

# Updated configuration

295

transfer_config = {

296

"name": f"projects/{project_id}/locations/{location}/transferConfigs/{config_id}",

297

"display_name": "Updated Daily ETL Transfer",

298

"schedule": "every day 10:00",

299

"disabled": False

300

}

301

302

response = client.update_transfer_config(

303

transfer_config=transfer_config,

304

update_mask=update_mask

305

)

306

307

print(f"Updated transfer config: {response.display_name}")

308

```

309

310

### List Transfer Configurations

311

312

```python

313

from google.cloud import bigquery_datatransfer

314

315

client = bigquery_datatransfer.DataTransferServiceClient()

316

317

# List all transfer configs

318

parent = f"projects/{project_id}/locations/{location}"

319

response = client.list_transfer_configs(parent=parent)

320

321

print("Transfer configurations:")

322

for config in response:

323

print(f" {config.display_name}")

324

print(f" ID: {config.name}")

325

print(f" Data Source: {config.data_source_id}")

326

print(f" Schedule: {config.schedule}")

327

print(f" Disabled: {config.disabled}")

328

print(f" State: {config.state}")

329

330

# List configs for specific data sources only

331

response = client.list_transfer_configs(

332

parent=parent,

333

data_source_ids=["scheduled_query", "google_ads"]

334

)

335

336

print("Scheduled query and Google Ads configs:")

337

for config in response:

338

print(f" {config.display_name} ({config.data_source_id})")

339

```

340

341

### Get Transfer Configuration Details

342

343

```python

344

from google.cloud import bigquery_datatransfer

345

346

client = bigquery_datatransfer.DataTransferServiceClient()

347

348

# Get specific transfer config

349

config_name = f"projects/{project_id}/locations/{location}/transferConfigs/{config_id}"

350

config = client.get_transfer_config(name=config_name)

351

352

print(f"Transfer Config: {config.display_name}")

353

print(f"Data Source: {config.data_source_id}")

354

print(f"Destination Dataset: {config.destination_dataset_id}")

355

print(f"Schedule: {config.schedule}")

356

print(f"State: {config.state}")

357

print(f"User ID: {config.user_id}")

358

359

# Print parameters

360

print("Parameters:")

361

for key, value in config.params.items():

362

print(f" {key}: {value}")

363

```

364

365

### Delete Transfer Configuration

366

367

```python

368

from google.cloud import bigquery_datatransfer

369

370

client = bigquery_datatransfer.DataTransferServiceClient()

371

372

# Delete transfer config

373

config_name = f"projects/{project_id}/locations/{location}/transferConfigs/{config_id}"

374

client.delete_transfer_config(name=config_name)

375

376

print(f"Deleted transfer config: {config_name}")

377

```