# Data Source Management

Operations for discovering and managing available data sources, including listing supported sources, retrieving source configurations, and validating credentials for data source connections.

## Capabilities

### Get Data Source

Retrieves a supported data source configuration and its parameters.

```python { .api }
def get_data_source(
    self,
    request: Optional[Union[GetDataSourceRequest, dict]] = None,
    *,
    name: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> DataSource:
    """
    Retrieves a supported data source and returns its settings.

    Args:
        request: The request object containing the data source name.
        name: Required. The name of the resource requested.
        retry: Designation of what errors should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request.

    Returns:
        DataSource: The requested data source configuration.
    """
```

### List Data Sources

Lists supported data sources and returns their settings.

```python { .api }
def list_data_sources(
    self,
    request: Optional[Union[ListDataSourcesRequest, dict]] = None,
    *,
    parent: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListDataSourcesPager:
    """
    Lists supported data sources and returns their settings.

    Args:
        request: The request object containing the parent location.
        parent: Required. The BigQuery project id for which data sources should be returned.
        retry: Designation of what errors should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request.

    Returns:
        ListDataSourcesPager: An iterable of supported data sources; iterating resolves additional pages automatically.
    """
```

### Check Valid Credentials

Checks if credentials are valid for a data source.

```python { .api }
def check_valid_creds(
    self,
    request: Optional[Union[CheckValidCredsRequest, dict]] = None,
    *,
    name: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> CheckValidCredsResponse:
    """
    Returns true if valid credentials exist for the given data source and requesting user.

    Args:
        request: The request object containing data source name.
        name: Required. The data source in the form projects/{project_id}/dataSources/{data_source_id}.
        retry: Designation of what errors should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request.

    Returns:
        CheckValidCredsResponse: Whether credentials are valid.
    """
```

### Enroll Data Sources

Enrolls data sources in a user project.

```python { .api }
def enroll_data_sources(
    self,
    request: Optional[Union[EnrollDataSourcesRequest, dict]] = None,
    *,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
    """
    Enroll data sources in a user project.

    Args:
        request: The request object containing data sources to enroll.
        retry: Designation of what errors should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request.
    """
```

### Unenroll Data Sources

Unenrolls data sources in a user project.

```python { .api }
def unenroll_data_sources(
    self,
    request: Optional[Union[UnenrollDataSourcesRequest, dict]] = None,
    *,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
    """
    Unenroll data sources in a user project.

    Args:
        request: The request object containing data sources to unenroll.
        retry: Designation of what errors should be retried.
        timeout: The timeout for this request.
        metadata: Strings which should be sent along with the request.
    """
```

## Request Types

```python { .api }
class GetDataSourceRequest:
    """
    A request to retrieve a data source configuration.

    Attributes:
        name (str): Required. The name of the resource requested.
            Format: projects/{project}/locations/{location}/dataSources/{data_source}
    """
    name: str

class ListDataSourcesRequest:
    """
    Request message for ListDataSources.

    Attributes:
        parent (str): Required. The BigQuery project id for which data sources should be returned.
            Format: projects/{project_id}/locations/{location_id}
        page_token (str): Pagination token, which can be used to request a specific page.
        page_size (int): Page size. The default page size is the maximum value of 1000 results.
    """
    parent: str
    page_token: str
    page_size: int

class CheckValidCredsRequest:
    """
    A request to check if credentials are valid for the given data source.

    Attributes:
        name (str): Required. The data source in the form:
            projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}
    """
    name: str

class EnrollDataSourcesRequest:
    """
    A request to enroll data sources in a user project.

    Attributes:
        name (str): Required. The name of the project resource.
            Format: projects/{project_id}
        data_source_ids (Sequence[str]): Data sources that are enrolled.
    """
    name: str
    data_source_ids: Sequence[str]

class UnenrollDataSourcesRequest:
    """
    A request to unenroll data sources in a user project.

    Attributes:
        name (str): Required. The name of the project resource.
            Format: projects/{project_id}
        data_source_ids (Sequence[str]): Data sources that are unenrolled.
    """
    name: str
    data_source_ids: Sequence[str]
```
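
The flattened keyword arguments shown in the capability signatures are a convenience; the same calls accept explicit request objects. The sketch below is illustrative only: it assumes the request classes are exported from the top-level `google.cloud.bigquery_datatransfer` package (as in current client releases), and `my-project`/`us` are placeholder values.

```python
from google.cloud import bigquery_datatransfer

# Placeholder values for illustration; replace with your project and location.
project_id = "my-project"
location = "us"

client = bigquery_datatransfer.DataTransferServiceClient()

# Explicit request objects instead of flattened keyword arguments.
get_request = bigquery_datatransfer.GetDataSourceRequest(
    name=f"projects/{project_id}/locations/{location}/dataSources/scheduled_query"
)
data_source = client.get_data_source(request=get_request)
print(f"Retrieved: {data_source.display_name}")

list_request = bigquery_datatransfer.ListDataSourcesRequest(
    parent=f"projects/{project_id}/locations/{location}",
    page_size=50,  # optional; bounds results returned per page
)
for ds in client.list_data_sources(request=list_request):
    print(ds.data_source_id)
```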

## Response Types

```python { .api }
class ListDataSourcesResponse:
    """
    Returns list of supported data sources and their metadata.

    Attributes:
        data_sources (Sequence[DataSource]): List of supported data sources.
        next_page_token (str): Output only. The next-pagination token.
    """
    data_sources: Sequence[DataSource]
    next_page_token: str

class CheckValidCredsResponse:
    """
    A response indicating whether the credentials exist and are valid.

    Attributes:
        has_valid_creds (bool): If set to true, the credentials exist and are valid.
    """
    has_valid_creds: bool
```

## Usage Examples

### List Available Data Sources

```python
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()

# Placeholder values; replace with your project and location.
project_id = "my-project"
location = "us"

# List all data sources for a project/location
parent = f"projects/{project_id}/locations/{location}"
response = client.list_data_sources(parent=parent)

print("Available data sources:")
for data_source in response:
    print(f" {data_source.display_name} ({data_source.data_source_id})")
    print(f"   Description: {data_source.description}")
    print(f"   Transfer type: {data_source.transfer_type}")
    print(f"   Supports multiple transfers: {data_source.supports_multiple_transfers}")
```
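
When the `page_size`/`page_token` fields described under Request Types matter (for example, to bound how many results each RPC returns), the pager can also be consumed one page at a time. This is a minimal sketch assuming the standard GAPIC pager interface, where `pages` yields `ListDataSourcesResponse` objects; the parent path is a placeholder.

```python
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()

# Placeholder parent path for illustration.
parent = "projects/my-project/locations/us"

# Request smaller pages and walk them explicitly via the pager's `pages` iterator.
pager = client.list_data_sources(request={"parent": parent, "page_size": 10})
for page_number, page in enumerate(pager.pages, start=1):
    ids = [ds.data_source_id for ds in page.data_sources]
    print(f"Page {page_number}: {ids}")
```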

### Get Specific Data Source Details

```python
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()

# Placeholder values; replace with your project and location.
project_id = "my-project"
location = "us"

# Get details for a specific data source
data_source_name = f"projects/{project_id}/locations/{location}/dataSources/scheduled_query"
data_source = client.get_data_source(name=data_source_name)

print(f"Data Source: {data_source.display_name}")
print(f"Authorization Type: {data_source.authorization_type}")
print(f"Data Refresh Type: {data_source.data_refresh_type}")

print("Parameters:")
for param in data_source.parameters:
    print(f" {param.param_id}: {param.display_name}")
    print(f"   Type: {param.type_}")
    print(f"   Required: {param.required}")
    if param.description:
        print(f"   Description: {param.description}")
```

### Check Data Source Credentials

```python
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()

# Placeholder values; replace with your project and location.
project_id = "my-project"
location = "us"

# Check if credentials are valid for a data source
data_source_name = f"projects/{project_id}/locations/{location}/dataSources/google_ads"
response = client.check_valid_creds(name=data_source_name)

if response.has_valid_creds:
    print("Valid credentials exist for this data source")
else:
    print("No valid credentials found - authorization required")
```

### Enroll Data Sources

```python
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()

# Placeholder value; replace with your project id.
project_id = "my-project"

# Enroll specific data sources for a project
request = {
    "name": f"projects/{project_id}",
    "data_source_ids": ["google_ads", "google_analytics", "youtube_channel"],
}

client.enroll_data_sources(request=request)
print("Data sources enrolled successfully")
```
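
### Unenroll Data Sources

Unenrolling mirrors the enroll call above. This is a minimal sketch built from the `unenroll_data_sources` capability and the UnenrollDataSourcesRequest fields documented earlier; the project id and data source ids are placeholder values.

```python
from google.cloud import bigquery_datatransfer

client = bigquery_datatransfer.DataTransferServiceClient()

# Placeholder value; replace with your project id.
project_id = "my-project"

# Unenroll data sources that are no longer needed for this project
request = {
    "name": f"projects/{project_id}",
    "data_source_ids": ["youtube_channel"],
}

client.unenroll_data_sources(request=request)
print("Data sources unenrolled successfully")
```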