
# Firebase Integration

Google Firebase integration for Firestore database operations, enabling NoSQL database interactions in data pipelines for real-time applications and document-based data management.

## Capabilities

### Cloud Firestore

Google Cloud Firestore NoSQL document database integration for data storage, retrieval, and export operations.

```python { .api }
class CloudFirestoreHook(GoogleBaseHook):
    """
    Hook for Google Cloud Firestore NoSQL document database.

    Provides methods for database operations including document management,
    collection queries, and database export functionality.
    """
    def __init__(
        self,
        gcp_conn_id: str = "google_cloud_default",
        **kwargs
    ): ...

    def get_conn(self): ...

    def export_documents(
        self,
        body: Dict[str, Any],
        database: str = "(default)",
        project_id: Optional[str] = None
    ): ...

    def import_documents(
        self,
        body: Dict[str, Any],
        database: str = "(default)",
        project_id: Optional[str] = None
    ): ...

    def list_collection_ids(
        self,
        parent: str,
        database: str = "(default)",
        project_id: Optional[str] = None
    ): ...

    def list_documents(
        self,
        parent: str,
        collection_id: str,
        page_size: Optional[int] = None,
        order_by: Optional[str] = None,
        database: str = "(default)",
        project_id: Optional[str] = None
    ): ...

    def run_query(
        self,
        body: Dict[str, Any],
        database: str = "(default)",
        project_id: Optional[str] = None
    ): ...


class CloudFirestoreExportDatabaseOperator(BaseOperator):
    """
    Exports a Cloud Firestore database to Google Cloud Storage.

    Args:
        project_id (str): Google Cloud project ID
        database_id (str): Firestore database ID (default: "(default)")
        body (Dict[str, Any]): Export request body configuration
        gcp_conn_id (str): Connection ID for Google Cloud Platform

    Returns:
        Operation result with export details and GCS output location
    """
    def __init__(
        self,
        project_id: Optional[str] = None,
        database_id: str = "(default)",
        body: Optional[Dict[str, Any]] = None,
        gcp_conn_id: str = "google_cloud_default",
        **kwargs
    ): ...
```
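
Beyond the export operator, the hook can be called directly from a Python task for ad-hoc reads. Below is a minimal sketch built on the hook methods documented above; the `parent` path follows the Firestore REST API resource naming scheme, and `my-firebase-project` and the `users` collection are placeholder assumptions:

```python
from airflow.providers.google.firebase.hooks.firestore import CloudFirestoreHook

def list_users(**context):
    hook = CloudFirestoreHook(gcp_conn_id='google_cloud_default')

    # REST-style resource path to the database's documents root (placeholder project)
    parent = 'projects/my-firebase-project/databases/(default)/documents'

    # Enumerate top-level collections
    collection_ids = hook.list_collection_ids(parent=parent)

    # Structured query in the Firestore REST API body format
    results = hook.run_query(
        body={
            'structuredQuery': {
                'from': [{'collectionId': 'users'}],
                'limit': 10,
            }
        },
        project_id='my-firebase-project',
    )
    return collection_ids, results
```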

## Usage Examples

### Database Export to GCS

```python
from airflow import DAG
from airflow.providers.google.firebase.operators.firestore import CloudFirestoreExportDatabaseOperator
from datetime import datetime

dag = DAG(
    'firestore_backup',
    default_args={'start_date': datetime(2023, 1, 1)},
    schedule_interval='@daily',
    catchup=False
)

# Export the Firestore database to GCS, partitioned by execution date
export_firestore = CloudFirestoreExportDatabaseOperator(
    task_id='export_firestore',
    project_id='my-firebase-project',
    database_id='(default)',
    body={
        'outputUriPrefix': 'gs://firestore-backups/{{ ds }}/',
        'collectionIds': ['users', 'orders', 'products']  # Optional: export only these collections
    },
    gcp_conn_id='google_cloud_default',
    dag=dag
)
```
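
The hook's `import_documents` is the inverse of this export. A hedged sketch of a restore task using `PythonOperator`, assuming the same `dag`; the `inputUriPrefix` must point at a completed export, and the path here is a placeholder:

```python
from airflow.operators.python import PythonOperator
from airflow.providers.google.firebase.hooks.firestore import CloudFirestoreHook

def restore_firestore(**context):
    hook = CloudFirestoreHook(gcp_conn_id='google_cloud_default')
    hook.import_documents(
        body={'inputUriPrefix': 'gs://firestore-backups/2023-01-01/'},  # placeholder export location
        project_id='my-firebase-project',
    )

restore = PythonOperator(
    task_id='restore_firestore',
    python_callable=restore_firestore,
    dag=dag,
)
```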

### Complete Backup Pipeline

```python
from airflow import DAG
from airflow.providers.google.firebase.operators.firestore import CloudFirestoreExportDatabaseOperator
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator
from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor
from datetime import datetime

dag = DAG(
    'firestore_backup_pipeline',
    default_args={'start_date': datetime(2023, 1, 1)},
    schedule_interval='@weekly',
    catchup=False
)

# Ensure the backup bucket exists
create_backup_bucket = GCSCreateBucketOperator(
    task_id='create_backup_bucket',
    bucket_name='firestore-backups-{{ ds_nodash }}',
    project_id='my-firebase-project',
    location='US',
    dag=dag
)

# Export the Firestore database
export_database = CloudFirestoreExportDatabaseOperator(
    task_id='export_database',
    project_id='my-firebase-project',
    body={
        'outputUriPrefix': 'gs://firestore-backups-{{ ds_nodash }}/full-backup/',
    },
    dag=dag
)

# Verify export completion
verify_export = GCSObjectExistenceSensor(
    task_id='verify_export',
    bucket='firestore-backups-{{ ds_nodash }}',
    object='full-backup/',
    timeout=1800,  # 30 minutes
    poke_interval=60,
    dag=dag
)

create_backup_bucket >> export_database >> verify_export
```
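
One caveat: GCS has no real directories, so `GCSObjectExistenceSensor` succeeds only if an object literally named `full-backup/` exists, while Firestore export output is written as files under that prefix. If the goal is simply to confirm that export output has landed, a prefix-based sensor is the safer check. A sketch, assuming the same DAG:

```python
from airflow.providers.google.cloud.sensors.gcs import GCSObjectsWithPrefixExistenceSensor

# Succeeds as soon as any object appears under the export prefix
verify_export = GCSObjectsWithPrefixExistenceSensor(
    task_id='verify_export',
    bucket='firestore-backups-{{ ds_nodash }}',
    prefix='full-backup/',
    timeout=1800,  # 30 minutes
    poke_interval=60,
    dag=dag,
)
```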

## Types

```python { .api }
from typing import Dict, List, Optional, Any
from airflow.models import BaseOperator

# Firestore types
DatabaseId = str
CollectionId = str
DocumentId = str
FirestoreQuery = Dict[str, Any]
ExportRequest = Dict[str, Any]
ImportRequest = Dict[str, Any]
OperationResult = Dict[str, Any]
```
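
These aliases are informational rather than enforced. A short sketch of how they might annotate a request-builder helper; `build_export_request` is a hypothetical function, not part of the provider, and the field names match the export examples above:

```python
def build_export_request(
    output_uri_prefix: str,
    collection_ids: Optional[List[CollectionId]] = None,
) -> ExportRequest:
    # Assemble an exportDocuments request body using the REST API field names
    request: ExportRequest = {'outputUriPrefix': output_uri_prefix}
    if collection_ids:
        request['collectionIds'] = collection_ids
    return request
```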