# HTTP Adapters

Custom HTTP transport adapters for handling rate limiting, retries, and connection management in Cloudant and CouchDB client connections.

## Capabilities

### Rate Limiting Adapter

Handle HTTP 429 "Too Many Requests" responses with automatic retry and exponential backoff.

```python { .api }
class Replay429Adapter(HTTPAdapter):
    """
    HTTP adapter that replays requests receiving 429 Too Many Requests responses.

    Implements an exponential backoff strategy in which the sleep duration
    doubles for each consecutive 429 response until the maximum number of
    retries is reached.
    """

    def __init__(self, retries=3, initialBackoff=0.25):
        """
        Initialize the rate limiting adapter.

        Parameters:
        - retries (int): Maximum number of retry attempts (default: 3)
        - initialBackoff (float): Initial backoff time in seconds (default: 0.25)

        Behavior:
        - Only retries on HTTP 429 status codes
        - Supports all CouchDB HTTP methods (GET, HEAD, PUT, POST, DELETE, COPY)
        - Uses exponential backoff: 0.25s, 0.5s, 1.0s, 2.0s, etc.
        - No retries for connection or read errors (only HTTP 429)
        """
```
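
To make the replay mechanism concrete, here is a minimal, hypothetical sketch of how a 429-replaying adapter can be built on top of `requests`' `HTTPAdapter`. It mirrors the behavior documented above (retry only on HTTP 429, doubling the sleep between attempts), but `Naive429Adapter` is illustrative only and not part of the library; use `Replay429Adapter` in practice.

```python
import time

from requests.adapters import HTTPAdapter


class Naive429Adapter(HTTPAdapter):
    """Illustrative sketch only -- not the library's actual implementation."""

    def __init__(self, retries=3, initial_backoff=0.25, **kwargs):
        super().__init__(**kwargs)
        self._retries = retries
        self._initial_backoff = initial_backoff

    def send(self, request, **kwargs):
        # Send the request once, then replay it while 429s keep coming back
        response = super().send(request, **kwargs)
        backoff = self._initial_backoff
        for _ in range(self._retries):
            if response.status_code != 429:
                break
            time.sleep(backoff)  # 0.25s, 0.5s, 1.0s, ... (doubles per attempt)
            backoff *= 2
            response = super().send(request, **kwargs)
        return response
```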

## Usage Examples

### Basic Rate Limiting

```python
from cloudant import cloudant
from cloudant.adapters import Replay429Adapter

# Create adapter with default settings (3 retries, 0.25s initial backoff)
adapter = Replay429Adapter()

with cloudant(username, password, account=account_name, adapter=adapter) as client:
    db = client['my_database']

    # The client will automatically retry on 429 responses
    # with exponential backoff: 0.25s, 0.5s, 1.0s
    for i in range(100):
        doc = {'_id': f'doc_{i}', 'data': f'value_{i}'}
        db.create_document(doc)
        print(f"Created document {i}")
```

### Custom Retry Configuration

```python
from cloudant import cloudant
from cloudant.adapters import Replay429Adapter

# Custom adapter with a more aggressive retry policy:
# 5 retries starting with a 1 second backoff
adapter = Replay429Adapter(retries=5, initialBackoff=1.0)

with cloudant(username, password, account=account_name, adapter=adapter) as client:
    db = client['my_database']

    # This will retry up to 5 times on 429 responses
    # with backoff: 1.0s, 2.0s, 4.0s, 8.0s, 16.0s
    try:
        # Bulk operations that might hit rate limits
        docs = [{'_id': f'bulk_{i}', 'value': i} for i in range(1000)]
        result = db.bulk_docs(docs)
        print(f"Bulk insert completed: {len(result)} documents")
    except Exception as e:
        print(f"Bulk insert failed after retries: {e}")
```

### Conservative Rate Limiting

```python
from cloudant import cloudant
from cloudant.adapters import Replay429Adapter

# Conservative adapter for high-volume applications:
# a longer initial backoff is more respectful of rate limits
adapter = Replay429Adapter(retries=2, initialBackoff=2.0)

with cloudant(username, password, account=account_name, adapter=adapter) as client:
    db = client['my_database']

    # Slower but more reliable for sustained high-volume operations
    # Backoff pattern: 2.0s, 4.0s
    query_result = db.get_query_result(
        selector={'type': 'user'},
        limit=10000  # Large query that might trigger rate limiting
    )

    users = list(query_result)
    print(f"Retrieved {len(users)} users")
```

### Direct Client Configuration

```python
from cloudant.client import Cloudant
from cloudant.adapters import Replay429Adapter

# Configure the adapter directly with the client
adapter = Replay429Adapter(retries=4, initialBackoff=0.5)

client = Cloudant(username, password, account=account_name, adapter=adapter)
client.connect()

try:
    # All requests through this client will use the rate limiting adapter
    databases = client.all_dbs()
    print(f"Found {len(databases)} databases")

    for db_name in databases[:5]:  # Process the first 5 databases
        db = client[db_name]
        doc_count = db.doc_count()
        print(f"Database {db_name}: {doc_count} documents")
finally:
    client.disconnect()
```

### High-Volume Data Processing

```python
from cloudant import cloudant
from cloudant.adapters import Replay429Adapter
import time

# Adapter optimized for high-volume operations
adapter = Replay429Adapter(retries=10, initialBackoff=0.1)

with cloudant(username, password, account=account_name, adapter=adapter) as client:
    db = client['large_database']

    # Process a large dataset with rate limit protection
    batch_size = 100
    total_processed = 0

    # Get all document IDs (all_docs returns the raw JSON response,
    # so the results live under its 'rows' key)
    all_docs = db.all_docs()
    doc_ids = [row['id'] for row in all_docs['rows']]

    # Process in batches
    for i in range(0, len(doc_ids), batch_size):
        batch_ids = doc_ids[i:i + batch_size]

        try:
            # Bulk fetch documents
            docs_result = db.all_docs(keys=batch_ids, include_docs=True)
            docs = [row['doc'] for row in docs_result['rows'] if 'doc' in row]

            # Process documents
            for doc in docs:
                # Simulate processing
                doc['processed'] = True
                doc['processed_at'] = time.time()

            # Bulk update
            db.bulk_docs(docs)
            total_processed += len(docs)

            print(f"Processed batch {i // batch_size + 1}: {len(docs)} documents")
        except Exception as e:
            print(f"Batch {i // batch_size + 1} failed: {e}")
            continue

    print(f"Total documents processed: {total_processed}")
```

### Integration with Connection Pools

```python
from cloudant.client import Cloudant
from cloudant.adapters import Replay429Adapter

# Create a custom adapter combining rate limiting with connection pooling
class PooledReplay429Adapter(Replay429Adapter):
    def __init__(self, retries=3, initialBackoff=0.25, pool_connections=10, pool_maxsize=10):
        super().__init__(retries=retries, initialBackoff=initialBackoff)
        # Re-initialize the urllib3 pool manager with a larger pool
        self.init_poolmanager(pool_connections, pool_maxsize)

# Use the enhanced adapter
adapter = PooledReplay429Adapter(
    retries=5,
    initialBackoff=0.5,
    pool_connections=20,  # More connections for concurrent requests
    pool_maxsize=20
)

client = Cloudant(username, password, account=account_name, adapter=adapter)
client.connect()

try:
    # The client now has both rate limiting and optimized connection pooling
    databases = client.all_dbs()

    # Concurrent operations will benefit from connection pooling
    # while still being protected against rate limiting
finally:
    client.disconnect()
```

### Monitoring Adapter Behavior

```python
from cloudant import cloudant
from cloudant.adapters import Replay429Adapter
import logging
import time

# Enable urllib3 debug logging to see retry behavior
logging.basicConfig(level=logging.DEBUG)
urllib3_logger = logging.getLogger('urllib3')
urllib3_logger.setLevel(logging.DEBUG)

# Create the adapter
adapter = Replay429Adapter(retries=3, initialBackoff=1.0)

with cloudant(username, password, account=account_name, adapter=adapter) as client:
    db = client['test_database']

    try:
        # This operation might trigger rate limiting;
        # check the logs to see retry behavior
        for i in range(50):
            doc = {'_id': f'test_doc_{i}', 'timestamp': time.time()}
            # create_document returns a Document containing the new _id/_rev
            result = db.create_document(doc)
            print(f"Document {i}: {result['_id']}")
    except Exception as e:
        print(f"Operation failed: {e}")
```

## Error Handling

Rate limiting adapters handle HTTP 429 responses automatically, but other errors should still be caught:

```python
from cloudant import cloudant
from cloudant.adapters import Replay429Adapter
from cloudant.error import CloudantException
from requests.exceptions import RequestException

adapter = Replay429Adapter(retries=3, initialBackoff=0.25)

with cloudant(username, password, account=account_name, adapter=adapter) as client:
    db = client['my_database']

    try:
        # Rate limiting is handled automatically
        result = db.create_document({'test': 'data'})
    except CloudantException as e:
        # Cloudant-specific errors (authentication, permissions, etc.)
        print(f"Cloudant error: {e}")
    except RequestException as e:
        # Network errors, timeouts, etc. that aren't retryable
        print(f"Request error: {e}")
    except Exception as e:
        # Other errors
        print(f"Unexpected error: {e}")
```

## Types

```python { .api }
# Adapter configuration
AdapterConfig = dict[str, Any]

# HTTP methods supported for retries
SUPPORTED_METHODS = frozenset(['GET', 'HEAD', 'PUT', 'POST', 'DELETE', 'COPY'])

# Retry configuration
RetryConfig = dict[str, Any]
```
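
As a small usage note, `SUPPORTED_METHODS` can be used to check whether a given HTTP method falls within the set the adapter replays. The helper below is a hypothetical illustration, not library code:

```python
def is_replayable(method: str) -> bool:
    # Hypothetical helper: only methods in SUPPORTED_METHODS are replayed on 429
    return method.upper() in SUPPORTED_METHODS

assert is_replayable('get')        # GET is in the supported set
assert not is_replayable('PATCH')  # PATCH is not a CouchDB method listed above
```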

## Import Statements

```python
from cloudant.adapters import Replay429Adapter
from requests.adapters import HTTPAdapter
```