
# Monkey Patching

System for transparently replacing standard library modules with green cooperative versions. Monkey patching lets existing code work with eventlet's cooperative threading model without modification: patched modules keep their public APIs but yield to other greenthreads instead of blocking.
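
A quick way to see the effect is to inspect a module's attributes before and after the call. The sketch below is illustrative; the exact green class name reported depends on the eventlet version.

```python
import socket
print(socket.socket)   # the blocking stdlib class, e.g. <class 'socket.socket'>

import eventlet
eventlet.monkey_patch()

# The module object in sys.modules was patched in place, so the same
# name now resolves to eventlet's cooperative implementation.
print(socket.socket)   # an eventlet green socket class that yields on I/O
```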

## Capabilities


### Global Monkey Patching

Replace standard library modules globally with green cooperative versions.

```python { .api }
def monkey_patch(os=True, select=True, socket=True, thread=True,
                 time=True, ssl=True, httplib=False,
                 subprocess=False, all=None, profile=False,
                 aggressive=True):
    """
    Globally patch system modules to be greenthread-friendly.

    Parameters:
    - os: bool, patch os module (default: True)
    - select: bool, patch select module (default: True)
    - socket: bool, patch socket module (default: True)
    - thread: bool, patch thread module (default: True)
    - time: bool, patch time module (default: True)
    - ssl: bool, patch ssl module (default: True)
    - httplib: bool, patch http.client module (default: False)
    - subprocess: bool, patch subprocess module (default: False)
    - all: bool, patch all available modules if True
    - profile: bool, enable profiling of patched modules
    - aggressive: bool, whether to be aggressive about patching

    Returns:
    None

    Note:
    Should be called early in program execution, before importing
    other modules that might use the standard library modules.
    """
```

### Selective Module Import

Import specific modules with green versions without global patching.

```python { .api }
def import_patched(modulename, *additional_modules, **kw_additional_modules):
    """
    Import a module with green versions of standard library components.

    Parameters:
    - modulename: str, name of module to import with patching
    - *additional_modules: additional module names to patch
    - **kw_additional_modules: keyword arguments for module-specific options

    Returns:
    The imported module with green versions

    Example:
    urllib_request = import_patched('urllib.request')
    """
```
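
The extra arguments control which dependencies get substituted. A minimal sketch, assuming the keyword form passes `(name, module)` pairs naming the stdlib module to replace, per the signature above; verify the exact behavior against your eventlet version.

```python
import eventlet
from eventlet.green import socket as green_socket

# Import ftplib so that its internal "import socket" resolves to the
# green version, while the rest of the process stays unpatched.
green_ftplib = eventlet.import_patched('ftplib', socket=green_socket)

# Used like the regular module afterwards, e.g.:
# ftp = green_ftplib.FTP('ftp.example.com')
```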

### Patching Status

Check if modules have been monkey patched.

```python { .api }
def is_monkey_patched(module):
    """
    Check if a module has been monkey patched.

    Parameters:
    - module: str or module object to check

    Returns:
    bool: True if module is patched, False otherwise
    """
```
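
The check accepts either a name or the module object itself, as in the small sketch below. Note that it is name-based, so modules loaded through import_patched rather than the import statement are not reflected.

```python
import eventlet
eventlet.monkey_patch(socket=True)

import socket

# The name and the module object are interchangeable here.
print(eventlet.patcher.is_monkey_patched('socket'))  # True
print(eventlet.patcher.is_monkey_patched(socket))    # True
```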

### Low-level Patching

Direct injection of green modules for advanced use cases.

```python { .api }
def inject(module_name, new_globals, *additional_modules, **kw_additional_modules):
    """
    Base method for injecting greenified modules into other modules.

    Parameters:
    - module_name: str, name of module to inject into
    - new_globals: dict, global variables to inject
    - *additional_modules: additional modules to process
    - **kw_additional_modules: keyword arguments for modules

    Returns:
    The modified module
    """
```
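
This is the primitive the higher-level helpers build on: it re-executes a module with selected globals pre-bound to green equivalents. Below is a minimal sketch of hand-building a green wrapper module, following the `(name, module)` pair pattern that eventlet's own eventlet.green wrappers use; treat the details as assumptions to verify against your eventlet version.

```python
# green_ftplib.py - a hand-rolled green wrapper built on inject.
from eventlet import patcher
from eventlet.green import socket

# Re-imports ftplib into this module's globals(), with its reference
# to the stdlib socket rewired to eventlet's green socket.
patcher.inject('ftplib', globals(), ('socket', socket))

del patcher  # keep the wrapper module's namespace clean
```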

## Usage Examples


### Basic Monkey Patching

```python
import eventlet

# Enable monkey patching at the start of your program
# This must be done before importing other modules
eventlet.monkey_patch()

# Now you can use standard library modules that will be cooperative
import socket
import urllib.request
import threading
import time

def fetch_url(url):
    """This will use green socket operations automatically"""
    try:
        response = urllib.request.urlopen(url)
        data = response.read()
        return f"Fetched {len(data)} bytes from {url}"
    except Exception as e:
        return f"Error fetching {url}: {e}"

def main():
    """Concurrent URL fetching using standard library"""
    urls = [
        'http://example.com',
        'http://httpbin.org/delay/1',
        'http://httpbin.org/delay/2',
        'http://httpbin.org/json'
    ]

    # Spawn greenthreads using patched modules
    greenthreads = []
    for url in urls:
        gt = eventlet.spawn(fetch_url, url)
        greenthreads.append(gt)

    # Collect results
    for gt in greenthreads:
        result = gt.wait()
        print(result)

if __name__ == "__main__":
    main()
```

### Selective Patching

```python
import eventlet

# Only patch specific modules
eventlet.monkey_patch(socket=True, time=True, thread=False, ssl=False)

import socket
import time
import threading  # This will still be regular threading

def test_selective_patching():
    """Test that only selected modules are patched"""

    # Check patching status
    print(f"Socket patched: {eventlet.patcher.is_monkey_patched('socket')}")
    print(f"Time patched: {eventlet.patcher.is_monkey_patched('time')}")
    print(f"Threading patched: {eventlet.patcher.is_monkey_patched('threading')}")

    # Socket operations will be green
    sock = socket.socket()
    print(f"Socket type: {type(sock)}")

    # time.sleep will yield to other greenthreads
    start = time.time()
    time.sleep(1)
    elapsed = time.time() - start
    print(f"Sleep took {elapsed:.2f} seconds")

    # Threading will still create OS threads
    thread = threading.Thread(target=lambda: print("OS thread"))
    thread.start()
    thread.join()

if __name__ == "__main__":
    test_selective_patching()
```

### Import Patched Modules

```python
import eventlet

# Import specific modules with green versions
# without global monkey patching
green_urllib = eventlet.import_patched('urllib.request')
green_socket = eventlet.import_patched('socket')

def fetch_with_green_urllib(url):
    """Use specifically imported green urllib"""
    try:
        response = green_urllib.urlopen(url)
        data = response.read()
        return data
    except Exception as e:
        return f"Error: {e}"

def server_with_green_socket():
    """Use specifically imported green socket"""
    server_sock = green_socket.socket()
    server_sock.bind(('localhost', 8080))
    server_sock.listen(5)

    print("Server listening with green socket")

    while True:
        client_sock, addr = server_sock.accept()
        eventlet.spawn(handle_client, client_sock, addr)

def handle_client(sock, addr):
    """Handle client connection"""
    try:
        data = sock.recv(1024)
        sock.send(b"Echo: " + data)
    finally:
        sock.close()

if __name__ == "__main__":
    # Can use both approaches simultaneously
    eventlet.spawn(server_with_green_socket)

    result = fetch_with_green_urllib('http://example.com')
    print(f"Fetched: {len(result) if isinstance(result, bytes) else result}")
```

### Database Connection Example

```python
import eventlet

# Monkey patch before importing database modules
eventlet.monkey_patch()

import psycopg2          # PostgreSQL adapter - now cooperative
import mysql.connector   # MySQL adapter - now cooperative

def query_postgresql(query):
    """Query PostgreSQL database with green connection"""
    try:
        conn = psycopg2.connect(
            host="localhost",
            database="testdb",
            user="user",
            password="password"
        )

        cursor = conn.cursor()
        cursor.execute(query)
        results = cursor.fetchall()

        cursor.close()
        conn.close()

        return results
    except Exception as e:
        return f"PostgreSQL error: {e}"

def query_mysql(query):
    """Query MySQL database with green connection"""
    try:
        conn = mysql.connector.connect(
            host="localhost",
            database="testdb",
            user="user",
            password="password"
        )

        cursor = conn.cursor()
        cursor.execute(query)
        results = cursor.fetchall()

        cursor.close()
        conn.close()

        return results
    except Exception as e:
        return f"MySQL error: {e}"

def concurrent_database_queries():
    """Run multiple database queries concurrently"""

    queries = [
        "SELECT COUNT(*) FROM users",
        "SELECT COUNT(*) FROM orders",
        "SELECT COUNT(*) FROM products",
        "SELECT AVG(price) FROM products"
    ]

    # Run PostgreSQL queries concurrently
    pg_greenthreads = []
    for query in queries:
        gt = eventlet.spawn(query_postgresql, query)
        pg_greenthreads.append(gt)

    # Run MySQL queries concurrently
    mysql_greenthreads = []
    for query in queries:
        gt = eventlet.spawn(query_mysql, query)
        mysql_greenthreads.append(gt)

    # Collect results
    print("PostgreSQL results:")
    for gt in pg_greenthreads:
        result = gt.wait()
        print(f"  {result}")

    print("MySQL results:")
    for gt in mysql_greenthreads:
        result = gt.wait()
        print(f"  {result}")

if __name__ == "__main__":
    concurrent_database_queries()
```

### Web Scraping with Monkey Patching

```python
import eventlet

# Enable monkey patching for web scraping
eventlet.monkey_patch()

import urllib.request
import time

def scrape_url(url):
    """Scrape a single URL"""
    start_time = time.time()

    try:
        request = urllib.request.Request(url)
        request.add_header('User-Agent', 'Eventlet Scraper 1.0')

        response = urllib.request.urlopen(request, timeout=10)
        data = response.read()

        elapsed = time.time() - start_time

        return {
            'url': url,
            'status': response.getcode(),
            'size': len(data),
            'time': elapsed
        }

    except Exception as e:
        elapsed = time.time() - start_time
        return {
            'url': url,
            'error': str(e),
            'time': elapsed
        }

def concurrent_scraping():
    """Scrape multiple URLs concurrently"""

    urls = [
        'http://example.com',
        'http://httpbin.org/delay/1',
        'http://httpbin.org/delay/2',
        'http://httpbin.org/json',
        'http://httpbin.org/user-agent',
        'http://httpbin.org/headers',
        'http://httpbin.org/ip',
        'http://httpbin.org/status/200'
    ]

    print(f"Starting concurrent scraping of {len(urls)} URLs...")
    start_time = time.time()

    # Spawn greenthreads for each URL
    greenthreads = []
    for url in urls:
        gt = eventlet.spawn(scrape_url, url)
        greenthreads.append(gt)

    # Collect results as they complete
    results = []
    for gt in greenthreads:
        result = gt.wait()
        results.append(result)

        if 'error' in result:
            print(f"❌ {result['url']}: {result['error']} ({result['time']:.2f}s)")
        else:
            print(f"✅ {result['url']}: {result['status']} - {result['size']} bytes ({result['time']:.2f}s)")

    total_time = time.time() - start_time
    successful = len([r for r in results if 'error' not in r])

    print(f"\nCompleted {len(urls)} requests in {total_time:.2f}s")
    print(f"Success rate: {successful}/{len(urls)}")

if __name__ == "__main__":
    concurrent_scraping()
```

### HTTP Client with Session Management

```python
import eventlet

# Enable monkey patching for HTTP libraries
eventlet.monkey_patch()

import urllib.request
import urllib.parse
import http.cookiejar

class GreenHTTPSession:
    """HTTP session using green urllib with cookie support"""

    def __init__(self):
        self.cookie_jar = http.cookiejar.CookieJar()
        self.opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(self.cookie_jar)
        )
        self.opener.addheaders = [('User-Agent', 'GreenHTTPSession/1.0')]

    def get(self, url, headers=None):
        """Perform GET request"""
        request = urllib.request.Request(url)

        if headers:
            for key, value in headers.items():
                request.add_header(key, value)

        response = self.opener.open(request)
        return {
            'status': response.getcode(),
            'headers': dict(response.headers),
            'data': response.read(),
            'url': response.geturl()
        }

    def post(self, url, data=None, headers=None):
        """Perform POST request"""
        if isinstance(data, dict):
            data = urllib.parse.urlencode(data).encode('utf-8')

        request = urllib.request.Request(url, data=data, method='POST')

        if headers:
            for key, value in headers.items():
                request.add_header(key, value)

        response = self.opener.open(request)
        return {
            'status': response.getcode(),
            'headers': dict(response.headers),
            'data': response.read(),
            'url': response.geturl()
        }

def test_http_session():
    """Test HTTP session with concurrent requests"""

    session = GreenHTTPSession()

    def make_requests():
        """Make multiple requests with the same session"""
        try:
            # Login request (sets cookies)
            login_data = {'username': 'test', 'password': 'test'}
            login_response = session.post('http://httpbin.org/post', login_data)
            print(f"Login: {login_response['status']}")

            # Authenticated requests (uses cookies)
            auth_response = session.get('http://httpbin.org/cookies')
            print(f"Auth check: {auth_response['status']}")

            # API call
            api_response = session.get(
                'http://httpbin.org/json',
                headers={'Accept': 'application/json'}
            )
            print(f"API call: {api_response['status']}")

            return "Session requests completed"

        except Exception as e:
            return f"Session error: {e}"

    # Run several request batches concurrently over the shared session
    greenthreads = []
    for i in range(3):
        gt = eventlet.spawn(make_requests)
        greenthreads.append(gt)

    # Wait for all batches to complete
    for gt in greenthreads:
        result = gt.wait()
        print(result)

if __name__ == "__main__":
    test_http_session()
```

## Monkey Patching Best Practices


### Safe Patching Order

```python
import eventlet

# 1. Monkey patch FIRST, before any other imports
eventlet.monkey_patch()

# 2. Then import standard library modules
import socket
import urllib.request
import threading
import time

# 3. Then import third-party modules
import requests   # Will use patched socket
import psycopg2   # Made cooperative via eventlet's psycopg support

# 4. Finally import your application modules
import myapp.models
import myapp.views
```

### Conditional Patching

```python
import os
import eventlet

# Only enable monkey patching in certain environments
if os.environ.get('EVENTLET_ENABLED', '').lower() == 'true':
    print("Enabling eventlet monkey patching")
    eventlet.monkey_patch()
else:
    print("Running without eventlet")

# Rest of application can work with or without patching
import socket
import time

def main():
    # Code works the same either way
    sock = socket.socket()
    time.sleep(1)
    print("Application running")

if __name__ == "__main__":
    main()
```

### Patching Verification

```python
import eventlet

eventlet.monkey_patch()

def verify_patching():
    """Verify that monkey patching worked correctly"""

    import socket
    import time
    import threading
    import ssl

    modules_to_check = ['socket', 'time', 'threading', 'ssl']

    print("Monkey patching verification:")
    for module_name in modules_to_check:
        is_patched = eventlet.patcher.is_monkey_patched(module_name)
        status = "✅ PATCHED" if is_patched else "❌ NOT PATCHED"
        print(f"  {module_name}: {status}")

    # Test that patched modules work
    print("\nTesting patched functionality:")

    # Test socket
    try:
        sock = socket.socket()
        print("  Socket creation: ✅")
        sock.close()
    except Exception as e:
        print(f"  Socket creation: ❌ {e}")

    # Test that time.sleep yields to other greenthreads
    try:
        start = time.time()
        time.sleep(0.1)
        elapsed = time.time() - start
        print(f"  time.sleep: ✅ ({elapsed:.3f}s)")
    except Exception as e:
        print(f"  time.sleep: ❌ {e}")

if __name__ == "__main__":
    verify_patching()
```