
# System Integration

Command-line argument parsing, documentation tools, file system utilities, and development aids that bridge fastcore functionality with system-level operations. This module combines script.py, docments.py, and related utilities for building command-line applications and development tools.

## Capabilities

### Command-Line Argument Parsing

Advanced argument parsing system that automatically generates CLI interfaces from Python function signatures.

```python { .api }
def call_parse(func=None, **kwargs):
    """
    Parse command-line arguments and call function with parsed values.

    Automatically creates argument parser from function signature and
    docstrings, then calls the function with parsed command-line arguments.
    This is the main entry point for converting functions into CLI tools.

    Parameters:
    - func: function to convert to CLI (uses caller if None)
    - **kwargs: additional arguments for argument parser

    Usage:
        def main(input_file, output_file=None, verbose=False):
            '''Process input file and create output.

            input_file: Path to input file
            output_file: Path to output file (optional)
            verbose: Enable verbose logging
            '''
            # Function implementation here
            pass

        if __name__ == "__main__":
            call_parse(main)
    """

class Param:
    """
    Parameter specification for command-line arguments.

    Provides detailed control over how function parameters are converted
    to command-line arguments, including type conversion, validation,
    and help text generation.

    Parameters:
    - help: str, help text for the parameter
    - type: callable, type conversion function
    - opt: bool, whether parameter is optional (flag-style)
    - action: str, argparse action ('store_true', 'store_false', etc.)
    - nargs: str|int, number of arguments to consume
    - const: value for const action
    - choices: list, allowed values for parameter
    - required: bool, whether parameter is required
    - default: default value for parameter
    - version: str, version string for --version

    Usage:
        def process_data(
            input_file: Param("Input file path", type=str),
            verbose: Param("Enable verbose output", action='store_true'),
            format: Param("Output format", choices=['json', 'csv', 'xml'], default='json')
        ):
            pass
    """

    def __init__(self, help="", type=None, opt=True, action=None, nargs=None,
                 const=None, choices=None, required=None, default=None, version=None): ...

    def set_default(self, d): ...

    @property
    def pre(self): ...

    @property
    def kwargs(self): ...

def anno_parser(func, prog=None):
    """
    Create ArgumentParser from function annotations and docstrings.

    Analyzes function signature, type hints, and docstrings to automatically
    generate comprehensive argument parser with proper help text and validation.

    Parameters:
    - func: function to analyze
    - prog: str, program name (auto-detected if None)

    Returns:
        argparse.ArgumentParser: Configured parser ready for use
    """

def args_from_prog(func, prog):
    """
    Extract arguments from program string for testing.

    Parses specially formatted program strings to extract argument
    values for testing command-line interfaces programmatically.

    Parameters:
    - func: function being tested
    - prog: str, program string with embedded arguments

    Returns:
        dict: Extracted argument values
    """

def store_true():
    """Placeholder for store_true action in Param definitions."""

def store_false():
    """Placeholder for store_false action in Param definitions."""

def bool_arg(v):
    """
    Type converter for boolean command-line arguments.

    Converts string arguments to boolean values with flexible parsing
    that handles various common boolean representations.

    Parameters:
    - v: str, string value to convert

    Returns:
        bool: Converted boolean value

    Accepts: true/false, yes/no, 1/0, on/off (case-insensitive)
    """
```
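
The usage examples later in this document exercise `call_parse`; the short sketch below shows the lower-level pieces instead: `anno_parser` building the `argparse.ArgumentParser` that `call_parse` uses internally, and `bool_arg` converting string spellings to booleans. The `greet` function is hypothetical and the exact help output depends on the installed fastcore version.

```python
from fastcore.script import anno_parser, bool_arg, Param

def greet(
    name: Param("Name to greet", type=str),
    shout: Param("Uppercase the greeting", type=bool_arg, default=False)
):
    """Print a small greeting."""
    msg = f"Hello, {name}!"
    print(msg.upper() if shout else msg)

# Build the ArgumentParser that call_parse would construct behind the scenes
parser = anno_parser(greet, prog="greet")
parser.print_help()

# bool_arg accepts the common boolean spellings listed above
for s in ("true", "no", "1", "off"):
    print(s, "->", bool_arg(s))
```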

### Documentation Extraction and Analysis

Tools for extracting and processing documentation from Python code.

```python { .api }
def docstring(sym):
    """
    Extract docstring from symbol (function, class, or module).

    Retrieves docstring with fallback to __init__ method for classes
    and proper handling of various symbol types.

    Parameters:
    - sym: function, class, module, or string

    Returns:
        str: Extracted docstring or empty string
    """

def parse_docstring(sym):
    """
    Parse numpy-style docstring into structured components.

    Extracts and structures docstring components including parameters,
    returns, examples, and other sections using numpy docstring format.

    Parameters:
    - sym: function, class, or docstring to parse

    Returns:
        AttrDict: Structured docstring components
    """

def docments(elt, full=False, **kwargs):
    """
    Extract parameter documentation from function comments and annotations.

    Analyzes function source code to extract parameter documentation
    from both docstrings and inline comments, creating comprehensive
    parameter information for API documentation.

    Parameters:
    - elt: function or class to document
    - full: bool, include all parameter details
    - **kwargs: additional options for extraction

    Returns:
        dict: Parameter documentation mapping names to details
    """

def get_source(s):
    """
    Get source code for function, class, or dataclass.

    Retrieves source code with proper handling of different object types
    including functions, methods, and dataclasses.

    Parameters:
    - s: str|function|class, object to get source for

    Returns:
        str: Source code or None if unavailable
    """

def get_name(s):
    """
    Get qualified name for object.

    Returns the fully qualified name including module and class context
    for proper object identification.

    Parameters:
    - s: object to get name for

    Returns:
        str: Qualified name
    """

def qual_name(o):
    """
    Get qualified name with module information.

    Parameters:
    - o: object to get qualified name for

    Returns:
        str: Fully qualified name including module
    """

def sig2str(sig):
    """
    Convert function signature to string representation.

    Parameters:
    - sig: inspect.Signature object

    Returns:
        str: String representation of signature
    """

def extract_docstrings(source):
    """
    Extract all docstrings from Python source code.

    Parses source code to find and extract all docstrings including
    module, class, and function docstrings with location information.

    Parameters:
    - source: str, Python source code

    Returns:
        dict: Mapping of locations to docstrings
    """
```
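
A minimal sketch of these helpers in action, using a hypothetical `scale` function documented in fastcore's inline-comment ("docments") style. Imports assume the `fastcore.docments` layout used elsewhere in this document; the exact shape of the mapping returned by `docments` can vary between fastcore versions.

```python
from fastcore.docments import docments, docstring, qual_name

def scale(
    xs: list,             # Values to scale
    factor: float = 2.0,  # Multiplier applied to every value
) -> list:                # Scaled values
    "Multiply every element of `xs` by `factor`."
    return [x * factor for x in xs]

print(docstring(scale))   # the one-line docstring
print(qual_name(scale))   # qualified name of the function

# Map each parameter (and the return annotation) to its inline comment
for name, comment in docments(scale).items():
    print(f"{name}: {comment}")
```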

### Development Utilities

Helper functions and classes for development workflow and code analysis.

```python { .api }
def isdataclass(s):
    """
    Check if object is a dataclass class (not an instance).

    Parameters:
    - s: object to check

    Returns:
        bool: True if s is a dataclass class
    """

def get_dataclass_source(s):
    """
    Get source code for dataclass with special handling.

    Parameters:
    - s: dataclass to get source for

    Returns:
        str: Source code or empty string
    """

def clean_type_str(x):
    """
    Clean up type string representation for display.

    Removes verbose type information and formatting for cleaner
    display in help text and documentation.

    Parameters:
    - x: str, type string to clean

    Returns:
        str: Cleaned type string
    """

empty = Parameter.empty
"""Sentinel value representing empty/missing parameters."""

SCRIPT_INFO = {}
"""Global dictionary for storing script information and metadata."""
```
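
A brief sketch of these utilities, assuming the same module layout as the examples below (`isdataclass` and `get_dataclass_source` from `fastcore.docments`, `clean_type_str` from `fastcore.script`); the `Point` dataclass is invented for illustration.

```python
from dataclasses import dataclass
from fastcore.docments import isdataclass, get_dataclass_source
from fastcore.script import clean_type_str

@dataclass
class Point:
    x: float = 0.0  # Horizontal coordinate
    y: float = 0.0  # Vertical coordinate

print(isdataclass(Point))           # True: the class itself is a dataclass
print(isdataclass(Point(1, 2)))     # False: instances do not count
print(get_dataclass_source(Point))  # Source of the dataclass definition

# Shorten a verbose type representation for help text / docs display
print(clean_type_str(str(dict)))    # cleans up "<class 'dict'>"
```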

## Usage Examples

### Creating Command-Line Applications

```python
from fastcore.script import call_parse, Param

# Simple CLI application
def process_file(
    input_file: Param("Path to input file", type=str),
    output_file: Param("Path to output file", type=str, default="output.txt"),
    verbose: Param("Enable verbose output", action='store_true'),
    format: Param("Output format", choices=['json', 'csv', 'xml'], default='json')
):
    """
    Process input file and generate output in specified format.

    This tool reads the input file, processes the data, and writes
    the results to the output file in the specified format.
    """
    if verbose:
        print(f"Processing {input_file}...")
        print(f"Output format: {format}")

    # Processing logic here
    with open(input_file, 'r') as f:
        data = f.read()

    # Transform data based on format
    if format == 'json':
        import json
        result = json.dumps({"data": data, "processed": True})
    elif format == 'csv':
        result = f"data,processed\n{data},true"
    else:  # xml
        result = f"<root><data>{data}</data><processed>true</processed></root>"

    with open(output_file, 'w') as f:
        f.write(result)

    if verbose:
        print(f"Output written to {output_file}")

if __name__ == "__main__":
    call_parse(process_file)

# Usage from command line:
# python script.py input.txt --output_file result.json --verbose --format json
```
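
In fastcore itself, `call_parse` is more commonly applied as a decorator, which also lets the function be registered directly as a `console_scripts` entry point. A hedged sketch of that form (the `mypkg.cli` path is hypothetical; behavior as in recent fastcore releases):

```python
from fastcore.script import call_parse, Param

@call_parse
def process_file(
    input_file: Param("Path to input file", type=str),
    verbose: Param("Enable verbose output", action='store_true')
):
    "Process a single input file."
    if verbose:
        print(f"Processing {input_file}...")

# Running this module as a script parses sys.argv and calls the function;
# it can also be exposed as a console_scripts entry point such as
# `process_file = mypkg.cli:process_file` in the project metadata.
```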

### Advanced Parameter Handling

```python
from fastcore.script import call_parse, Param, bool_arg

def data_analysis(
    dataset: Param("Dataset file path"),
    model_type: Param("ML model type", choices=['linear', 'tree', 'neural'], default='linear'),
    train_size: Param("Training set size", type=float, default=0.8),
    random_seed: Param("Random seed for reproducibility", type=int, default=42),
    normalize: Param("Normalize features", type=bool_arg, default=True),
    output_dir: Param("Output directory", default="./results"),
    verbose: Param("Verbose output", action='store_true'),
    debug: Param("Debug mode", action='store_true'),
    config_file: Param("Configuration file", required=False),
    gpu_count: Param("Number of GPUs to use", type=int, nargs='?', const=1, default=0)
):
    """
    Perform data analysis with machine learning models.

    Analyzes the provided dataset using the specified model type
    and configuration parameters. Results are saved to the output directory.
    """
    import json
    import os

    # Validate inputs
    if not os.path.exists(dataset):
        raise FileNotFoundError(f"Dataset file not found: {dataset}")

    if train_size <= 0 or train_size >= 1:
        raise ValueError("Train size must be between 0 and 1")

    # Setup output directory
    os.makedirs(output_dir, exist_ok=True)

    config = {
        'model_type': model_type,
        'train_size': train_size,
        'random_seed': random_seed,
        'normalize': normalize,
        'gpu_count': gpu_count
    }

    if config_file:
        with open(config_file, 'r') as f:
            file_config = json.load(f)
        config.update(file_config)

    if verbose or debug:
        print("Configuration:")
        for key, value in config.items():
            print(f"  {key}: {value}")

    # Analysis logic would go here
    print(f"Analyzing {dataset} with {model_type} model...")

    # Save results
    results_file = os.path.join(output_dir, "results.json")
    with open(results_file, 'w') as f:
        json.dump(config, f, indent=2)

    print(f"Results saved to {results_file}")

if __name__ == "__main__":
    call_parse(data_analysis)

# Command line usage examples:
# python analysis.py data.csv --model_type neural --normalize true --verbose
# python analysis.py data.csv --train_size 0.7 --gpu_count --debug
# python analysis.py data.csv --config_file config.json --output_dir results/
```

### Documentation Extraction and Processing

```python
from fastcore.docments import docstring, parse_docstring, docments, get_source

def analyze_function_documentation(func):
    """Comprehensive analysis of function documentation."""

    # Extract basic docstring
    doc = docstring(func)
    print(f"Docstring: {doc}")

    # Parse structured docstring
    parsed = parse_docstring(func)
    print(f"Summary: {parsed.get('Summary', 'No summary')}")
    print(f"Parameters: {parsed.get('Parameters', 'No parameters documented')}")

    # Extract detailed parameter documentation
    param_docs = docments(func, full=True)
    print("\nDetailed parameter information:")
    for name, info in param_docs.items():
        print(f"  {name}: {info.get('docment', 'No documentation')}")
        if hasattr(info, 'anno') and info.anno:
            print(f"    Type: {info.anno}")
        if hasattr(info, 'default') and info.default is not None:
            print(f"    Default: {info.default}")

    # Get source code
    source = get_source(func)
    if source:
        print(f"\nSource code:\n{source}")

# Example function to analyze
def example_function(
    data: list,
    threshold: float = 0.5,
    normalize: bool = True
) -> dict:
    """
    Process data with threshold filtering.

    Parameters
    ----------
    data : list
        Input data to process
    threshold : float, optional
        Filtering threshold (default: 0.5)
    normalize : bool, optional
        Whether to normalize results (default: True)

    Returns
    -------
    dict
        Processed results with statistics

    Examples
    --------
    >>> result = example_function([1, 2, 3, 4, 5])
    >>> print(result['count'])
    5
    """
    filtered = [x for x in data if x > threshold]
    if normalize:
        total = sum(filtered)
        filtered = [x/total for x in filtered]

    return {
        'data': filtered,
        'count': len(filtered),
        'normalized': normalize
    }

# Analyze the function
analyze_function_documentation(example_function)
```

### Automated API Documentation

```python
from fastcore.docments import docstring, extract_docstrings, sig2str
from fastcore.script import clean_type_str
import inspect

class APIDocumentationGenerator:
    """Generate API documentation from Python modules."""

    def __init__(self, module):
        self.module = module
        self.functions = []
        self.classes = []

        # Analyze module contents
        for name in dir(module):
            obj = getattr(module, name)
            if not name.startswith('_'):
                if inspect.isfunction(obj):
                    self.functions.append((name, obj))
                elif inspect.isclass(obj):
                    self.classes.append((name, obj))

    def generate_function_doc(self, name, func):
        """Generate documentation for a function."""
        sig = inspect.signature(func)
        doc = docstring(func)

        # Clean up signature display
        params = []
        for param_name, param in sig.parameters.items():
            param_str = param_name
            if param.annotation != inspect.Parameter.empty:
                type_str = clean_type_str(param.annotation)
                param_str += f": {type_str}"
            if param.default != inspect.Parameter.empty:
                param_str += f" = {param.default}"
            params.append(param_str)

        signature = f"{name}({', '.join(params)})"
        if sig.return_annotation != inspect.Parameter.empty:
            return_type = clean_type_str(sig.return_annotation)
            signature += f" -> {return_type}"

        return {
            'name': name,
            'signature': signature,
            'docstring': doc,
            'source_file': getattr(inspect.getmodule(func), '__file__', None)
        }

    def generate_class_doc(self, name, cls):
        """Generate documentation for a class."""
        doc = docstring(cls)
        methods = []

        for method_name in dir(cls):
            if not method_name.startswith('_') or method_name == '__init__':
                method = getattr(cls, method_name)
                if inspect.isfunction(method) or inspect.ismethod(method):
                    methods.append(self.generate_function_doc(method_name, method))

        return {
            'name': name,
            'docstring': doc,
            'methods': methods,
            'source_file': getattr(inspect.getmodule(cls), '__file__', None)
        }

    def generate_markdown(self):
        """Generate markdown documentation."""
        lines = [f"# {self.module.__name__} API Documentation\n"]

        if self.module.__doc__:
            lines.append(f"{self.module.__doc__}\n")

        if self.functions:
            lines.append("## Functions\n")
            for name, func in self.functions:
                doc = self.generate_function_doc(name, func)
                lines.append(f"### {doc['signature']}\n")
                if doc['docstring']:
                    lines.append(f"{doc['docstring']}\n")
                lines.append("")

        if self.classes:
            lines.append("## Classes\n")
            for name, cls in self.classes:
                doc = self.generate_class_doc(name, cls)
                lines.append(f"### class {doc['name']}\n")
                if doc['docstring']:
                    lines.append(f"{doc['docstring']}\n")

                if doc['methods']:
                    lines.append("#### Methods\n")
                    for method in doc['methods']:
                        lines.append(f"**{method['signature']}**\n")
                        if method['docstring']:
                            lines.append(f"{method['docstring']}\n")
                lines.append("")

        return '\n'.join(lines)

# Generate documentation for fastcore.basics
import fastcore.basics
doc_gen = APIDocumentationGenerator(fastcore.basics)
markdown_docs = doc_gen.generate_markdown()

# Save to file
with open('api_docs.md', 'w') as f:
    f.write(markdown_docs)

print("API documentation generated in api_docs.md")
```
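
The generator above imports `extract_docstrings` and `sig2str` without using them. The sketch below exercises both, relying only on the behavior documented earlier; the exact keys and value types of the mapping returned by `extract_docstrings` may differ between fastcore versions.

```python
import inspect
from fastcore.docments import extract_docstrings, sig2str

source = '''
def add(a, b):
    "Add two numbers."
    return a + b

class Box:
    "A tiny container."
'''

# Map each documented location in the source to its docstring
for location, doc in extract_docstrings(source).items():
    print(location, "->", doc)

# Render a signature object as a plain string
print(sig2str(inspect.signature(lambda x, y=1: x + y)))
```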

### Testing Command-Line Interfaces

```python
from fastcore.script import args_from_prog, call_parse
import subprocess
import tempfile
import os
import sys

def test_cli_application():
    """Test CLI application programmatically."""

    def sample_app(
        input_file: str,
        output_file: str = "output.txt",
        verbose: bool = False
    ):
        """Sample application for testing."""
        if verbose:
            print(f"Processing {input_file} -> {output_file}")

        with open(input_file, 'r') as f:
            content = f.read()

        with open(output_file, 'w') as f:
            f.write(f"Processed: {content}")

        return output_file

    # Test with temporary files
    with tempfile.TemporaryDirectory() as temp_dir:
        # Create test input file
        input_path = os.path.join(temp_dir, "input.txt")
        with open(input_path, 'w') as f:
            f.write("Test content")

        # Test different argument combinations
        test_cases = [
            # Basic usage
            {
                'args': [input_path],
                'expected_output': 'output.txt'
            },
            # Custom output file
            {
                'args': [input_path, '--output_file', 'custom.txt'],
                'expected_output': 'custom.txt'
            },
            # Verbose mode
            {
                'args': [input_path, '--verbose'],
                'expected_output': 'output.txt'
            }
        ]

        for i, test_case in enumerate(test_cases):
            print(f"Running test case {i + 1}: {test_case['args']}")

            # Simulate command-line execution
            old_argv = sys.argv
            try:
                sys.argv = ['test_script.py'] + test_case['args']
                result = call_parse(sample_app)

                # Verify output file was created
                expected_file = test_case['expected_output']
                if os.path.exists(expected_file):
                    with open(expected_file, 'r') as f:
                        content = f.read()
                    assert "Processed: Test content" in content
                    print(f"✓ Test case {i + 1} passed")
                    os.remove(expected_file)  # Cleanup
                else:
                    print(f"✗ Test case {i + 1} failed: output file not found")

            finally:
                sys.argv = old_argv

# Run the test
test_cli_application()
```

### Integration with Other FastCore Components

```python
from fastcore.script import call_parse, Param
from fastcore.parallel import parallel
from fastcore.foundation import L
from fastcore.xtras import walk
from fastcore.net import urlread
import json

def web_scraper_cli(
    urls_file: Param("File containing URLs to scrape"),
    output_dir: Param("Output directory for scraped content", default="./scraped"),
    workers: Param("Number of parallel workers", type=int, default=4),
    delay: Param("Delay between requests (seconds)", type=float, default=1.0),
    format: Param("Output format", choices=['json', 'txt'], default='json'),
    verbose: Param("Verbose output", action='store_true')
):
    """
    Scrape content from URLs in parallel using FastCore utilities.

    Reads URLs from file, scrapes content in parallel, and saves results
    in the specified format. Demonstrates integration of multiple FastCore
    components in a real application.
    """
    import os
    import time
    from urllib.parse import urlparse

    # Create output directory
    os.makedirs(output_dir, exist_ok=True)

    # Read URLs from file
    with open(urls_file, 'r') as f:
        urls = [line.strip() for line in f if line.strip()]

    if verbose:
        print(f"Found {len(urls)} URLs to scrape")
        print(f"Using {workers} parallel workers")

    def scrape_url(url):
        """Scrape a single URL with error handling."""
        try:
            if delay > 0:
                time.sleep(delay)

            content = urlread(url)

            # Generate filename from URL
            parsed = urlparse(url)
            filename = f"{parsed.netloc}_{parsed.path.replace('/', '_')}"
            if not filename.endswith('.txt') and format == 'txt':
                filename += '.txt'
            elif not filename.endswith('.json') and format == 'json':
                filename += '.json'

            filepath = os.path.join(output_dir, filename)

            # Save content
            if format == 'json':
                data = {
                    'url': url,
                    'content': content,
                    'scraped_at': time.time()
                }
                with open(filepath, 'w') as f:
                    json.dump(data, f, indent=2)
            else:
                with open(filepath, 'w') as f:
                    f.write(f"URL: {url}\n")
                    f.write(f"Content:\n{content}")

            if verbose:
                print(f"✓ Scraped {url} -> {filename}")

            return {'url': url, 'file': filepath, 'success': True}

        except Exception as e:
            if verbose:
                print(f"✗ Failed to scrape {url}: {e}")
            return {'url': url, 'error': str(e), 'success': False}

    # Scrape URLs in parallel (threads suit the I/O-bound work and allow the local closure)
    results = parallel(scrape_url, urls, n_workers=workers, threadpool=True, progress=verbose)

    # Generate summary
    successful = L(results).filter(lambda x: x['success'])
    failed = L(results).filter(lambda x: not x['success'])

    summary = {
        'total_urls': len(urls),
        'successful': len(successful),
        'failed': len(failed),
        'output_directory': output_dir,
        'files_created': [r['file'] for r in successful if 'file' in r]
    }

    # Save summary
    summary_file = os.path.join(output_dir, 'scraping_summary.json')
    with open(summary_file, 'w') as f:
        json.dump(summary, f, indent=2)

    print("\nScraping completed!")
    print(f"Successful: {summary['successful']}/{summary['total_urls']}")
    print(f"Results saved in: {output_dir}")

    if failed:
        print(f"Failed URLs: {len(failed)}")
        for result in failed:
            print(f"  {result['url']}: {result['error']}")

if __name__ == "__main__":
    call_parse(web_scraper_cli)

# Usage:
# python scraper.py urls.txt --workers 8 --format json --verbose
# python scraper.py urls.txt --output_dir ./data --delay 2.0
```