A library for accessing stubs in typeshed.

This document provides comprehensive examples of using typeshed_client in real-world scenarios.

Building a simple type checker that validates whether names exist in stubs.
from typeshed_client import Resolver, get_search_context, NameInfo, ImportedInfo
import ast
class SimpleTypeChecker:
    """Validate that fully qualified names can be resolved in typeshed stubs."""

    def __init__(self, python_version=(3, 11)):
        # Build one resolver up front and reuse it for every lookup.
        context = get_search_context(version=python_version)
        self.resolver = Resolver(search_context=context)

    def check_name_exists(self, fully_qualified_name: str) -> dict:
        """Check if a name exists and return information about it."""
        resolved = self.resolver.get_fully_qualified_name(fully_qualified_name)
        if resolved is None:
            return {
                'exists': False,
                'message': f"'{fully_qualified_name}' not found"
            }
        if isinstance(resolved, NameInfo):
            name_info, origin = resolved, "defined directly"
        elif isinstance(resolved, ImportedInfo):
            name_info = resolved.info
            origin = f"imported from {'.'.join(resolved.source_module)}"
        else:
            # Anything else the resolver returns is treated as a module.
            return {
                'exists': True,
                'type': 'module',
                'message': f"'{fully_qualified_name}' is a module"
            }
        return {
            'exists': True,
            'name': name_info.name,
            'exported': name_info.is_exported,
            'source': origin,
            'ast_type': type(name_info.ast).__name__
        }

    def validate_imports(self, import_statements: list[str]) -> dict:
        """Validate a list of import statements."""
        results = {}
        for stmt in import_statements:
            # Simple parsing (real implementation would use ast.parse)
            if stmt.startswith('from '):
                # from module import name
                parts = stmt.replace('from ', '').replace(' import ', '.').split('.')
                full_name = '.'.join(parts)
            else:
                # import module
                full_name = stmt.replace('import ', '')
            results[stmt] = self.check_name_exists(full_name)
        return results
# Usage
checker = SimpleTypeChecker(python_version=(3, 11))
imports = [
    'from typing import List',
    'from collections import OrderedDict',
    'from typing import NonExistentType',
]
for import_stmt in imports:
    # NOTE(review): only the last token (the bare name, e.g. 'List') is
    # resolved here, not the full 'typing.List' path — validate_imports()
    # above shows the full-path variant; confirm which is intended.
    result = checker.check_name_exists(import_stmt.split()[-1])
    if result['exists']:
        # Modules have no 'source' key, so fall back to a plain 'exists'.
        print(f"✓ {import_stmt}: {result.get('source', 'exists')}")
    else:
        print(f"✗ {import_stmt}: {result['message']}")

Generate documentation from stub files.
from typeshed_client import get_stub_names, get_search_context
import ast
class StubDocGenerator:
    """Generate markdown documentation for a module from its typeshed stub."""

    # A stub "function" may be a plain or an async def. The original check
    # matched only ast.FunctionDef, silently filing async functions under
    # constants and dropping async methods from the Methods list.
    _FUNC_NODES = (ast.FunctionDef, ast.AsyncFunctionDef)

    def __init__(self):
        self.ctx = get_search_context()

    def generate_module_docs(self, module_name: str) -> str:
        """Generate markdown documentation for a module.

        Returns a markdown string; a missing module yields a short
        "not found" document instead of raising.
        """
        names = get_stub_names(module_name, search_context=self.ctx)
        if not names:
            return f"# {module_name}\n\nModule not found."
        lines = [f"# {module_name}\n"]
        # Separate exported names by kind of definition.
        functions = []
        classes = []
        constants = []
        for name, info in names.items():
            if not info.is_exported:
                continue
            if isinstance(info.ast, self._FUNC_NODES):
                functions.append((name, info))
            elif isinstance(info.ast, ast.ClassDef):
                classes.append((name, info))
            else:
                constants.append((name, info))
        # Document functions
        if functions:
            lines.append("\n## Functions\n")
            for name, info in functions:
                func = info.ast
                args = [arg.arg for arg in func.args.args]
                ret = ast.unparse(func.returns) if func.returns else "Any"
                lines.append(f"### `{name}({', '.join(args)}) -> {ret}`\n")
                docstring = ast.get_docstring(func)
                if docstring:
                    lines.append(f"{docstring}\n")
        # Document classes
        if classes:
            lines.append("\n## Classes\n")
            for name, info in classes:
                cls = info.ast
                bases = [ast.unparse(b) for b in cls.bases]
                base_str = f"({', '.join(bases)})" if bases else ""
                lines.append(f"### `{name}{base_str}`\n")
                docstring = ast.get_docstring(cls)
                if docstring:
                    lines.append(f"{docstring}\n")
                # Document methods (sync and async alike)
                if info.child_nodes:
                    public_methods = [
                        (n, i) for n, i in info.child_nodes.items()
                        if i.is_exported and isinstance(i.ast, self._FUNC_NODES)
                    ]
                    if public_methods:
                        lines.append("\n#### Methods\n")
                        for method_name, method_info in public_methods:
                            lines.append(f"- `{method_name}()`")
        return '\n'.join(lines)
# Usage
generator = StubDocGenerator()
# Render the 'typing' stub as a markdown document.
docs = generator.generate_module_docs('typing')
print(docs[:500])  # Print first 500 chars

Check API compatibility across Python versions.
from typeshed_client import get_search_context, get_stub_names
from collections import defaultdict
class CompatibilityChecker:
    """Report which exported APIs of a module exist under which Python versions."""

    def check_compatibility(self, module_name: str, versions: list[tuple[int, int]]) -> dict:
        """Check which APIs are available in each version."""
        # Map each requested version to the set of exported names it offers.
        version_apis = {}
        for version in versions:
            names = get_stub_names(
                module_name, search_context=get_search_context(version=version)
            )
            if names:
                version_apis[version] = {
                    name for name, info in names.items() if info.is_exported
                }
            else:
                version_apis[version] = set()
        # Fold per-version sets into a per-API availability table.
        all_apis = set().union(*version_apis.values())
        compatibility = {}
        for api in all_apis:
            available_in = [v for v, apis in version_apis.items() if api in apis]
            compatibility[api] = {
                'available_in': available_in,
                'min_version': min(available_in) if available_in else None,
                'universal': len(available_in) == len(versions)
            }
        return compatibility

    def find_new_apis(self, module_name: str, old_version: tuple, new_version: tuple) -> set:
        """Find APIs added between versions."""
        compat = self.check_compatibility(module_name, [old_version, new_version])
        return {
            api
            for api, entry in compat.items()
            if new_version in entry['available_in']
            and old_version not in entry['available_in']
        }
# Usage
checker = CompatibilityChecker()
# Check typing module compatibility across four minor versions
compat = checker.check_compatibility('typing', [(3, 9), (3, 10), (3, 11), (3, 12)])
# Find universal APIs (present in every requested version)
universal = [api for api, info in compat.items() if info['universal']]
print(f"Universal APIs: {len(universal)}")
# Find new APIs in 3.12
new_in_312 = checker.find_new_apis('typing', (3, 9), (3, 12))
print(f"New in 3.12: {sorted(new_in_312)[:10]}")

Validate stub file quality and completeness.
from typeshed_client import get_stub_names, get_search_context
from typeshed_client.parser import InvalidStub
import ast
class StubValidator:
    """Run sanity checks over a module's stub and collect statistics."""

    def __init__(self, strict=True):
        # In strict mode, typeshed_client warnings become errors.
        self.ctx = get_search_context(raise_on_warnings=strict)

    def validate_module(self, module_name: str) -> dict:
        """Validate a module's stub file."""
        report = {
            'module': module_name,
            'valid': False,
            'errors': [],
            'warnings': [],
            'stats': {}
        }
        try:
            names = get_stub_names(module_name, search_context=self.ctx)
            if names is None:
                report['errors'].append("Module not found")
                return report
            # Collect statistics over all top-level names.
            infos = list(names.values())
            total = len(infos)
            exported = sum(info.is_exported for info in infos)
            functions = sum(isinstance(info.ast, ast.FunctionDef) for info in infos)
            classes = sum(isinstance(info.ast, ast.ClassDef) for info in infos)
            report['stats'] = {
                'total_names': total,
                'exported': exported,
                'private': total - exported,
                'functions': functions,
                'classes': classes
            }
            # Flag common smells as warnings rather than failures.
            if exported == 0 and total > 0:
                report['warnings'].append("No exported names (all private)")
            if '__all__' not in names:
                report['warnings'].append("No __all__ defined")
            report['valid'] = True
        except InvalidStub as e:
            report['errors'].append(f"Invalid stub: {e}")
        except Exception as e:
            # Boundary catch-all: record rather than crash the batch run.
            report['errors'].append(f"Unexpected error: {e}")
        return report

    def validate_modules(self, module_names: list[str]) -> dict:
        """Validate multiple modules."""
        return {module: self.validate_module(module) for module in module_names}
# Usage
validator = StubValidator(strict=True)
modules = ['typing', 'collections', 'asyncio', 'dataclasses']
results = validator.validate_modules(modules)
for module, result in results.items():
    if result['valid']:
        stats = result['stats']
        print(f"✓ {module}: {stats['exported']} exported, {stats['functions']} functions, {stats['classes']} classes")
        # Surface non-fatal issues beneath the summary line.
        if result['warnings']:
            for warning in result['warnings']:
                print(f" ⚠ {warning}")
    else:
        print(f"✗ {module}: {', '.join(result['errors'])}")

Analyze import dependencies between modules.
from typeshed_client import Resolver, ModulePath, ImportedName, get_search_context
class DependencyAnalyzer:
    """Inspect import relationships between stub modules."""

    def __init__(self):
        self.resolver = Resolver(search_context=get_search_context())

    def get_dependencies(self, module_name: str) -> set[str]:
        """Get all modules that a module depends on."""
        path = ModulePath(tuple(module_name.split('.')))
        module = self.resolver.get_module(path)
        if not module.exists:
            return set()
        # Every ImportedName node records the module it was imported from.
        return {
            '.'.join(info.ast.module_name)
            for info in module.names.values()
            if isinstance(info.ast, ImportedName)
        }

    def build_dependency_graph(self, root_modules: list[str], depth=2) -> dict:
        """Build dependency graph up to specified depth."""
        graph = {}
        seen = set()

        def visit(module, level):
            # Stop at the depth limit and never re-expand a module.
            if level > depth or module in seen:
                return
            seen.add(module)
            deps = self.get_dependencies(module)
            graph[module] = deps
            for dep in deps:
                if dep not in seen:
                    visit(dep, level + 1)

        for root in root_modules:
            visit(root, 0)
        return graph

    def find_circular_dependencies(self, graph: dict) -> list[tuple]:
        """Find circular dependencies in the graph."""
        return [
            (module, dep)
            for module, deps in graph.items()
            for dep in deps
            if dep in graph and module in graph[dep]
        ]
# Usage
analyzer = DependencyAnalyzer()
# Analyze dependencies
deps = analyzer.get_dependencies('collections')
print(f"collections depends on: {sorted(deps)}")
# Build dependency graph (depth=1: the roots plus their direct imports)
graph = analyzer.build_dependency_graph(['typing', 'collections'], depth=1)
print(f"\nDependency graph ({len(graph)} modules):")
for module, deps in sorted(graph.items()):
    if deps:
        print(f" {module} -> {sorted(deps)}")

Extract type annotations for code generation.
from typeshed_client import get_stub_names
import ast
class TypeAnnotationExtractor:
    """Extract typed function signatures from typeshed stubs.

    Each signature is reduced to a plain dict of parameter names/types,
    the return type, and whether the function is async — a convenient
    shape for code generation.
    """

    # Bug fix: the original collected only ast.FunctionDef, so async
    # functions were dropped entirely and 'is_async' could never be True.
    _FUNC_NODES = (ast.FunctionDef, ast.AsyncFunctionDef)

    def extract_function_signatures(self, module_name: str) -> dict:
        """Extract all exported function signatures with type annotations."""
        names = get_stub_names(module_name)
        if not names:
            # Module not found (or empty stub): nothing to extract.
            return {}
        signatures = {}
        for name, info in names.items():
            if not info.is_exported:
                continue
            if isinstance(info.ast, self._FUNC_NODES):
                signatures[name] = self._parse_function(info.ast)
        return signatures

    def _parse_function(self, func) -> dict:
        """Parse one (possibly async) function definition.

        Returns {'parameters': [...], 'return_type': str, 'is_async': bool}.
        Unannotated parameters simply omit the 'type' key; a missing
        return annotation is reported as "Any".
        """
        params = []
        for arg in func.args.args:
            param = {'name': arg.arg}
            if arg.annotation:
                param['type'] = ast.unparse(arg.annotation)
            params.append(param)
        return_type = ast.unparse(func.returns) if func.returns else "Any"
        return {
            'parameters': params,
            'return_type': return_type,
            'is_async': isinstance(func, ast.AsyncFunctionDef)
        }

    def generate_stub_skeleton(self, module_name: str) -> str:
        """Generate a stub skeleton for a module."""
        sigs = self.extract_function_signatures(module_name)
        lines = [f'"""Stub for {module_name}"""', '']
        for name, sig in sigs.items():
            # Build parameter list: "name: type" where a type is known.
            params = []
            for param in sig['parameters']:
                if 'type' in param:
                    params.append(f"{param['name']}: {param['type']}")
                else:
                    params.append(param['name'])
            # Build function definition
            async_kw = 'async ' if sig['is_async'] else ''
            param_str = ', '.join(params)
            ret = sig['return_type']
            lines.append(f'{async_kw}def {name}({param_str}) -> {ret}:')
            # Conventional 4-space body indentation for the generated stub.
            lines.append(f'    """TODO: Implement {name}"""')
            lines.append('    ...')
            lines.append('')
        return '\n'.join(lines)
# Usage
extractor = TypeAnnotationExtractor()
# Extract signatures from the 'typing' stub
sigs = extractor.extract_function_signatures('typing')
print(f"Extracted {len(sigs)} function signatures")
# Show first few signatures
for name, sig in list(sigs.items())[:3]:
    params = ', '.join(p['name'] for p in sig['parameters'])
    print(f"{name}({params}) -> {sig['return_type']}")

Identify platform-specific APIs.
from typeshed_client import get_search_context, get_stub_names
class PlatformAnalyzer:
    """Compare stub-declared APIs across operating-system platforms."""

    def compare_platforms(self, module_name: str, platforms: list[str]) -> dict:
        """Compare API availability across platforms."""
        # Exported names per platform, empty set when the module is absent.
        platform_apis = {}
        for platform in platforms:
            names = get_stub_names(
                module_name, search_context=get_search_context(platform=platform)
            )
            if names:
                platform_apis[platform] = {
                    name for name, info in names.items() if info.is_exported
                }
            else:
                platform_apis[platform] = set()
        # Union of everything seen anywhere, intersection of what all share.
        all_apis = set().union(*platform_apis.values())
        if platform_apis:
            common_apis = set.intersection(*platform_apis.values())
        else:
            common_apis = set()
        platform_specific = {
            platform: apis - common_apis
            for platform, apis in platform_apis.items()
        }
        return {
            'total_apis': len(all_apis),
            'common_apis': common_apis,
            'platform_specific': platform_specific,
            'platforms': platforms
        }

    def find_portable_subset(self, module_name: str, platforms: list[str]) -> set:
        """Find APIs that work on all specified platforms."""
        return self.compare_platforms(module_name, platforms)['common_apis']
# Usage
analyzer = PlatformAnalyzer()
# Compare os module across platforms
platforms = ['linux', 'win32', 'darwin']
result = analyzer.compare_platforms('os', platforms)
print(f"Total APIs: {result['total_apis']}")
print(f"Common to all platforms: {len(result['common_apis'])}")
for platform, specific_apis in result['platform_specific'].items():
    if specific_apis:
        # Show at most five platform-only names per platform.
        print(f"\n{platform}-specific ({len(specific_apis)}):")
        for api in sorted(specific_apis)[:5]:
            print(f" - {api}")

Help migrate code between Python versions.
from typeshed_client import get_search_context, get_stub_names
class MigrationHelper:
    """Summarize exported-API differences between two Python versions."""

    def check_migration_compatibility(
        self,
        module_name: str,
        from_version: tuple,
        to_version: tuple
    ) -> dict:
        """Check what changes when migrating between versions."""
        names_old = get_stub_names(
            module_name, search_context=get_search_context(version=from_version)
        )
        names_new = get_stub_names(
            module_name, search_context=get_search_context(version=to_version)
        )
        if not names_old or not names_new:
            return {'error': 'Module not found in one or both versions'}
        old_apis = {n for n, info in names_old.items() if info.is_exported}
        new_apis = {n for n, info in names_new.items() if info.is_exported}
        return {
            'from_version': from_version,
            'to_version': to_version,
            'added': new_apis - old_apis,
            'removed': old_apis - new_apis,
            'common': old_apis & new_apis,
            # Anything removed is a potential breaking change.
            'breaking_changes': bool(old_apis - new_apis)
        }

    def generate_migration_report(
        self,
        modules: list[str],
        from_version: tuple,
        to_version: tuple
    ) -> str:
        """Generate a migration report."""
        lines = [
            f"# Migration Report: Python {from_version[0]}.{from_version[1]} → {to_version[0]}.{to_version[1]}",
            ""
        ]
        for module in modules:
            delta = self.check_migration_compatibility(module, from_version, to_version)
            if 'error' in delta:
                lines.append(f"## {module}: {delta['error']}")
                continue
            lines.append(f"## {module}")
            if delta['added']:
                lines.append(f"\n### Added ({len(delta['added'])})")
                # List at most the first ten names alphabetically.
                lines.extend(f"- ✓ `{api}`" for api in sorted(delta['added'])[:10])
            if delta['removed']:
                lines.append(f"\n### Removed ({len(delta['removed'])})")
                lines.extend(f"- ✗ `{api}`" for api in sorted(delta['removed'])[:10])
            if delta['breaking_changes']:
                lines.append("\n⚠️ **Breaking changes detected**")
            lines.append("")
        return '\n'.join(lines)
# Usage
helper = MigrationHelper()
# Check specific migration
result = helper.check_migration_compatibility('typing', (3, 8), (3, 12))
print(f"Added: {len(result['added'])} APIs")
print(f"Removed: {len(result['removed'])} APIs")
print(f"Breaking changes: {result['breaking_changes']}")
# Generate full report for several modules at once
report = helper.generate_migration_report(
    ['typing', 'asyncio', 'dataclasses'],
    from_version=(3, 9),
    to_version=(3, 12)
)
print("\n" + report[:500])

When working with these scenarios:
# EFFICIENT: Single resolver, reused
# NOTE(review): presumably each Resolver rebuilds its search state, so
# reusing one instance amortizes that cost — confirm in typeshed_client docs.
resolver = Resolver()
for name in names:
    result = resolver.get_fully_qualified_name(name)

# INEFFICIENT: New resolver each time
for name in names:
    resolver = Resolver()  # Wasteful
    result = resolver.get_fully_qualified_name(name)

Install with Tessl CLI
npx tessl i tessl/pypi-typeshed-client