Simple, modern, high-performance file watching and code reload in Python.
Automatic process restarting and management functionality that watches for file changes and restarts processes accordingly. Supports both Python functions and shell commands with configurable signal handling, graceful shutdown, and callback mechanisms.
Run a process and automatically restart it when file changes are detected. Supports Python functions and shell commands with extensive configuration options.
def run_process(
*paths: Union[Path, str],
target: Union[str, Callable[..., Any]],
args: Tuple[Any, ...] = (),
kwargs: Optional[Dict[str, Any]] = None,
target_type: Literal['function', 'command', 'auto'] = 'auto',
callback: Optional[Callable[[Set[FileChange]], None]] = None,
watch_filter: Optional[Callable[[Change, str], bool]] = DefaultFilter(),
grace_period: float = 0,
debounce: int = 1_600,
step: int = 50,
debug: Optional[bool] = None,
sigint_timeout: int = 5,
sigkill_timeout: int = 1,
recursive: bool = True,
ignore_permission_denied: bool = False,
) -> int:
"""
Run a process and restart it upon file changes.
Parameters:
- *paths: Filesystem paths to watch (same as watch())
- target: Function or command to run
- args: Arguments to pass to target (function only)
- kwargs: Keyword arguments to pass to target (function only)
- target_type: 'function', 'command', or 'auto' (uses detect_target_type)
- callback: Function called on each reload with changes as argument
- watch_filter: File filter (same as watch())
- grace_period: Seconds after process start before watching changes
- debounce: Debounce time in ms (same as watch())
- step: Step time in ms (same as watch())
- debug: Enable debug output
- sigint_timeout: Seconds to wait after SIGINT before SIGKILL
- sigkill_timeout: Seconds to wait after SIGKILL before exception
- recursive: Watch recursively (same as watch())
- ignore_permission_denied: Ignore permission errors
Returns:
int: Number of times the process was reloaded
Note:
Uses multiprocessing.get_context('spawn').Process for Python functions
to avoid forking and improve code reload/import behavior.
"""Usage Examples:
from watchfiles import run_process
# Run a Python function
def my_app(name, port=8000):
print(f"Starting {name} on port {port}...")
# App logic here
# Watch current directory and restart function on changes
reloads = run_process('.', target=my_app, args=('MyApp',), kwargs={'port': 3000})
print(f"Process reloaded {reloads} times")
# Run a shell command
run_process('./src', target='python main.py')
# With callback function
def on_reload(changes):
print(f"Reloading due to: {changes}")
run_process('./src', target='python server.py', callback=on_reload)
# With grace period (wait before watching)
run_process('./src', target=my_app, grace_period=2.0)

Async equivalent of run_process with support for async callbacks and proper integration with async event loops.
async def arun_process(
*paths: Union[Path, str],
target: Union[str, Callable[..., Any]],
args: Tuple[Any, ...] = (),
kwargs: Optional[Dict[str, Any]] = None,
target_type: Literal['function', 'command', 'auto'] = 'auto',
callback: Optional[Callable[[Set[FileChange]], Any]] = None,
watch_filter: Optional[Callable[[Change, str], bool]] = DefaultFilter(),
grace_period: float = 0,
debounce: int = 1_600,
step: int = 50,
debug: Optional[bool] = None,
recursive: bool = True,
ignore_permission_denied: bool = False,
) -> int:
"""
Async version of run_process.
Parameters:
Same as run_process except:
- callback: Can be a coroutine function
- No sigint_timeout/sigkill_timeout (handled by async framework)
Returns:
int: Number of times the process was reloaded
Note:
Starting/stopping processes and watching done in separate threads.
KeyboardInterrupt must be caught at asyncio.run() level.
"""Usage Examples:
import asyncio
from watchfiles import arun_process
async def async_callback(changes):
await asyncio.sleep(0.1) # Async operation
print(f"Async reload callback: {changes}")
def my_app():
print("App running...")
async def main():
reloads = await arun_process('./src', target=my_app, callback=async_callback)
print(f"Reloaded {reloads} times")
try:
asyncio.run(main())
except KeyboardInterrupt:
print("Stopped via KeyboardInterrupt")Automatically detect whether a target should be run as a function or command when target_type='auto'.
def detect_target_type(target: Union[str, Callable[..., Any]]) -> Literal['function', 'command']:
"""
Used by run_process and arun_process to determine the target type when target_type is 'auto'.
Detects the target type - either 'function' or 'command'. This method is only called with target_type='auto'.
The following logic is employed:
- If target is not a string, it is assumed to be a function
- If target ends with '.py' or '.sh', it is assumed to be a command
- Otherwise, the target is assumed to be a function if it matches the regex [a-zA-Z0-9_]+(\\.[a-zA-Z0-9_]+)+
If this logic does not work for you, specify the target type explicitly using the target_type function argument
or --target-type command line argument.
Parameters:
- target: The target value to analyze
Returns:
Either 'function' or 'command'
"""Usage Examples:
from watchfiles import detect_target_type
# These return 'function'
print(detect_target_type(my_function)) # 'function'
print(detect_target_type('mymodule.main')) # 'function'
# These return 'command'
print(detect_target_type('python main.py')) # 'command'
print(detect_target_type('main.py')) # 'command'
print(detect_target_type('./script.sh')) # 'command'

Import a callable from a dotted module path string, used internally for function targets.
def import_string(dotted_path: str) -> Any:
"""
Import a dotted module path and return the attribute/class designated by the
last name in the path. Raise ImportError if the import fails.
Stolen approximately from django. This is used to import function targets
when they are specified as dotted strings.
Parameters:
- dotted_path: Dotted module path like 'mypackage.module.function'
Returns:
The imported attribute/function/class
Raises:
ImportError: If dotted_path doesn't look like a module path or if the
module doesn't define the specified attribute
"""Usage Examples:
from watchfiles import import_string
# Import a function
func = import_string('mypackage.utils.helper_function')
result = func(arg1, arg2)
# Import a class
MyClass = import_string('mypackage.models.MyModel')
instance = MyClass()

When processes are started, the WATCHFILES_CHANGES environment variable is set to a JSON string containing the file changes that triggered the restart:
import os
import json
def my_target_function():
# Access changes that triggered this restart
changes_json = os.getenv('WATCHFILES_CHANGES', '[]')
changes = json.loads(changes_json)
print(f"Restarted due to: {changes}")
# changes is a list of [change_type_string, path] pairs

Shell Command Example:
#!/bin/bash
echo "Changes that triggered restart: $WATCHFILES_CHANGES"Process management includes comprehensive signal handling for graceful shutdown:
- SIGINT is sent first, and the process is given sigint_timeout seconds for graceful shutdown.
- SIGKILL is sent next, and the process is given sigkill_timeout seconds before an exception is raised.

SIGTERM Handling: The watchfiles process automatically registers a SIGTERM handler that raises KeyboardInterrupt, ensuring clean shutdown in containerized environments.
# Internal types used by process management
class CombinedProcess:
"""Wrapper for both subprocess.Popen and multiprocessing.Process"""
def __init__(self, p: Union[SpawnProcess, subprocess.Popen[bytes]]) -> None: ...
def stop(self, sigint_timeout: int = 5, sigkill_timeout: int = 1) -> None: ...
def is_alive(self) -> bool: ...
@property
def pid(self) -> int: ...
def join(self, timeout: int) -> None: ...
@property
def exitcode(self) -> Optional[int]: ...

Install with Tessl CLI
npx tessl i tessl/pypi-watchfiles