Cross-platform file locking library that provides reliable file locking mechanisms across Windows, Linux, Unix, and macOS systems
—
Additional utilities for atomic file operations and helper functions that complement the core locking functionality.
Utility for atomic file writing that ensures files are written completely or not at all, preventing corruption from interrupted writes.
import contextlib
import os
import pathlib
import tempfile
import typing


@contextlib.contextmanager
def open_atomic(
    filename: 'Filename', binary: bool = True
) -> typing.Iterator[typing.IO]:
    """
    Context manager for atomic file writing using temporary files.

    Instead of locking, this method allows you to write the entire file
    and atomically move it to the final location using os.rename().

    Parameters:
    - filename: Target filename (str or pathlib.Path)
    - binary: If True, open in binary mode ('wb'), else text mode ('w')

    Yields:
    - File handle for writing to temporary file

    Raises:
    - AssertionError: If target file already exists

    Note:
    - Creates parent directories if they don't exist
    - Uses os.rename() which is atomic on most platforms
    - Temporary file is created in same directory as target (so the
      rename never crosses a filesystem boundary)
    - Automatic cleanup if operation fails
    """
    path = pathlib.Path(filename)
    assert not path.exists(), '%r exists' % str(path)
    # Create parent directories up-front so the final rename target is valid.
    path.parent.mkdir(parents=True, exist_ok=True)

    # delete=False: we rename (success) or remove (failure) the file ourselves.
    temp_fh = tempfile.NamedTemporaryFile(
        mode='wb' if binary else 'w',
        dir=str(path.parent),
        delete=False,
    )
    try:
        yield temp_fh
        # Make sure all bytes are on disk before the atomic rename.
        temp_fh.flush()
        os.fsync(temp_fh.fileno())
        temp_fh.close()
        os.rename(temp_fh.name, str(path))
    finally:
        if not temp_fh.closed:
            temp_fh.close()
        # On success the temp file was renamed away and removal is a no-op;
        # on failure this removes the partial file so nothing leaks.
        try:
            os.remove(temp_fh.name)
        except OSError:
            pass
"""Basic atomic file writing:
import portalocker
# Write file atomically - either complete file is written or nothing
with portalocker.open_atomic('config.json') as fh:
import json
config_data = {'key': 'value', 'settings': {'debug': True}}
json.dump(config_data, fh)
# File only becomes visible at config.json when context exits successfully
# If anything goes wrong during writing, config.json won't be created/modified

Text mode atomic writing:
import portalocker
# Write text file atomically
with portalocker.open_atomic('output.txt', binary=False) as fh:
fh.write('Line 1\n')
fh.write('Line 2\n')
fh.write('Final line\n')
# File is complete when context exits

Atomic file updates:
import portalocker
import json
import os
def update_config_file(filename, new_settings):
    """Merge *new_settings* into a JSON config file, rewriting it atomically."""
    # Load whatever is currently on disk; a missing file means start empty.
    if os.path.exists(filename):
        with open(filename, 'r') as existing:
            merged = json.load(existing)
    else:
        merged = {}
    merged.update(new_settings)
    # Atomic rewrite: if anything fails here the original file is unchanged.
    with portalocker.open_atomic(filename, binary=False) as out:
        json.dump(merged, out, indent=2)
        out.write('\n')
# Usage
update_config_file('app_config.json', {'debug': False, 'version': '2.0'})

Binary file atomic operations:
import portalocker
import pickle
def save_data_atomically(filename, data):
    """Pickle *data* to *filename*; the file appears only once fully written."""
    # Binary mode is required for pickle streams.
    with portalocker.open_atomic(filename, binary=True) as out:
        pickle.dump(data, out)
# Save large data structure atomically
large_dataset = {'users': [...], 'transactions': [...]}
save_data_atomically('dataset.pkl', large_dataset)

Working with pathlib.Path:
import portalocker
import pathlib
# Works with pathlib.Path objects
data_dir = pathlib.Path('/data/exports')
output_file = data_dir / 'report.csv'
with portalocker.open_atomic(output_file, binary=False) as fh:
fh.write('column1,column2,column3\n')
fh.write('value1,value2,value3\n')
# Parent directories created automatically if needed

Error handling and cleanup:
import portalocker
import json
try:
with portalocker.open_atomic('critical_data.json') as fh:
# If this raises an exception, the target file remains untouched
data = generate_critical_data()
json.dump(data, fh)
# Simulate an error during writing
if should_fail():
raise ValueError("Processing failed")
except ValueError as e:
print(f"Writing failed: {e}")
# critical_data.json was not created/modified
# Temporary file was automatically cleaned up

Atomic replacement of existing files:
import portalocker
import shutil
def atomic_file_replacement(source_file, target_file):
    """Atomically replace target file with processed version of source"""
    # Stream source -> target; the target only materializes on success.
    with open(source_file, 'r') as reader, \
            portalocker.open_atomic(target_file, binary=False) as writer:
        for raw_line in reader:
            writer.write(process_line(raw_line))
    # Reaching this point means the atomic rename already happened.
    print(f"Successfully replaced {target_file}")
# Usage
atomic_file_replacement('input.log', 'processed.log')

Atomic file operations vs file locking serve different purposes:
import portalocker
# File locking: Multiple processes coordinate access to same file
def append_with_locking(filename, data):
    """Append *data* as one line; a lock serializes concurrent writers."""
    # An exclusive lock lets multiple processes safely share the same file.
    with portalocker.Lock(filename, 'a') as log:
        log.write(data + '\n')
        # Flush so the line is on disk before the lock is released.
        log.flush()
# Atomic writing: Ensure file is completely written or not at all
def replace_with_atomic(filename, data):
    """Write *data* so the file holds the complete content or nothing at all."""
    with portalocker.open_atomic(filename, binary=False) as out:
        out.write(data)
# Use locking for shared access
append_with_locking('shared.log', 'Process 1 data')
append_with_locking('shared.log', 'Process 2 data')
# Use atomic for complete replacement
replace_with_atomic('config.json', '{"version": "1.0"}')

The atomic nature depends on the underlying filesystem:
import portalocker
import os
# os.rename() is atomic on most POSIX systems and Windows
# but behavior may vary on network filesystems
def safe_atomic_write(filename, data):
    """Best-effort atomic write: True on success, False on any failure."""
    try:
        with portalocker.open_atomic(filename, binary=False) as out:
            out.write(data)
    except Exception as e:  # deliberately broad: report the failure, don't crash
        print(f"Atomic write failed: {e}")
        return False
    return True
# For critical applications, consider combining with locking
def ultra_safe_write(filename, data):
    """Serialize writers via a lock file, then replace the file atomically."""
    # A sidecar lock file keeps concurrent writers from racing each other.
    guard = filename + '.lock'
    with portalocker.Lock(guard, 'w'):
        # Only one process gets past the lock at a time.
        with portalocker.open_atomic(filename, binary=False) as out:
            out.write(data)

from typing import Union, Iterator
import pathlib
import typing

# Filename type (same as used by Lock classes): plain string or Path object.
Filename = typing.Union[str, pathlib.Path]

# Generic IO type for file handles
IO = Union[typing.IO[str], typing.IO[bytes]]

Install with Tessl CLI
npx tessl i tessl/pypi-portalocker