A python wrapper for rclone that makes rclone's functionality usable in python applications.
—
Comprehensive file system operations for retrieving directory listings, file metadata, storage information, and directory visualization. Provides structured data access to remote file systems with flexible filtering and formatting options.
List files and directories with detailed metadata, flexible filtering, and depth control for efficient directory traversal.
def ls(path: str, max_depth: Union[int, None] = None, dirs_only=False,
files_only=False, args=None) -> List[Dict[str, Union[int, str]]]:
"""
Lists files and directories with comprehensive metadata.
Parameters:
- path (str): Directory path to list ('remote:path' for remotes)
- max_depth (int, optional): Maximum recursion depth (1 = current directory only)
- dirs_only (bool): Return only directories
- files_only (bool): Return only files
- args (List[str]): Additional rclone lsjson flags
Returns:
List[Dict]: List of file/directory objects with metadata:
- Name (str): File or directory name
- Path (str): Full path relative to listed directory
- Size (int): File size in bytes (-1 for directories)
- ModTime (str): Last modification time (ISO format)
- IsDir (bool): True if directory, False if file
- ID (str): Unique file identifier (backend-specific)
- MimeType (str): MIME type for files
Raises:
RcloneException: If listing operation fails
"""Retrieve storage quotas, usage statistics, and backend-specific storage properties.
def about(path: str) -> Dict:
"""
Retrieves storage information and quotas for a path.
Parameters:
- path (str): Remote or local path to examine
Returns:
Dict: Storage information containing:
- total (int): Total storage capacity in bytes
- used (int): Used storage in bytes
- free (int): Available storage in bytes
- other (int): Other/reserved storage in bytes
- objects (int): Number of objects stored
- directories (int): Number of directories
Raises:
RcloneException: If about operation fails
"""Calculate total size and file counts for directories and subdirectories.
def size(path: str, args: List[str] = None) -> Dict:
"""
Calculates total size and object count for a path.
Parameters:
- path (str): Directory path to analyze
- args (List[str]): Additional rclone size flags
Returns:
Dict: Size statistics containing:
- count (int): Total number of files
- bytes (int): Total size in bytes
- sizeless (int): Number of objects without size information
Raises:
RcloneException: If size calculation fails
"""Generate tree-style directory structure visualization for easy navigation and overview.
def tree(path: str, args: List[str] = None) -> str:
"""
Generates tree-style directory listing.
Parameters:
- path (str): Root directory path for tree generation
- args (List[str]): Additional rclone tree flags
Returns:
str: Tree-formatted directory structure
Raises:
RcloneException: If tree generation fails
"""from rclone_python import rclone
# List files in remote directory
files = rclone.ls('onedrive:Documents')
# Print file information
for file in files:
print(f"Name: {file['Name']}")
print(f"Size: {file['Size']} bytes")
print(f"Modified: {file['ModTime']}")
print(f"Is Directory: {file['IsDir']}")
print("---")from rclone_python import rclone
# List only files (no directories)
files_only = rclone.ls('dropbox:Photos', files_only=True)
# List only directories
dirs_only = rclone.ls('box:Projects', dirs_only=True)
# Shallow listing (current directory only)
current_level = rclone.ls('gdrive:Work', max_depth=1)
# Deep listing with custom depth
deep_listing = rclone.ls('onedrive:Archive', max_depth=3)

from rclone_python import rclone
# Get storage quota and usage
storage_info = rclone.about('onedrive:')
print(f"Total: {storage_info['total'] / (1024**3):.2f} GB")
print(f"Used: {storage_info['used'] / (1024**3):.2f} GB")
print(f"Free: {storage_info['free'] / (1024**3):.2f} GB")
print(f"Files: {storage_info['objects']}")
print(f"Directories: {storage_info['directories']}")from rclone_python import rclone
# Calculate directory size
size_info = rclone.size('box:Projects/current')
print(f"Total files: {size_info['count']}")
print(f"Total size: {size_info['bytes'] / (1024**2):.2f} MB")
# Size with additional options
detailed_size = rclone.size(
'gdrive:Backups',
args=['--fast-list'] # Use fast listing for large directories
)

from rclone_python import rclone
# Generate directory tree
tree_output = rclone.tree('onedrive:Projects')
print(tree_output)
# Tree with custom options
detailed_tree = rclone.tree(
'dropbox:Archive',
args=['--human-readable', '--sort', 'size']
)
print(detailed_tree)

from rclone_python import rclone
# Find all Python files in directory tree
all_files = rclone.ls('projects:code', files_only=True)
python_files = [f for f in all_files if f['Name'].endswith('.py')]
# Find large files (> 100MB)
large_files = [f for f in all_files if f['Size'] > 100 * 1024 * 1024]
# Find recently modified directories
import datetime
recent_dirs = []
for item in rclone.ls('backup:data', dirs_only=True):
mod_time = datetime.datetime.fromisoformat(item['ModTime'].replace('Z', '+00:00'))
if (datetime.datetime.now(datetime.timezone.utc) - mod_time).days < 7:
recent_dirs.append(item)

The ls() function returns detailed metadata for each file and directory:
{
"Name": str, # File/directory name
"Path": str, # Full relative path
"Size": int, # Size in bytes (0 for directories)
"ModTime": str, # ISO 8601 timestamp
"IsDir": bool, # True for directories
"ID": str, # Backend-specific unique identifier
"MimeType": str, # MIME type (files only)
"Tier": str, # Storage tier (if applicable)
"IsBucket": bool, # True for bucket-like containers
"Hashes": { # Available hashes
"md5": str, # MD5 hash (if available)
"sha1": str, # SHA1 hash (if available)
# ... other hashes
}
}

The about() function returns comprehensive storage details:
{
"total": int, # Total capacity in bytes
"used": int, # Used space in bytes
"free": int, # Available space in bytes
"other": int, # Reserved/other space in bytes
"objects": int, # Total number of objects
"directories": int # Total number of directories
}

from rclone_python import rclone
def process_directory_recursive(path, callback):
"""Process all files in directory tree"""
files = rclone.ls(path, files_only=True)
for file in files:
callback(file)
def print_file_info(file):
print(f"Processing: {file['Path']} ({file['Size']} bytes)")
# Process all files in remote directory
process_directory_recursive('onedrive:Data', print_file_info)

from rclone_python import rclone
def monitor_storage_usage(remote):
"""Monitor storage usage across remotes"""
info = rclone.about(f"{remote}:")
usage_percent = (info['used'] / info['total']) * 100
print(f"Storage Usage for {remote}:")
print(f" Used: {usage_percent:.1f}%")
print(f" Free: {info['free'] / (1024**3):.2f} GB")
if usage_percent > 90:
print(f" WARNING: {remote} is nearly full!")
# Monitor multiple remotes
for remote in ['onedrive', 'dropbox', 'box']:
if rclone.check_remote_existing(remote):
monitor_storage_usage(remote)

Install with Tessl CLI
npx tessl i tessl/pypi-rclone-python