Python subprocess replacement that allows calling system commands as Python functions
—
Comprehensive I/O redirection and streaming capabilities including real-time output processing, piping between commands, input feeding, and output capturing. Enables sophisticated data flow control and processing pipelines.
Process command output line-by-line in real-time as it's generated, enabling live monitoring and processing.
def __call__(self, *args, _out=None, _err=None, **kwargs):
"""
Execute command with real-time output processing.
Parameters:
- _out: callable = function to process stdout lines
- _err: callable = function to process stderr lines
Returns:
str: Complete output (if no callback specified)
RunningCommand: Process object (for background execution)
"""

Usage examples:
import sh
# Process output line by line
def handle_output(line):
print(f"LOG: {line.strip()}")
def handle_error(line):
print(f"ERROR: {line.strip()}")
# Monitor log file in real-time
sh.tail("-f", "/var/log/system.log", _out=handle_output, _err=handle_error)
# Process build output with custom formatting
def format_build_output(line):
if "ERROR" in line:
print(f"🔴 {line.strip()}")
elif "WARNING" in line:
print(f"🟡 {line.strip()}")
elif "SUCCESS" in line:
print(f"🟢 {line.strip()}")
else:
print(f"ℹ️ {line.strip()}")
sh.make("build", _out=format_build_output)

Capture and buffer command output for later processing or analysis.
class RunningCommand:
@property
def stdout(self) -> str:
"""Complete stdout output as string."""
@property
def stderr(self) -> str:
"""Complete stderr output as string."""

def __call__(self, *args, _tee=False, **kwargs):
"""
Execute command with output capturing.
Parameters:
- _tee: bool/str = True to echo output to console, "out"/"err" for specific streams
Returns:
str: Command output
"""

Usage examples:
import sh
# Capture output for processing
result = sh.ps("aux")
lines = result.split('\n')
python_processes = [line for line in lines if 'python' in line]
# Capture both stdout and stderr separately
proc = sh.grep("pattern", "file.txt", _bg=True)
proc.wait()
stdout_content = proc.stdout
stderr_content = proc.stderr
if proc.exit_code == 0:
print("Found matches:", stdout_content)
else:
print("Errors occurred:", stderr_content)
# Tee output (show and capture)
output = sh.wget("http://example.com/file.zip", _tee=True)
# Output is shown on console AND captured in variable

Feed input data to commands that read from stdin.
def __call__(self, *args, _in=None, **kwargs):
"""
Execute command with input feeding.
Parameters:
- _in: str/bytes/file/iterable = input data to feed to command
Returns:
str: Command output
"""

Usage examples:
import sh
# Feed string input
result = sh.grep("pattern", _in="line1\nline2\npattern here\nline4")
print(result)
# Feed file content
with open("input.txt", "r") as f:
output = sh.sort(_in=f)
# Feed data from another command's output
log_data = sh.cat("/var/log/system.log")
filtered = sh.grep("ERROR", _in=log_data)
# Feed bytes data
binary_data = b"binary content here"
result = sh.hexdump(_in=binary_data)
# Feed iterative data
def data_generator():
for i in range(1000):
yield f"line {i}\n"
sh.wc("-l", _in=data_generator())

Chain commands together using pipes to create processing pipelines.
def __call__(self, *args, _piped=False, **kwargs):
"""
Execute command with piping enabled.
Parameters:
- _piped: bool = set to True to enable piping output to other commands (defaults to False)
Returns:
RunningCommand: Command object that can be piped to others
"""

Usage examples:
import sh
# Basic piping
result = sh.grep(sh.ps("aux"), "python")
print(result)
# Complex pipeline
pipeline = sh.sort(sh.uniq(sh.cut(sh.cat("data.txt"), "-f", "2")))
print(pipeline)
# Multi-stage processing
logs = sh.cat("/var/log/system.log")
errors = sh.grep(logs, "ERROR")
recent_errors = sh.tail(errors, "-n", "10")
print(recent_errors)
# Pipeline with formatting
def format_ps_output(line):
parts = line.split()
if len(parts) > 10:
return f"PID: {parts[1]}, CMD: {parts[10]}"
return line
formatted = sh.ps("aux", _out=format_ps_output)
python_procs = sh.grep(formatted, "python")

Redirect command streams to files or other destinations.
def __call__(self, *args, _out_bufsize=0, _err_bufsize=0, **kwargs):
"""
Execute command with stream buffering control.
Parameters:
- _out_bufsize: int = stdout buffer size (0=unbuffered, 1=line-buffered)
- _err_bufsize: int = stderr buffer size
Returns:
str: Command output
"""

Usage examples:
import sh
from io import StringIO
# Redirect to file-like objects
stdout_buffer = StringIO()
stderr_buffer = StringIO()
def write_stdout(line):
stdout_buffer.write(line)
def write_stderr(line):
stderr_buffer.write(line)
sh.make("build", _out=write_stdout, _err=write_stderr)
# Get captured content
build_output = stdout_buffer.getvalue()
build_errors = stderr_buffer.getvalue()
# Unbuffered output for real-time processing
sh.tail("-f", "/var/log/system.log", _out_bufsize=0, _out=lambda line: print(line, end=''))

Complex I/O handling patterns for sophisticated data processing workflows.
import sh
import threading
import queue
# Async output processing
output_queue = queue.Queue()
def queue_output(line):
output_queue.put(line.strip())
def process_output():
while True:
try:
line = output_queue.get(timeout=1)
# Process line here
print(f"Processed: {line}")
output_queue.task_done()
except queue.Empty:
break
# Start output processor thread
processor_thread = threading.Thread(target=process_output)
processor_thread.start()
# Run command with queued output
sh.find("/", "-name", "*.log", _out=queue_output, _bg=True)
# Wait for processing to complete
processor_thread.join()
# Filtered piping with custom logic
class FilteredPipe:
def __init__(self, filter_func):
self.filter_func = filter_func
self.buffer = []
def __call__(self, line):
if self.filter_func(line):
self.buffer.append(line.strip())
def get_results(self):
return '\n'.join(self.buffer)
# Filter for Python processes only
python_filter = FilteredPipe(lambda line: 'python' in line.lower())
sh.ps("aux", _out=python_filter)
python_processes = python_filter.get_results()
print("Python processes:")
print(python_processes)

Handle binary data streams and non-text command output.
def __call__(self, *args, _decode_errors='strict', **kwargs):
"""
Execute command with binary data handling.
Parameters:
- _decode_errors: str = how to handle decode errors ('strict', 'ignore', 'replace')
Returns:
str/bytes: Command output
"""

Usage examples:
import sh
# Handle binary output
binary_data = sh.cat("/bin/ls", _decode_errors='ignore')
print(f"Binary data length: {len(binary_data)}")
# Process binary streams
def handle_binary_chunk(chunk):
# Process binary data chunk
if isinstance(chunk, bytes):
print(f"Received {len(chunk)} bytes")
else:
print(f"Text chunk: {chunk[:50]}...")
# Commands that output binary data
sh.dd("if=/dev/urandom", "count=10", "bs=1024", _out=handle_binary_chunk)
# Handle mixed text/binary output
sh.hexdump("-C", "/bin/ls", _decode_errors='replace', _out=lambda line: print(line[:80]))

Install with Tessl CLI
npx tessl i tessl/pypi-sh