Extremely thin and easy-to-install Python bindings to de/compression algorithms in Rust
—
Quality
Pending
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Standard compression algorithms with consistent API patterns. All modules support compression levels and provide both functional and streaming interfaces.
from cramjam import gzip, zlib, deflate, bzip2, zstd, brotli

All standard compression modules follow the same interface:
# Basic compression/decompression
def compress(data: BufferProtocol, level: Optional[int] = None, output_len: Optional[int] = None) -> Buffer
def decompress(data: BufferProtocol, output_len: Optional[int] = None) -> Buffer
# Direct buffer operations
def compress_into(input: BufferProtocol, output: BufferProtocol, level: Optional[int] = None) -> int
def decompress_into(input: BufferProtocol, output: BufferProtocol) -> int
# Streaming classes
class Compressor:
def __init__(self, level: Optional[int] = None) -> None
def compress(self, input: bytes) -> int
def flush(self) -> Buffer
def finish(self) -> Buffer
class Decompressor:
def decompress(self, data: bytes) -> Buffer

Standard gzip compression with default level 6.
import cramjam
def compress(data: BufferProtocol, level: Optional[int] = None, output_len: Optional[int] = None) -> Buffer:
"""Gzip compression.
Args:
data: Input data to compress
level: Compression level (default: 6)
output_len: Optional expected output length
Returns:
Buffer: Compressed data
"""
def decompress(data: BufferProtocol, output_len: Optional[int] = None) -> Buffer:
"""Gzip decompression.
Args:
data: Compressed data to decompress
output_len: Optional expected output length
Returns:
Buffer: Decompressed data
"""def compress_into(input: BufferProtocol, output: BufferProtocol, level: Optional[int] = None) -> int:
"""Compress directly into output buffer.
Args:
input: Data to compress
output: Pre-allocated output buffer
level: Compression level (optional)
Returns:
int: Number of bytes written
"""
def decompress_into(input: BufferProtocol, output: BufferProtocol) -> int:
"""Decompress directly into output buffer.
Args:
input: Compressed data
output: Pre-allocated output buffer
Returns:
int: Number of bytes written
"""class Compressor:
"""Gzip streaming compressor."""
def __init__(self, level: Optional[int] = None) -> None:
"""Initialize compressor with optional level."""
def compress(self, input: bytes) -> int:
"""Add data to compression stream."""
def flush(self) -> Buffer:
"""Flush and return current compressed stream."""
def finish(self) -> Buffer:
"""Finish compression and return final stream."""
class Decompressor:
"""Gzip streaming decompressor."""
def decompress(self, data: bytes) -> Buffer:
"""Decompress data chunk."""import cramjam
# Basic compression with custom level
data = b"Hello World!" * 1000
compressed = cramjam.gzip.compress(data, level=9) # Maximum compression
decompressed = cramjam.gzip.decompress(compressed)
# Memory-efficient compression
output_buffer = cramjam.Buffer()
bytes_written = cramjam.gzip.compress_into(data, output_buffer, level=1) # Fast compression
# Streaming compression for large data
compressor = cramjam.gzip.Compressor(level=6)
compressor.compress(b"First chunk")
compressor.compress(b"Second chunk")
final_data = compressor.finish()

Zlib compression with default level 6.
def compress(data: BufferProtocol, level: Optional[int] = None, output_len: Optional[int] = None) -> Buffer:
"""Zlib compression."""
def decompress(data: BufferProtocol, output_len: Optional[int] = None) -> Buffer:
"""Zlib decompression."""
def compress_into(input: BufferProtocol, output: BufferProtocol, level: Optional[int] = None) -> int:
"""Compress directly into output buffer."""
def decompress_into(input: BufferProtocol, output: BufferProtocol) -> int:
"""Decompress directly into output buffer."""
class Compressor:
def __init__(self, level: Optional[int] = None) -> None: ...
class Decompressor: ...

import cramjam
# Zlib is similar to gzip but without file headers
data = b"Zlib compression data"
compressed = cramjam.zlib.compress(data, level=5)
decompressed = cramjam.zlib.decompress(compressed)

Raw deflate compression with default level 6.
def compress(data: BufferProtocol, level: Optional[int] = None, output_len: Optional[int] = None) -> Buffer:
"""Deflate compression."""
def decompress(data: BufferProtocol, output_len: Optional[int] = None) -> Buffer:
"""Deflate decompression."""
def compress_into(input: BufferProtocol, output: BufferProtocol, level: Optional[int] = None) -> int:
"""Compress directly into output buffer."""
def decompress_into(input: BufferProtocol, output: BufferProtocol) -> int:
"""Decompress directly into output buffer."""class Compressor:
"""Deflate compressor with enhanced streaming support."""
def __init__(self, level: Optional[int] = None) -> None: ...
def compress(self, input: BufferProtocol) -> int:
"""Compress input and return bytes written to stream."""
def flush(self) -> Buffer:
"""Flush and return current compressed stream."""
def finish(self) -> Buffer:
"""Finish compression and return final stream."""
class Decompressor:
"""Deflate decompressor with streaming support."""
def decompress(self, input: BufferProtocol) -> Buffer:
"""Decompress data chunk."""
def flush(self) -> Buffer:
"""Flush decompressor state."""
def finish(self) -> Buffer:
"""Finish decompression."""import cramjam
# Raw deflate - no headers or checksums
data = b"Raw deflate data"
compressed = cramjam.deflate.compress(data)
decompressed = cramjam.deflate.decompress(compressed)
# Streaming with enhanced control
compressor = cramjam.deflate.Compressor(level=3)
compressor.compress(b"Chunk 1")
intermediate = compressor.flush() # Get partial result
compressor.compress(b"Chunk 2")
final = compressor.finish()

Bzip2 compression with default level 6.
def compress(data: BufferProtocol, level: Optional[int] = None, output_len: Optional[int] = None) -> Buffer:
"""Bzip2 compression."""
def decompress(data: BufferProtocol, output_len: Optional[int] = None) -> Buffer:
"""Bzip2 decompression."""
def compress_into(input: BufferProtocol, output: BufferProtocol, level: Optional[int] = None) -> int:
"""Compress directly into output buffer."""
def decompress_into(input: BufferProtocol, output: BufferProtocol) -> int:
"""Decompress directly into output buffer."""
class Compressor:
def __init__(self, level: Optional[int] = None) -> None: ...
class Decompressor: ...

import cramjam
# Bzip2 provides good compression ratio
data = b"Bzip2 test data" * 100
compressed = cramjam.bzip2.compress(data, level=9) # Best compression
decompressed = cramjam.bzip2.decompress(compressed)

Modern Zstandard compression with default level 6.
def compress(data: BufferProtocol, level: Optional[int] = None, output_len: Optional[int] = None) -> Buffer:
"""Zstd compression."""
def decompress(data: BufferProtocol, output_len: Optional[int] = None) -> Buffer:
"""Zstd decompression."""
def compress_into(input: BufferProtocol, output: BufferProtocol, level: Optional[int] = None) -> int:
"""Compress directly into output buffer."""
def decompress_into(input: BufferProtocol, output: BufferProtocol) -> int:
"""Decompress directly into output buffer."""
class Compressor:
def __init__(self, level: Optional[int] = None) -> None: ...
class Decompressor: ...

import cramjam
# ZSTD offers excellent speed/compression balance
data = b"Modern compression with ZSTD" * 500
compressed = cramjam.zstd.compress(data, level=3) # Fast, good compression
decompressed = cramjam.zstd.decompress(compressed)
# Streaming for large datasets
compressor = cramjam.zstd.Compressor(level=10) # Slower but better compression
for chunk in large_data_chunks:
compressor.compress(chunk)
result = compressor.finish()

Web-optimized Brotli compression with default level 11.
def compress(data: BufferProtocol, level: Optional[int] = None, output_len: Optional[int] = None) -> Buffer:
"""Brotli compression.
Args:
data: Input data to compress
level: Compression level (default: 11)
output_len: Optional expected output length
"""
def decompress(data: BufferProtocol, output_len: Optional[int] = None) -> Buffer:
"""Brotli decompression."""
def compress_into(input: BufferProtocol, output: BufferProtocol, level: Optional[int] = None) -> int:
"""Compress directly into output buffer."""
def decompress_into(input: BufferProtocol, output: BufferProtocol) -> int:
"""Decompress directly into output buffer."""class Compressor:
"""Brotli compressor with streaming support."""
def __init__(self, level: Optional[int] = None) -> None: ...
def compress(self, input: BufferProtocol) -> int:
"""Compress input and return bytes written to stream."""
def flush(self) -> Buffer:
"""Flush and return current compressed stream."""
def finish(self) -> Buffer:
"""Finish compression and return final stream."""
class Decompressor:
"""Brotli decompressor with streaming support."""
def decompress(self, input: BufferProtocol) -> Buffer:
"""Decompress data chunk."""
def flush(self) -> Buffer:
"""Flush decompressor state."""
def finish(self) -> Buffer:
"""Finish decompression."""import cramjam
# Brotli is optimized for web content
web_content = b"<html><body>Web page content</body></html>" * 100
compressed = cramjam.brotli.compress(web_content, level=6) # Balanced speed/size
decompressed = cramjam.brotli.decompress(compressed)
# Streaming for web responses
compressor = cramjam.brotli.Compressor(level=4) # Fast for real-time
compressor.compress(b"HTTP response chunk 1")
partial = compressor.flush()
compressor.compress(b"HTTP response chunk 2")
final = compressor.finish()

import cramjam
import time
data = b"Performance test data" * 10000
# Fast compression (level 1)
start = time.time()
fast_compressed = cramjam.zstd.compress(data, level=1)
fast_time = time.time() - start
# Balanced compression (level 6)
start = time.time()
balanced_compressed = cramjam.zstd.compress(data, level=6)
balanced_time = time.time() - start
# Best compression (level 15)
start = time.time()
best_compressed = cramjam.zstd.compress(data, level=15)
best_time = time.time() - start
print(f"Fast: {len(fast_compressed)} bytes in {fast_time:.3f}s")
print(f"Balanced: {len(balanced_compressed)} bytes in {balanced_time:.3f}s")
print(f"Best: {len(best_compressed)} bytes in {best_time:.3f}s")import cramjam
# For maximum compatibility
standard_data = cramjam.gzip.compress(data) # Widely supported
# For modern applications
efficient_data = cramjam.zstd.compress(data) # Best performance
# For web applications
web_data = cramjam.brotli.compress(data) # Optimized for HTTP
# For archival storage
archive_data = cramjam.bzip2.compress(data, level=9)  # Maximum compression

Install with Tessl CLI
npx tessl i tessl/pypi-cramjam