High Dynamic Range (HDR) Histogram for recording and analyzing value distributions with configurable precision across wide dynamic ranges.
—
Supporting classes providing specialized functionality for synchronization, encoding, data transformation, and low-level operations within the HdrHistogram ecosystem.
Asymmetric synchronization primitive enabling wait-free writers with phase-flipping readers. This utility coordinates concurrent recording with non-blocking reader phases.
public class WriterReaderPhaser {
// Constructor
public WriterReaderPhaser();
// Writer methods (wait-free)
long writerCriticalSectionEnter();
void writerCriticalSectionExit(long criticalValueAtEnter);
// Reader methods (blocking for other readers)
void readerLock();
void readerUnlock();
// Phase management
void flipPhase();
void flipPhase(long sleepTimeBeforeFlip);
// State queries
long getEpoch();
}

The WriterReaderPhaser enables a sophisticated coordination pattern:
// Example: Coordinated histogram access pattern
public class CoordinatedHistogramAccess {
    private final WriterReaderPhaser phaser = new WriterReaderPhaser();
    private volatile Histogram activeHistogram;
    private volatile Histogram inactiveHistogram;

    // Wait-free recording (multiple writers supported)
    public void recordValue(long value) {
        long criticalValue = phaser.writerCriticalSectionEnter();
        try {
            // Record to current active histogram
            activeHistogram.recordValue(value);
        } finally {
            phaser.writerCriticalSectionExit(criticalValue);
        }
    }

    /**
     * Swaps the active and inactive histograms and returns the histogram that
     * accumulated the just-completed interval's data.
     *
     * FIX: the original returned the old *inactive* histogram (which it had also
     * just made the new active write target — an aliasing bug) and then reset the
     * old active histogram, destroying the very data it was supposed to return.
     * The correct pattern (as in HdrHistogram's Recorder): reset the spare first,
     * swap, flip the phase to drain writers still in the retired histogram, then
     * return the retired histogram.
     */
    public Histogram getIntervalHistogram() {
        phaser.readerLock();
        try {
            // Prepare the spare histogram before exposing it to writers.
            inactiveHistogram.reset();
            // Swap: the freshly reset histogram becomes the writers' target,
            // and the previously active histogram (holding this interval's
            // data) is retired.
            Histogram retired = activeHistogram;
            activeHistogram = inactiveHistogram;
            inactiveHistogram = retired;
            // Wait until every writer that entered against the retired
            // histogram has exited its critical section.
            phaser.flipPhase();
            return inactiveHistogram; // Contains previous interval's data
        } finally {
            phaser.readerUnlock();
        }
    }
}

public class AdvancedPhaserUsage {
private final WriterReaderPhaser phaser = new WriterReaderPhaser();
private final AtomicLong totalRecordings = new AtomicLong();

/**
 * Demonstrates wait-free writers combined with a periodic phase-flipping reader.
 */
public void demonstratePhaserPatterns() {
    // Multiple concurrent writers
    ExecutorService writers = Executors.newFixedThreadPool(4);
    for (int i = 0; i < 4; i++) {
        writers.submit(() -> {
            Random random = new Random();
            for (int j = 0; j < 100000; j++) {
                // Wait-free writer pattern
                long criticalValue = phaser.writerCriticalSectionEnter();
                try {
                    // Simulate work with shared data structure
                    performWriterWork(random.nextInt(1000));
                    totalRecordings.incrementAndGet();
                } finally {
                    phaser.writerCriticalSectionExit(criticalValue);
                }
            }
        });
    }

    // Single reader with periodic phase flips
    ScheduledExecutorService reader = Executors.newSingleThreadScheduledExecutor();
    reader.scheduleAtFixedRate(() -> {
        phaser.readerLock();
        try {
            long currentEpoch = phaser.getEpoch();
            long recordings = totalRecordings.get();
            System.out.printf("Epoch %d: %d total recordings%n", currentEpoch, recordings);
            // Flip phase for next measurement interval
            phaser.flipPhase(10); // Brief pause before flip
        } finally {
            phaser.readerUnlock();
        }
    }, 1, 1, TimeUnit.SECONDS);

    // FIX: the original called shutdown() on both executors immediately, which
    // cancels the scheduled reader before its first run and never waits for the
    // writers. Let the writers finish, give the reader a few periods to report,
    // then stop it.
    writers.shutdown();
    try {
        if (!writers.awaitTermination(30, TimeUnit.SECONDS)) {
            writers.shutdownNow();
        }
        Thread.sleep(TimeUnit.SECONDS.toMillis(3)); // let the reader report a few epochs
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve interrupt status
    } finally {
        reader.shutdown();
    }
}
}

Utility class for Base64 encoding and decoding of histogram data, enabling text-based storage and transmission.
public class Base64Helper {
// Encoding methods
static String printBase64Binary(byte[] bytes);
static String printBase64Binary(byte[] bytes, int offset, int length);
// Decoding methods
static byte[] parseBase64Binary(String base64String);
}

// Encode binary data to Base64 string
byte[] rawHistogramBytes = getHistogramBinaryData();
String encoded = Base64Helper.printBase64Binary(rawHistogramBytes);
System.out.println("Base64 encoded histogram:");
System.out.println(encoded);

// Decode Base64 string back to binary
byte[] roundTripped = Base64Helper.parseBase64Binary(encoded);

// Verify integrity: decoding must reproduce the original bytes exactly
boolean isIdentical = Arrays.equals(rawHistogramBytes, roundTripped);
System.out.printf("Data integrity preserved: %s%n", isIdentical);

public class HistogramBase64Serialization {
/**
 * Compresses the given histogram and renders the compressed form as a
 * Base64 string. Any failure is rethrown as a RuntimeException with the
 * original cause attached.
 */
public String serializeHistogram(AbstractHistogram histogram) {
    try {
        // Allocate exactly the capacity the histogram says it may need,
        // then write the compressed representation into it.
        ByteBuffer scratch = ByteBuffer.allocate(histogram.getNeededByteBufferCapacity());
        int compressedSize = histogram.encodeIntoCompressedByteBuffer(scratch, 6);

        // Copy out only the bytes actually produced by compression.
        scratch.flip();
        byte[] compressedBytes = new byte[compressedSize];
        scratch.get(compressedBytes);

        // Render as Base64 text.
        return Base64Helper.printBase64Binary(compressedBytes);
    } catch (Exception e) {
        throw new RuntimeException("Failed to serialize histogram", e);
    }
}
/**
 * Reverses serializeHistogram: Base64 text -> compressed bytes -> Histogram.
 */
public Histogram deserializeHistogram(String base64Data) {
    try {
        byte[] compressedBytes = Base64Helper.parseBase64Binary(base64Data);
        ByteBuffer wrapped = ByteBuffer.wrap(compressedBytes);
        // 1000 is the minimum highest-trackable-value used when rebuilding.
        return Histogram.decodeFromCompressedByteBuffer(wrapped, 1000);
    } catch (Exception e) {
        throw new RuntimeException("Failed to deserialize histogram", e);
    }
}
// Example usage in configuration or messaging
public void demonstrateBase64Usage() {
    // Build a sample histogram with 10k random values.
    Histogram source = new Histogram(3);
    Random rng = new Random();
    for (int sample = 0; sample < 10000; sample++) {
        source.recordValue(rng.nextInt(10000));
    }

    // Serialize to Base64
    String base64Form = serializeHistogram(source);
    System.out.printf("Serialized histogram length: %d characters%n", base64Form.length());

    // Deserialize from Base64
    Histogram roundTripped = deserializeHistogram(base64Form);

    // Verify equivalence
    if (source.equals(roundTripped)) {
        System.out.println("✓ Base64 serialization preserves histogram data");
    }

    // Show usage in JSON-like structures (truncated preview)
    String preview = base64Form.substring(0, Math.min(50, base64Form.length())) + "...";
    System.out.printf("JSON representation: {\"histogram\": \"%s\"}%n", preview);
}
}

Utility class for ZigZag encoding and decoding, used internally for efficient variable-length integer serialization.
public class ZigZagEncoding {
// Long encoding/decoding
static void putLong(ByteBuffer buffer, long value);
static long getLong(ByteBuffer buffer);
// Integer encoding/decoding
static void putInt(ByteBuffer buffer, int value);
static int getInt(ByteBuffer buffer);
}

ZigZag encoding maps signed integers to unsigned integers efficiently:
public class ZigZagEncodingDemo {
    /** Round-trips a set of representative values and reports encoded sizes. */
    public void demonstrateZigZagEncoding() {
        long[] samples = {0, -1, 1, -100, 100, -10000, 10000, Long.MAX_VALUE, Long.MIN_VALUE};
        ByteBuffer scratch = ByteBuffer.allocate(1024);
        System.out.println("ZigZag Encoding Demonstration:");
        System.out.println("Original -> Encoded -> Decoded");
        for (long original : samples) {
            scratch.clear();
            // Encode, noting how many bytes the varint form consumed.
            ZigZagEncoding.putLong(scratch, original);
            int bytesUsed = scratch.position();
            // Decode and confirm the round trip.
            scratch.flip();
            long roundTripped = ZigZagEncoding.getLong(scratch);
            String mark = (original == roundTripped) ? "✓" : "✗";
            System.out.printf("%12d -> %d bytes -> %12d %s%n",
                    original, bytesUsed, roundTripped, mark);
        }
    }

    /** Contrasts ZigZag variable-length sizes against fixed 8-byte longs. */
    public void compareEncodingEfficiency() {
        Random rng = new Random();
        ByteBuffer zigzagScratch = ByteBuffer.allocate(10000);
        ByteBuffer fixedScratch = ByteBuffer.allocate(10000);
        // Small values are common in histogram contexts, where ZigZag shines.
        for (int trial = 0; trial < 1000; trial++) {
            long sample = rng.nextInt(10000) - 5000; // Range: -5000 to +5000
            // ZigZag encoding
            zigzagScratch.clear();
            ZigZagEncoding.putLong(zigzagScratch, sample);
            int zigzagBytes = zigzagScratch.position();
            // Fixed-width encoding (always 8 bytes)
            fixedScratch.clear();
            fixedScratch.putLong(sample);
            int standardBytes = fixedScratch.position();
            if (trial < 10) { // Show first 10 examples
                System.out.printf("Value %6d: ZigZag=%d bytes, Standard=%d bytes%n",
                        sample, zigzagBytes, standardBytes);
            }
        }
    }
}

public class CustomZigZagApplications {
// Efficient delta encoding using ZigZag
/**
 * Encodes each value as the ZigZag varint of its delta from the previous
 * element (first delta is taken against 0), returning a compact byte array.
 */
public byte[] encodeDeltaSequence(long[] values) {
    // Worst case is 10 bytes per ZigZag-encoded long.
    ByteBuffer scratch = ByteBuffer.allocate(values.length * 10);
    long prior = 0;
    for (long current : values) {
        ZigZagEncoding.putLong(scratch, current - prior);
        prior = current;
    }
    // Trim to the bytes actually written.
    byte[] compact = new byte[scratch.position()];
    scratch.flip();
    scratch.get(compact);
    return compact;
}
/**
 * Reverses encodeDeltaSequence: reads ZigZag deltas until the buffer is
 * exhausted, accumulating them back into absolute values.
 */
public long[] decodeDeltaSequence(byte[] encoded) {
    ByteBuffer source = ByteBuffer.wrap(encoded);
    List<Long> absolutes = new ArrayList<>();
    long running = 0;
    while (source.hasRemaining()) {
        running += ZigZagEncoding.getLong(source);
        absolutes.add(running);
    }
    // Unbox into a primitive array.
    long[] result = new long[absolutes.size()];
    for (int i = 0; i < result.length; i++) {
        result[i] = absolutes.get(i);
    }
    return result;
}
// Demonstrate delta encoding efficiency
public void demonstrateDeltaEncoding() {
    // Monotonic sequence with small steps (typical of histogram bucket data).
    long[] sequence = new long[100];
    Random rng = new Random();
    long running = 1000;
    for (int i = 0; i < sequence.length; i++) {
        running += rng.nextInt(10) + 1; // Small increments
        sequence[i] = running;
    }

    // Fixed-width baseline vs. delta ZigZag.
    int standardSize = sequence.length * 8; // 8 bytes per long
    byte[] deltaEncoded = encodeDeltaSequence(sequence);

    System.out.printf("Sequence encoding comparison:%n");
    System.out.printf(" Standard: %d bytes%n", standardSize);
    System.out.printf(" Delta ZigZag: %d bytes%n", deltaEncoded.length);
    System.out.printf(" Compression ratio: %.2f%n",
            (double) deltaEncoded.length / standardSize);

    // Verify correctness of the round trip.
    long[] decoded = decodeDeltaSequence(deltaEncoded);
    boolean correct = Arrays.equals(sequence, decoded);
    System.out.printf(" Decoding correctness: %s%n", correct ? "✓" : "✗");
}
}

Supporting classes for memory-efficient sparse array storage used by PackedHistogram variants.
public abstract class AbstractPackedArrayContext {
// Array management
abstract void setVirtualLength(int virtualLength);
abstract int getVirtualLength();
// Value access
abstract long getValueAtIndex(int index);
abstract void setValueAtIndex(int index, long value);
abstract long incrementValue(int index, long increment);
// Bulk operations
abstract int getPopulatedShortLength();
abstract boolean hasIndex(int index);
abstract void clear();
// Memory management
abstract void resize(int newVirtualLength);
abstract long getPhysicalLength();
}

Non-concurrent implementation for single-threaded packed array operations.
public class PackedArrayContext extends AbstractPackedArrayContext {
// Constructors
public PackedArrayContext(int virtualLength);
public PackedArrayContext(int virtualLength, int initialPhysicalLength);
// Copy operations
void copyAndIncrement(AbstractPackedArrayContext sourceContext,
AbstractPackedArrayContext targetContext);
}

Thread-safe implementation supporting concurrent packed array operations.
public class ConcurrentPackedArrayContext extends AbstractPackedArrayContext {
// Constructors
public ConcurrentPackedArrayContext(int virtualLength);
public ConcurrentPackedArrayContext(int virtualLength, int initialPhysicalLength);
// Thread-safe operations
@Override
public long incrementValue(int index, long increment);
// Concurrent copy operations
void copyAndIncrement(AbstractPackedArrayContext sourceContext,
AbstractPackedArrayContext targetContext);
}

public class PackedArrayDemo {
/** Contrasts sparse packed storage with a dense long[] of the same virtual length. */
public void demonstratePackedArrayEfficiency() {
    int virtualLength = 1000000; // 1M possible indices

    PackedArrayContext packedContext = new PackedArrayContext(virtualLength);
    long[] denseArray = new long[virtualLength]; // For comparison

    // Populate with sparse data (only 1% of indices used).
    Random rng = new Random();
    Set<Integer> touched = new HashSet<>();
    for (int i = 0; i < virtualLength / 100; i++) { // 1% population
        int idx = rng.nextInt(virtualLength);
        long v = rng.nextInt(1000) + 1;
        packedContext.setValueAtIndex(idx, v);
        denseArray[idx] = v;
        touched.add(idx);
    }

    // Compare memory usage: dense pays for every slot, packed only for populated ones.
    long denseBytes = virtualLength * 8L; // 8 bytes per long
    long packedBytes = packedContext.getPhysicalLength() * 8L; // Actual storage
    System.out.printf("Memory usage comparison:%n");
    System.out.printf(" Standard array: %,d bytes%n", denseBytes);
    System.out.printf(" Packed array: %,d bytes%n", packedBytes);
    System.out.printf(" Memory savings: %.1f%% (%,d bytes saved)%n",
            100.0 * (denseBytes - packedBytes) / denseBytes,
            denseBytes - packedBytes);
    System.out.printf(" Populated indices: %,d / %,d (%.2f%%)%n",
            touched.size(), virtualLength, 100.0 * touched.size() / virtualLength);

    // Verify data integrity against the dense reference copy.
    boolean intact = true;
    for (int idx : touched) {
        if (packedContext.getValueAtIndex(idx) != denseArray[idx]) {
            intact = false;
            break;
        }
    }
    System.out.printf(" Data integrity: %s%n", intact ? "✓" : "✗");
}
/** Hammers a ConcurrentPackedArrayContext from four threads and tallies the results. */
public void demonstrateConcurrentPackedArray() {
    int virtualLength = 100000;
    ConcurrentPackedArrayContext context = new ConcurrentPackedArrayContext(virtualLength);

    // Four workers, each doing 10k thread-safe increments.
    ExecutorService pool = Executors.newFixedThreadPool(4);
    CountDownLatch done = new CountDownLatch(4);
    for (int t = 0; t < 4; t++) {
        final int seed = t;
        pool.submit(() -> {
            try {
                Random rng = new Random(seed); // Different seed per thread
                for (int i = 0; i < 10000; i++) {
                    // Concentrate on the first 1000 indices.
                    context.incrementValue(rng.nextInt(1000), 1);
                }
            } finally {
                done.countDown();
            }
        });
    }
    try {
        done.await();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }

    // Tally what the workers produced.
    long totalIncrements = 0;
    int populatedIndices = 0;
    for (int i = 0; i < 1000; i++) {
        long v = context.getValueAtIndex(i);
        if (v > 0) {
            totalIncrements += v;
            populatedIndices++;
        }
    }
    System.out.printf("Concurrent packed array results:%n");
    System.out.printf(" Total increments: %,d%n", totalIncrements);
    System.out.printf(" Populated indices: %d / 1000%n", populatedIndices);
    System.out.printf(" Physical memory used: %,d bytes%n",
            context.getPhysicalLength() * 8);
    pool.shutdown();
}
}

Utility for scanning histogram log files to extract metadata and time ranges.
public class HistogramLogScanner {
// Constructor
public HistogramLogScanner(String fileName);
// Scanning methods
void scan();
double getStartTimeSec();
double getEndTimeSec();
long getTotalCount();
int getHistogramCount();
// Analysis methods
void outputTimeRange();
void outputSummary();
}

public class AdvancedUtilityPatterns {
// Coordinated multi-histogram recording
public static class CoordinatedMultiHistogram {
    private final WriterReaderPhaser phaser = new WriterReaderPhaser();
    // Non-final so the reader can swap whole maps; volatile so writers always
    // see the current "active" map reference.
    private volatile Map<String, Histogram> activeHistograms = new ConcurrentHashMap<>();
    private volatile Map<String, Histogram> inactiveHistograms = new ConcurrentHashMap<>();

    /** Wait-free recording into the currently active histogram set. */
    public void recordValue(String metric, long value) {
        long criticalValue = phaser.writerCriticalSectionEnter();
        try {
            activeHistograms.computeIfAbsent(metric, k -> new Histogram(3))
                    .recordValue(value);
        } finally {
            phaser.writerCriticalSectionExit(criticalValue);
        }
    }

    /**
     * Swaps the active and inactive histogram maps and returns the map that
     * accumulated the just-completed interval.
     *
     * FIX: the original "swap" aliased temp to the same map object as
     * inactiveHistograms, returned the stale inactive map (losing the
     * interval's data), and merged both map contents into the active map.
     * Swapping the (volatile) references and flipping the phase before
     * reading the retired map is both correct and cheaper.
     */
    public Map<String, Histogram> getIntervalHistograms() {
        phaser.readerLock();
        try {
            // Prepare an empty map before exposing it to writers; writers
            // lazily recreate per-metric histograms via computeIfAbsent.
            inactiveHistograms.clear();
            // Swap references: the empty map becomes the writers' target.
            Map<String, Histogram> retired = activeHistograms;
            activeHistograms = inactiveHistograms;
            inactiveHistograms = retired;
            // Wait for writers still recording into the retired map.
            phaser.flipPhase();
            // Snapshot so later interval swaps don't disturb the caller's view.
            return new HashMap<>(retired);
        } finally {
            phaser.readerUnlock();
        }
    }
}
// Efficient histogram serialization manager
public static class HistogramSerializationManager {
    // One reusable 64KB scratch buffer per thread to avoid per-call allocation.
    private final ThreadLocal<ByteBuffer> bufferCache = ThreadLocal.withInitial(
            () -> ByteBuffer.allocate(65536) // 64KB per thread
    );

    /** Compresses the histogram and returns the result as a Base64 string. */
    public String serializeToBase64(AbstractHistogram histogram) {
        ByteBuffer scratch = bufferCache.get();
        scratch.clear();
        // Grow (and remember) the thread-local buffer if it is too small.
        int needed = histogram.getNeededByteBufferCapacity();
        if (scratch.capacity() < needed) {
            scratch = ByteBuffer.allocate(needed);
            bufferCache.set(scratch);
        }
        // Compress into the scratch buffer, then copy out the produced bytes.
        int compressedSize = histogram.encodeIntoCompressedByteBuffer(scratch, 6);
        scratch.flip();
        byte[] compressed = new byte[compressedSize];
        scratch.get(compressed);
        return Base64Helper.printBase64Binary(compressed);
    }
}
}

| Utility | Performance Profile | Memory Usage | Thread Safety |
|---|---|---|---|
| WriterReaderPhaser | Wait-free writers, blocking readers | Minimal | Multi-threading coordination |
| Base64Helper | CPU-intensive encoding/decoding | Temporary allocation | Thread-safe (stateless) |
| ZigZagEncoding | Fast variable-length encoding | Minimal | Thread-safe (stateless) |
| PackedArrayContext | Fast sparse access | Dynamic compression | Single-threaded |
| ConcurrentPackedArrayContext | Slower but thread-safe | Dynamic compression | Thread-safe |
Install with Tessl CLI
npx tessl i tessl/maven-org-hdrhistogram--hdr-histogram