CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/maven-org-hdrhistogram--hdr-histogram

High Dynamic Range (HDR) Histogram for recording and analyzing value distributions with configurable precision across wide dynamic ranges.

Pending
Overview
Eval results
Files

docs/serialization.md

Serialization and Persistence

Comprehensive encoding, decoding, and log processing capabilities for histogram data persistence and exchange. HdrHistogram provides multiple formats and tools for storing, transmitting, and processing histogram data.

EncodableHistogram

Abstract base class for histograms that support encoding and decoding operations.

// API sketch of EncodableHistogram: the abstract base type shared by all
// histogram variants that can be encoded to / decoded from ByteBuffers.
// Signatures are summarized without bodies or full modifiers for brevity.
// NOTE(review): confirm `implements Serializable` against the actual library
// source — it is not demonstrated anywhere else in this document.
public abstract class EncodableHistogram implements Serializable {
    
    // Encoding methods
    abstract int getNeededByteBufferCapacity();   // buffer capacity needed for a full (uncompressed) encode
    abstract int encodeIntoByteBuffer(ByteBuffer targetBuffer);   // returns the number of bytes written
    abstract int encodeIntoCompressedByteBuffer(ByteBuffer targetBuffer, int compressionLevel);   // deflate levels 0-9
    
    // Metadata methods (tag and interval timestamps travel with the encoded form)
    abstract long getStartTimeStamp();
    abstract void setStartTimeStamp(long startTimeStamp);
    abstract long getEndTimeStamp();
    abstract void setEndTimeStamp(long endTimeStamp);
    abstract String getTag();
    abstract void setTag(String tag);
    abstract double getMaxValueAsDouble();
    
    // Static decoding methods
    // Per the polymorphic-decoding example below, these return the concrete
    // subtype (Histogram or DoubleHistogram) found in the buffer.
    static EncodableHistogram decodeFromByteBuffer(ByteBuffer buffer, 
                                                  long minBarForHighestTrackableValue);
    static EncodableHistogram decodeFromCompressedByteBuffer(ByteBuffer buffer, 
                                                            long minBarForHighestTrackableValue);
}

Binary Encoding and Decoding

Encoding Histograms to ByteBuffer

// Serialize a histogram into its uncompressed binary wire format.
Histogram histogram = createSampleHistogram();

// Ask the histogram how large a buffer it needs, then allocate exactly that.
ByteBuffer buffer = ByteBuffer.allocate(histogram.getNeededByteBufferCapacity());

// Write the encoded form; the return value is the number of bytes produced.
int bytesWritten = histogram.encodeIntoByteBuffer(buffer);
System.out.printf("Encoded histogram in %d bytes%n", bytesWritten);

// Flip the buffer from write mode to read mode before extracting the payload.
buffer.flip();

// Copy the encoded payload into a standalone byte array.
byte[] histogramData = new byte[bytesWritten];
buffer.get(histogramData);

Compressed Encoding

// Produce a deflate-compressed encoding (compression levels 0 through 9).
Histogram histogram = createSampleHistogram();

// The uncompressed capacity is always a sufficient upper bound.
ByteBuffer compressedBuffer =
    ByteBuffer.allocate(histogram.getNeededByteBufferCapacity());

// Level 6 is a common speed/size trade-off.
int compressedSize = histogram.encodeIntoCompressedByteBuffer(compressedBuffer, 6);
System.out.printf("Compressed histogram to %d bytes (level 6)%n", compressedSize);

// Report how much smaller the compressed form is than the raw encoding.
int uncompressedSize = histogram.getNeededByteBufferCapacity();
double compressionRatio = compressedSize / (double) uncompressedSize;
System.out.printf("Compression ratio: %.2f (%.1f%% reduction)%n", 
    compressionRatio, (1.0 - compressionRatio) * 100);

Decoding Histograms from ByteBuffer

// Rebuild histograms from both the plain and the compressed binary forms,
// then confirm the two round-trips agree.
ByteBuffer encodedBuffer = getEncodedHistogramBuffer();
ByteBuffer compressedBuffer = getCompressedHistogramBuffer();

// Second argument is minBarForHighestTrackableValue (see the API sketch above).
Histogram decoded = Histogram.decodeFromByteBuffer(encodedBuffer, 1000);
Histogram decompressed = Histogram.decodeFromCompressedByteBuffer(compressedBuffer, 1000);

System.out.printf("Decoded histogram: %d samples, max=%d%n",
    decoded.getTotalCount(), decoded.getMaxValue());

// A lossless round trip means the two decoded histograms compare equal.
if (decoded.equals(decompressed)) {
    System.out.println("Compression/decompression preserves data integrity");
}

Polymorphic Decoding

// Decode when the concrete histogram type is not known up front.
ByteBuffer unknownHistogramBuffer = getHistogramBuffer();

// The base-class decoder returns the matching concrete type
// (Histogram or DoubleHistogram).
EncodableHistogram histogram = EncodableHistogram.decodeFromByteBuffer(unknownHistogramBuffer, 1000);

// Dispatch on the concrete type to use the type-specific percentile API.
if (histogram instanceof DoubleHistogram) {
    DoubleHistogram doubleHistogram = (DoubleHistogram) histogram;
    System.out.printf("Double histogram: P95=%.3f%n", 
        doubleHistogram.getValueAtPercentile(95.0));
} else if (histogram instanceof Histogram) {
    Histogram intHistogram = (Histogram) histogram;
    System.out.printf("Integer histogram: P95=%d%n", 
        intHistogram.getValueAtPercentile(95.0));
}

Base64 String Encoding

Base64 Compression and Encoding

// Encode a histogram as a Base64 string of its level-9 compressed binary form.
public String encodeHistogramToBase64(AbstractHistogram histogram) {
    ByteBuffer buffer = ByteBuffer.allocate(histogram.getNeededByteBufferCapacity());

    // Level 9 trades encoding speed for the smallest payload.
    int compressedSize = histogram.encodeIntoCompressedByteBuffer(buffer, 9);

    // Switch the buffer to read mode and copy out only the bytes written.
    buffer.flip();
    byte[] compressedBytes = new byte[compressedSize];
    buffer.get(compressedBytes);

    // Render the compressed payload as a Base64 string.
    return Base64Helper.printBase64Binary(compressedBytes);
}

// Decode histogram from a Base64 string produced by encodeHistogramToBase64.
// Keeps the original single-argument signature; the previous hard-coded 1000
// is now the documented default, delegated to the generalized overload below.
public Histogram decodeHistogramFromBase64(String base64String) {
    return decodeHistogramFromBase64(base64String, 1000);
}

// Generalized variant: callers choose minBarForHighestTrackableValue (the
// minimum highest-trackable-value bar passed through to the decoder) instead
// of relying on the fixed 1000 default.
public Histogram decodeHistogramFromBase64(String base64String,
                                           long minBarForHighestTrackableValue) {
    // Parse Base64 text back into the compressed binary payload.
    byte[] compressedBytes = Base64Helper.parseBase64Binary(base64String);
    
    // Wrap (no copy) and decode the compressed form.
    ByteBuffer buffer = ByteBuffer.wrap(compressedBytes);
    return Histogram.decodeFromCompressedByteBuffer(buffer, minBarForHighestTrackableValue);
}

Convenient String Serialization

// Using built-in string serialization
Histogram histogram = createSampleHistogram();

// Encode to compressed byte buffer, then Base64
int bufferSize = histogram.getNeededByteBufferCapacity();
ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
int compressedSize = histogram.encodeIntoCompressedByteBuffer(buffer, 9);  // level 9 = maximum compression

// Switch to read mode and copy out only the bytes actually written.
buffer.flip();
byte[] compressedBytes = new byte[compressedSize];
buffer.get(compressedBytes);
String encoded = Base64Helper.printBase64Binary(compressedBytes);

System.out.println("Encoded histogram: " + encoded);

// Direct decoding from string
// NOTE(review): confirm that Histogram.fromString exists in the target
// library version — it is not declared in any API sketch in this document.
Histogram restored = Histogram.fromString(encoded);

// Verify restoration
System.out.printf("Original: count=%d, P95=%d%n", 
    histogram.getTotalCount(), histogram.getValueAtPercentile(95.0));
System.out.printf("Restored: count=%d, P95=%d%n", 
    restored.getTotalCount(), restored.getValueAtPercentile(95.0));

Histogram Log Processing

HistogramLogWriter

Class for writing histograms to log files in standard format.

// API sketch of HistogramLogWriter (signatures only, no method bodies):
// writes interval histograms to a log in the standard histogram-log format.
public class HistogramLogWriter {
    
    // Constructors
    public HistogramLogWriter(String fileName) throws FileNotFoundException;
    public HistogramLogWriter(File outputFile) throws FileNotFoundException;
    public HistogramLogWriter(PrintStream outputStream);
    
    // Writing methods
    void outputComment(String comment);      // write a comment line into the log
    void outputLegend();                     // write the column legend header
    void outputStartTime(long startTime);    // examples below pass epoch millis
    void outputBaseTime(long baseTime);      // NOTE(review): base-time semantics not shown in this doc — verify
    void outputIntervalHistogram(EncodableHistogram histogram);  // append one interval entry
    
    // Utility methods
    void close();
}

Writing Histogram Logs

// Create log writer: emits a 60-interval histogram log (header comments,
// start time, legend, then one tagged interval histogram per iteration).
// NOTE(review): logWriter itself is never closed; closing ps at the end of
// the try block flushes the underlying stream — confirm HistogramLogWriter
// holds no additional buffered state.
try (FileOutputStream fos = new FileOutputStream("performance.hlog");
     PrintStream ps = new PrintStream(fos)) {
    
    HistogramLogWriter logWriter = new HistogramLogWriter(ps);
    
    // Write log header
    logWriter.outputComment("Performance measurement log");
    logWriter.outputComment("Application: MyService v1.2.3");
    logWriter.outputStartTime(System.currentTimeMillis());
    logWriter.outputLegend();
    
    // Simulate interval recording
    // NOTE(review): presumably 3 = number of significant value digits — confirm
    Recorder recorder = new Recorder(3);
    long startTime = System.currentTimeMillis();
    
    for (int interval = 0; interval < 60; interval++) {  // 60 intervals
        // Simulate load for 1 second
        simulateLoad(recorder, 1000);
        
        // Get interval histogram  
        Histogram intervalHist = recorder.getIntervalHistogram();
        
        // Set timestamps for this interval
        long endTime = System.currentTimeMillis();
        intervalHist.setStartTimeStamp(startTime);
        intervalHist.setEndTimeStamp(endTime);
        intervalHist.setTag("interval-" + interval);
        
        // Write to log
        logWriter.outputIntervalHistogram(intervalHist);
        
        // Next interval starts exactly where this one ended.
        startTime = endTime;
    }
    
    logWriter.outputComment("Log completed successfully");
}

HistogramLogReader

Class for reading histogram logs and processing interval data.

// API sketch of HistogramLogReader (signatures only, no method bodies):
// sequential reader over interval histograms stored in a histogram log.
// NOTE(review): confirm the Iterable implementation exists in the target
// library version; the cursor methods below are the core documented API.
public class HistogramLogReader implements Iterable<EncodableHistogram> {
    
    // Constructors
    public HistogramLogReader(String fileName) throws FileNotFoundException;
    public HistogramLogReader(File inputFile) throws FileNotFoundException;
    public HistogramLogReader(InputStream inputStream);
    
    // Reading methods
    boolean hasNext();
    EncodableHistogram nextIntervalHistogram();
    // Time-filtered variant; the example below treats a null return as end-of-range.
    EncodableHistogram nextIntervalHistogram(double startTimeSec, double endTimeSec);
    
    // Iterator support
    Iterator<EncodableHistogram> iterator();
    
    // Utility methods
    void close();
}

Reading and Processing Histogram Logs

// Replay a histogram log and accumulate per-interval percentile statistics.
try (HistogramLogReader logReader = new HistogramLogReader("performance.hlog")) {
    
    System.out.println("Processing histogram log...");
    
    long totalSamples = 0;
    double maxP99 = 0;
    List<Double> p95Values = new ArrayList<>();
    
    // Walk the log interval by interval via the explicit cursor API.
    while (logReader.hasNext()) {
        EncodableHistogram histogram = logReader.nextIntervalHistogram();
        if (!(histogram instanceof Histogram)) {
            continue;  // only integer-valued histograms are analyzed here
        }
        Histogram intHist = (Histogram) histogram;
        
        long p95 = intHist.getValueAtPercentile(95.0);
        long p99 = intHist.getValueAtPercentile(99.0);
        totalSamples += intHist.getTotalCount();
        p95Values.add((double) p95);
        maxP99 = Math.max(maxP99, p99);
        
        System.out.printf("Interval %s: count=%d, P95=%d, P99=%d%n",
            intHist.getTag(), intHist.getTotalCount(), p95, p99);
    }
    
    // Summarize across all intervals seen.
    double avgP95 = p95Values.stream().mapToDouble(Double::doubleValue).average().orElse(0);
    
    System.out.printf("Summary:%n");
    System.out.printf("  Total samples: %d%n", totalSamples);
    System.out.printf("  Average P95: %.1f%n", avgP95);
    System.out.printf("  Maximum P99: %.1f%n", maxP99);
}

Time-Range Filtering

// Read histograms within specific time range
try (HistogramLogReader logReader = new HistogramLogReader("performance.hlog")) {
    
    // Define time range (seconds since epoch)
    // NOTE(review): this example passes absolute epoch seconds; verify whether
    // nextIntervalHistogram(start, end) expects absolute seconds or seconds
    // relative to the log's recorded start time in the target library version.
    double startTime = System.currentTimeMillis() / 1000.0 - 3600;  // Last hour
    double endTime = System.currentTimeMillis() / 1000.0;
    
    System.out.printf("Analyzing histograms from %.0f to %.0f%n", startTime, endTime);
    
    // A null return ends the loop once no more intervals fall in the range.
    EncodableHistogram histogram;
    while ((histogram = logReader.nextIntervalHistogram(startTime, endTime)) != null) {
        // Timestamps are stored in milliseconds; divide for display in seconds.
        System.out.printf("Processing histogram: %s (start=%.0f, end=%.0f)%n",
            histogram.getTag(),
            histogram.getStartTimeStamp() / 1000.0,
            histogram.getEndTimeStamp() / 1000.0);
            
        analyzeHistogram(histogram);
    }
}

Advanced Log Processing

HistogramLogProcessor

Configurable processor for advanced log analysis and transformation.

// API sketch of HistogramLogProcessor (signatures only, no bodies).
// NOTE(review): verify this API against the actual library — upstream
// HistogramLogProcessor is also distributed as a command-line tool, and the
// EventHandler-based interface shown here may differ by version.
public class HistogramLogProcessor {
    
    // Constructor
    public HistogramLogProcessor();
    
    // Event handler interfaces
    // Single-method callback: invoked once per interval histogram in the log.
    interface EventHandler {
        void handleHistogramInterval(EncodableHistogram histogram);
    }
    
    // Processing methods
    void processLogFile(String inputFileName, EventHandler handler);
    void processLogFile(File inputFile, EventHandler handler);
}

Custom Log Processing

// Run a user-defined handler over every interval in a histogram log.
HistogramLogProcessor processor = new HistogramLogProcessor();

// Alert on any interval whose P99 exceeds 10ms, then extract custom metrics.
HistogramLogProcessor.EventHandler customHandler = histogram -> {
    if (!(histogram instanceof Histogram)) {
        return;  // only integer-valued histograms are analyzed here
    }
    Histogram h = (Histogram) histogram;
    
    long p99 = h.getValueAtPercentile(99.0);
    if (p99 > 10000) {  // values printed in μs, so 10000 == 10ms
        System.err.printf("ALERT: High P99 latency %d μs in interval %s%n",
            p99, h.getTag());
    }
    
    // Hand off to project-specific metric extraction.
    extractCustomMetrics(h);
};

// Process log with custom handler
processor.processLogFile("performance.hlog", customHandler);

Log Aggregation and Analysis

public class HistogramLogAnalyzer {
    
    /**
     * Groups per-interval P95 values by calendar day and prints avg/max per day.
     *
     * Fix vs. the previous version: declares FileNotFoundException, which the
     * HistogramLogReader constructor is documented to throw — the original
     * neither declared nor handled it and could not compile.
     */
    public void analyzeLongTermTrends(String logFileName) throws FileNotFoundException {
        Map<String, List<Long>> dailyP95s = new HashMap<>();
        
        try (HistogramLogReader reader = new HistogramLogReader(logFileName)) {
            for (EncodableHistogram histogram : reader) {
                if (histogram instanceof Histogram) {
                    Histogram h = (Histogram) histogram;
                    
                    // Bucket by the interval's start date in the system time zone.
                    LocalDate date = Instant.ofEpochMilli(h.getStartTimeStamp())
                        .atZone(ZoneId.systemDefault()).toLocalDate();
                    String dateKey = date.toString();
                    
                    // Collect P95 values by date
                    dailyP95s.computeIfAbsent(dateKey, k -> new ArrayList<>())
                             .add(h.getValueAtPercentile(95.0));
                }
            }
        }
        
        // ISO-8601 date keys sort chronologically as plain strings.
        dailyP95s.entrySet().stream()
                 .sorted(Map.Entry.comparingByKey())
                 .forEach(entry -> {
                     String date = entry.getKey();
                     List<Long> p95s = entry.getValue();
                     
                     double avgP95 = p95s.stream().mapToLong(Long::longValue).average().orElse(0);
                     long maxP95 = p95s.stream().mapToLong(Long::longValue).max().orElse(0);
                     
                     System.out.printf("%s: avg=%.1f μs, max=%d μs, intervals=%d%n",
                         date, avgP95, maxP95, p95s.size());
                 });
    }
    
    /**
     * Two-pass anomaly detection: pass 1 builds a mean/stddev baseline of
     * interval P95 values; pass 2 reports intervals whose P95 exceeds
     * mean + thresholdMultiplier * stddev.
     *
     * Fixes vs. the previous version: declares FileNotFoundException (see
     * analyzeLongTermTrends), and the previously-undefined
     * calculateStandardDeviation helper is now implemented below.
     */
    public void detectAnomalies(String logFileName, double thresholdMultiplier) throws FileNotFoundException {
        List<Double> p95Values = new ArrayList<>();
        
        // First pass: collect all P95 values
        try (HistogramLogReader reader = new HistogramLogReader(logFileName)) {
            for (EncodableHistogram histogram : reader) {
                if (histogram instanceof Histogram) {
                    Histogram h = (Histogram) histogram;
                    p95Values.add((double) h.getValueAtPercentile(95.0));
                }
            }
        }
        
        // Calculate baseline statistics
        double mean = p95Values.stream().mapToDouble(Double::doubleValue).average().orElse(0);
        double stdDev = calculateStandardDeviation(p95Values, mean);
        double anomalyThreshold = mean + (thresholdMultiplier * stdDev);
        
        System.out.printf("Anomaly detection: threshold=%.1f (mean=%.1f, stddev=%.1f)%n",
            anomalyThreshold, mean, stdDev);
        
        // Second pass: detect anomalies
        try (HistogramLogReader reader = new HistogramLogReader(logFileName)) {
            for (EncodableHistogram histogram : reader) {
                if (histogram instanceof Histogram) {
                    Histogram h = (Histogram) histogram;
                    long p95 = h.getValueAtPercentile(95.0);
                    
                    if (p95 > anomalyThreshold) {
                        System.out.printf("ANOMALY: %s P95=%d (%.1fx baseline)%n",
                            h.getTag(), p95, p95 / mean);
                    }
                }
            }
        }
    }
    
    /**
     * Population standard deviation of {@code values} around {@code mean};
     * returns 0 for an empty list. This helper was referenced by
     * detectAnomalies but never defined in the original example.
     */
    private static double calculateStandardDeviation(List<Double> values, double mean) {
        if (values.isEmpty()) {
            return 0.0;
        }
        double sumSquares = 0.0;
        for (double v : values) {
            double diff = v - mean;
            sumSquares += diff * diff;
        }
        return Math.sqrt(sumSquares / values.size());
    }
}

Serialization Best Practices

Metadata Management

// Stamp tag and time-range metadata onto a histogram before it is encoded,
// then echo the result so it can be checked against the log output.
public void prepareHistogramForSerialization(EncodableHistogram histogram, 
                                           String tag, 
                                           long startTime, 
                                           long endTime) {
    histogram.setTag(tag);
    histogram.setStartTimeStamp(startTime);
    histogram.setEndTimeStamp(endTime);
    
    // Single formatted report; the tag is read back through the accessor to
    // confirm the setter took effect.
    System.out.printf(
        "Prepared histogram for serialization:%n  Tag: %s%n  Time range: %d - %d (%d ms duration)%n  Max value: %.0f%n",
        histogram.getTag(), startTime, endTime, endTime - startTime,
        histogram.getMaxValueAsDouble());
}

Compression Level Optimization

// Measure compressed size and encode time at every deflate level (0-9).
public void analyzeCompressionLevels(AbstractHistogram histogram) {
    System.out.println("Compression Level Analysis:");
    
    final int uncompressedSize = histogram.getNeededByteBufferCapacity();
    System.out.printf("Uncompressed: %d bytes%n", uncompressedSize);
    
    for (int level = 0; level <= 9; level++) {
        // Fresh buffer each pass; the uncompressed capacity is always enough.
        ByteBuffer buffer = ByteBuffer.allocate(uncompressedSize);
        
        // Negative-start idiom: elapsedNanos ends up as (end - start).
        long elapsedNanos = -System.nanoTime();
        int compressedSize = histogram.encodeIntoCompressedByteBuffer(buffer, level);
        elapsedNanos += System.nanoTime();
        
        System.out.printf("Level %d: %d bytes (%.2f ratio) in %.2f ms%n",
            level, compressedSize, (double) compressedSize / uncompressedSize,
            elapsedNanos / 1e6);
    }
}

Batch Processing Optimization

public class BatchHistogramProcessor {
    
    // Writes a list of histograms to a single log file through a large
    // buffered stream, tagging each entry and reporting progress on stdout.
    public void processBatch(List<AbstractHistogram> histograms, String outputFile) {
        try (FileOutputStream fos = new FileOutputStream(outputFile);
             BufferedOutputStream bos = new BufferedOutputStream(fos, 65536);
             PrintStream ps = new PrintStream(bos)) {
            
            HistogramLogWriter writer = new HistogramLogWriter(ps);
            
            // Header identifies the batch before any interval lines.
            writer.outputComment("Batch processing: " + histograms.size() + " histograms");
            writer.outputLegend();
            
            int index = 0;
            for (AbstractHistogram histogram : histograms) {
                // Tag each entry with its position in the batch.
                histogram.setTag("batch-" + index);
                writer.outputIntervalHistogram(histogram);
                
                // Report progress every 100 entries (including the first).
                if (index % 100 == 0) {
                    System.out.printf("Processed %d/%d histograms%n", index, histograms.size());
                }
                index++;
            }
            
            writer.outputComment("Batch processing completed");
            
        } catch (IOException e) {
            throw new RuntimeException("Failed to process batch", e);
        }
    }
}

Memory-Efficient Serialization

Streaming Serialization

public class StreamingHistogramSerializer {
    
    /** Number of histograms to write between explicit flushes. */
    private static final int FLUSH_INTERVAL = 100;
    
    /**
     * Streams histograms to the output as a histogram log without holding
     * more than one histogram in memory at a time.
     *
     * Fix: the original flushed when {@code Math.random() < 0.01}, which is
     * non-deterministic and could go arbitrarily long without flushing.
     * A counter now flushes every FLUSH_INTERVAL histograms instead, bounding
     * how much buffered data can be lost if the process dies mid-stream.
     */
    public void serializeStream(Iterator<AbstractHistogram> histograms, 
                               OutputStream output) throws IOException {
        
        try (BufferedOutputStream buffered = new BufferedOutputStream(output, 32768);
             PrintStream ps = new PrintStream(buffered)) {
            
            HistogramLogWriter writer = new HistogramLogWriter(ps);
            writer.outputLegend();
            
            int written = 0;
            while (histograms.hasNext()) {
                AbstractHistogram histogram = histograms.next();
                writer.outputIntervalHistogram(histogram);
                
                // Deterministic periodic flush (replaces the random 1% flush).
                if (++written % FLUSH_INTERVAL == 0) {
                    ps.flush();
                }
            }
        }
    }
}

Format Comparison

| Format | Size | Speed | Use Case |
| --- | --- | --- | --- |
| Uncompressed Binary | Large | Fastest | Real-time systems |
| Compressed Binary (Level 1) | Medium | Fast | Network transmission |
| Compressed Binary (Level 6) | Small | Medium | Storage optimization |
| Compressed Binary (Level 9) | Smallest | Slow | Archive storage |
| Base64 Compressed | Small + Text | Medium | JSON/XML embedding |

Install with Tessl CLI

npx tessl i tessl/maven-org-hdrhistogram--hdr-histogram

docs

concurrent-histograms.md

core-operations.md

double-histograms.md

index.md

iterators.md

recorders.md

serialization.md

specialized-variants.md

utilities.md

tile.json