A high-performance caching library for Java, providing a Google Guava-inspired API with advanced eviction policies and comprehensive features
—
Caffeine provides two main synchronous cache interfaces: Cache for manual cache operations and LoadingCache for automatic value loading. Both interfaces are thread-safe and designed for high-concurrency access patterns.
The Cache interface provides basic manual caching operations where values must be explicitly computed and stored.
public interface Cache<K, V> {
    // Retrieval operations
    V getIfPresent(K key);
    V get(K key, Function<? super K, ? extends V> mappingFunction);
    Map<K, V> getAllPresent(Iterable<? extends K> keys);
    Map<K, V> getAll(Iterable<? extends K> keys, Function<Set<? extends K>, Map<K, V>> mappingFunction);
    // Storage operations
    void put(K key, V value);
    void putAll(Map<? extends K, ? extends V> map);
    // Removal operations
    void invalidate(K key);
    void invalidateAll(Iterable<? extends K> keys);
    void invalidateAll();
    // Inspection operations
    long estimatedSize();
    CacheStats stats();
    ConcurrentMap<K, V> asMap();
    void cleanUp();
    Policy<K, V> policy();
}

// Build a manually populated cache bounded to at most 1,000 entries
Cache<String, String> cache = Caffeine.newBuilder()
    .maximumSize(1000)
    .build();
// Simple retrieval - returns null if not present
String value = cache.getIfPresent("key1");

// Get with compute function - computes and stores if missing
String computed = cache.get("key2", k -> "computed_" + k);

// Bulk retrieval of present values only
Set<String> keys = Set.of("key1", "key2", "key3");
Map<String, String> present = cache.getAllPresent(keys);

// Bulk retrieval with compute function; the function receives only the missing keys
Map<String, String> all = cache.getAll(keys, missingKeys -> {
    Map<String, String> result = new HashMap<>();
    for (String key : missingKeys) {
        result.put(key, "bulk_computed_" + key);
    }
    return result;
});

// Store single value
cache.put("key", "value");

// Store multiple values
Map<String, String> data = Map.of(
    "key1", "value1",
    "key2", "value2"
);
cache.putAll(data);

// Remove single entry
cache.invalidate("key1");

// Remove multiple entries
cache.invalidateAll(Set.of("key1", "key2"));

// Remove all entries
cache.invalidateAll();

// Get concurrent map view
ConcurrentMap<String, String> mapView = cache.asMap();

// Standard map operations work on the cache
mapView.putIfAbsent("key", "value");
mapView.computeIfPresent("key", (k, v) -> v.toUpperCase());
mapView.merge("key", "suffix", (oldVal, newVal) -> oldVal + "_" + newVal);

// Iteration over cache entries
for (Map.Entry<String, String> entry : mapView.entrySet()) {
    System.out.println(entry.getKey() + " -> " + entry.getValue());
}

// Manual cleanup - performs maintenance operations
cache.cleanUp();

// Get approximate size
long size = cache.estimatedSize();

// Access cache statistics (only meaningful when recordStats() is enabled).
// Note: stats() returns a fresh snapshot each call, so compare with
// equals(), never with reference (in)equality.
CacheStats stats = cache.stats();
if (!stats.equals(CacheStats.empty())) {
    System.out.println("Hit rate: " + stats.hitRate());
    System.out.println("Miss count: " + stats.missCount());
}

// Access cache policies
Policy<String, String> policy = cache.policy();
if (policy.eviction().isPresent()) {
    System.out.println("Max size: " + policy.eviction().get().getMaximum());
}

The LoadingCache interface extends Cache and provides automatic value loading using a CacheLoader.
public interface LoadingCache<K, V> extends Cache<K, V> {
    // Automatic loading operations
    V get(K key);
    Map<K, V> getAll(Iterable<? extends K> keys);
    // Refresh operations
    CompletableFuture<V> refresh(K key);
    CompletableFuture<Map<K, V>> refreshAll(Iterable<? extends K> keys);
}

// Build a cache whose values are computed on demand by the loader function
LoadingCache<String, String> loadingCache = Caffeine.newBuilder()
    .maximumSize(1000)
    .build(key -> {
        // Simulate expensive computation (CacheLoader.load may throw checked exceptions)
        Thread.sleep(100);
        return "loaded_" + key.toUpperCase();
    });

// Get value - loads automatically if not present
String value = loadingCache.get("key1"); // Returns "loaded_KEY1"

// Bulk loading - uses CacheLoader.loadAll() if implemented
Map<String, String> values = loadingCache.getAll(Set.of("key1", "key2", "key3"));

LoadingCache<String, UserData> userCache = Caffeine.newBuilder()
.maximumSize(1000)
    .build(new CacheLoader<String, UserData>() {
        @Override
        public UserData load(String userId) throws Exception {
            return database.fetchUser(userId);
        }

        @Override
        public Map<String, UserData> loadAll(Set<? extends String> userIds) throws Exception {
            // Efficient bulk loading from database
            return database.fetchUsers(userIds);
        }
    });

// Uses efficient bulk loading via loadAll()
Map<String, UserData> users = userCache.getAll(Set.of("user1", "user2", "user3"));

LoadingCache<String, String> refreshingCache = Caffeine.newBuilder()
.maximumSize(1000)
    .refreshAfterWrite(Duration.ofMinutes(5))
    .build(key -> fetchFromSlowService(key));

// Asynchronous refresh - old value remains available during refresh
CompletableFuture<String> refreshFuture = refreshingCache.refresh("key1");

// The cache continues serving the old value while refreshing
String currentValue = refreshingCache.get("key1"); // Returns old value immediately

// Wait for refresh to complete if needed.
// Use join() rather than get(): get() throws checked
// InterruptedException/ExecutionException that this snippet does not handle.
String newValue = refreshFuture.join();

// Bulk refresh operations
Set<String> keysToRefresh = Set.of("key1", "key2", "key3");
CompletableFuture<Map<String, String>> bulkRefreshFuture = refreshingCache.refreshAll(keysToRefresh);

// Cache continues serving old values while refreshing all keys
Map<String, String> currentValues = refreshingCache.getAll(keysToRefresh);

// Wait for all refreshes to complete
Map<String, String> newValues = bulkRefreshFuture.join();

Cache<String, String> cache = Caffeine.newBuilder()
.maximumSize(1000)
    .build();

try {
    // The compute function may throw; unchecked exceptions propagate to the caller
    String value = cache.get("key", k -> {
        if (k.equals("invalid")) {
            throw new IllegalArgumentException("Invalid key");
        }
        return "valid_" + k;
    });
} catch (RuntimeException e) {
    // Handle computation exceptions
    System.err.println("Failed to compute value: " + e.getMessage());
}

LoadingCache<String, String> loadingCache = Caffeine.newBuilder()
.maximumSize(1000)
    .build(key -> {
        if (key.startsWith("error_")) {
            throw new RuntimeException("Simulated loading error");
        }
        return "loaded_" + key;
    });

try {
    String value = loadingCache.get("error_key");
} catch (RuntimeException e) {
    // Unchecked loader exceptions are rethrown as-is by LoadingCache.get;
    // only *checked* exceptions thrown by a CacheLoader are wrapped in
    // CompletionException (inspect getCause() in that case).
    System.err.println("Loading failed: " + e.getMessage());
}

All cache operations are thread-safe and designed for high-concurrency access:
Cache<String, String> cache = Caffeine.newBuilder()
    .maximumSize(1000)
    .build();

// Multiple threads can safely access the cache concurrently
ExecutorService executor = Executors.newFixedThreadPool(10);
for (int i = 0; i < 100; i++) {
    final int threadId = i;
    executor.submit(() -> {
        // Thread-safe operations
        cache.put("key_" + threadId, "value_" + threadId);
        String value = cache.getIfPresent("key_" + threadId);
        cache.get("computed_" + threadId, k -> "computed_" + k);
    });
}
// Release the pool's threads once all work has been submitted
executor.shutdown();

Cache operations are atomic at the individual operation level:
// These operations are atomic
cache.get("key", k -> expensiveComputation(k)); // Only computed once per key
cache.asMap().computeIfAbsent("key", k -> defaultValue(k)); // Atomic compute-if-absent
cache.asMap().merge("key", "addition", (old, newVal) -> old + newVal); // Atomic merge

ConcurrentHashMap

Install with Tessl CLI
npx tessl i tessl/maven-com-github-ben-manes-caffeine--caffeine