Introduction
The Cache-Aside pattern (also known as Lazy-Loading) is a caching strategy where the application code is responsible for loading data into the cache on demand and retrieving data from the cache when available. This pattern provides fine-grained control over cache operations and is widely used in distributed systems.
Architecture Overview
Pattern Flow
public class CacheAsidePattern {
/**
* Cache-Aside Pattern Flow:
* 1. Application receives request for data
* 2. Check cache first
* 3. If cache hit → return cached data
* 4. If cache miss → load from data source
* 5. Store loaded data in cache
* 6. Return data to caller
*/
private CacheStore cache;
private DataRepository repository;
public Object getData(String key) {
// Step 1: Try cache first
Object data = cache.get(key);
if (data != null) {
return data; // Cache hit
}
// Step 2: Cache miss - load from repository
data = repository.findById(key);
if (data != null) {
// Step 3: Populate cache for future requests
cache.set(key, data);
}
return data;
}
}
Core Implementation
Cache Store Interface
public interface CacheStore<T> {
T get(String key);
void set(String key, T value);
void set(String key, T value, Duration ttl);
void delete(String key);
boolean exists(String key);
Set<String> getKeys(String pattern);
}
// Generic cache implementation
@Component
public class GenericCacheStore<T> implements CacheStore<T> {
private final Cache<String, T> cache;
private final ObjectMapper objectMapper;
private final Class<T> type;
public GenericCacheStore(Class<T> type) {
this.type = type;
this.cache = Caffeine.newBuilder()
.maximumSize(10_000)
.expireAfterWrite(30, TimeUnit.MINUTES)
.recordStats()
.build();
this.objectMapper = new ObjectMapper();
}
@Override
public T get(String key) {
try {
return cache.getIfPresent(key);
} catch (Exception e) {
// Log error but don't fail the request
System.err.println("Cache get error for key: " + key + ", error: " + e.getMessage());
return null;
}
}
@Override
public void set(String key, T value) {
set(key, value, Duration.ofMinutes(30));
}
@Override
public void set(String key, T value, Duration ttl) {
try {
cache.put(key, value);
} catch (Exception e) {
// Log error but don't fail the operation
System.err.println("Cache set error for key: " + key + ", error: " + e.getMessage());
}
}
@Override
public void delete(String key) {
try {
cache.invalidate(key);
} catch (Exception e) {
System.err.println("Cache delete error for key: " + key + ", error: " + e.getMessage());
}
}
@Override
public boolean exists(String key) {
return cache.getIfPresent(key) != null;
}
@Override
public Set<String> getKeys(String pattern) {
// Caffeine doesn't support key pattern matching directly
// In production, use Redis or similar for this feature
return Collections.emptySet();
}
public CacheStats getStats() {
return cache.stats();
}
}
Repository with Cache-Aside
@Repository
@Slf4j
public class UserRepositoryWithCache {
private final JdbcTemplate jdbcTemplate;
private final CacheStore<User> userCache;
private final CacheStore<List<User>> userListCache;
private final MetricsCollector metrics;
public UserRepositoryWithCache(JdbcTemplate jdbcTemplate,
MetricsCollector metrics) {
this.jdbcTemplate = jdbcTemplate;
this.userCache = new GenericCacheStore<>(User.class);
this.userListCache = new GenericCacheStore<>(List.class);
this.metrics = metrics;
}
public User findById(Long id) {
String cacheKey = "user:" + id;
long startTime = System.currentTimeMillis();
try {
// Step 1: Try cache first
User user = userCache.get(cacheKey);
if (user != null) {
metrics.recordCacheHit("user");
return user;
}
metrics.recordCacheMiss("user");
// Step 2: Cache miss - load from database
user = loadFromDatabase(id);
if (user != null) {
// Step 3: Populate cache
userCache.set(cacheKey, user, Duration.ofHours(1));
metrics.recordCacheSet("user");
}
return user;
} finally {
long duration = System.currentTimeMillis() - startTime;
metrics.recordOperationTime("findUserById", duration);
}
}
public List<User> findByDepartment(String department) {
String cacheKey = "users:dept:" + department.toLowerCase();
long startTime = System.currentTimeMillis();
try {
// Try cache first
@SuppressWarnings("unchecked")
List<User> users = (List<User>) userListCache.get(cacheKey);
if (users != null) {
metrics.recordCacheHit("user_list");
return users;
}
metrics.recordCacheMiss("user_list");
// Load from database
users = loadUsersFromDatabase(department);
// Populate cache
if (users != null && !users.isEmpty()) {
userListCache.set(cacheKey, users, Duration.ofMinutes(30));
metrics.recordCacheSet("user_list");
}
return users;
} finally {
long duration = System.currentTimeMillis() - startTime;
metrics.recordOperationTime("findByDepartment", duration);
}
}
public User save(User user) {
long startTime = System.currentTimeMillis();
try {
// Save to database
User savedUser = saveToDatabase(user);
if (savedUser != null && savedUser.getId() != null) {
// Invalidate relevant cache entries
invalidateUserCache(savedUser.getId());
invalidateUserListCaches();
}
return savedUser;
} finally {
long duration = System.currentTimeMillis() - startTime;
metrics.recordOperationTime("saveUser", duration);
}
}
public void delete(Long id) {
long startTime = System.currentTimeMillis();
try {
// Delete from database
deleteFromDatabase(id);
// Invalidate cache
invalidateUserCache(id);
invalidateUserListCaches();
} finally {
long duration = System.currentTimeMillis() - startTime;
metrics.recordOperationTime("deleteUser", duration);
}
}
private User loadFromDatabase(Long id) {
try {
String sql = "SELECT id, name, email, department, created_at FROM users WHERE id = ?";
return jdbcTemplate.queryForObject(sql, new Object[]{id}, (rs, rowNum) -> {
User user = new User();
user.setId(rs.getLong("id"));
user.setName(rs.getString("name"));
user.setEmail(rs.getString("email"));
user.setDepartment(rs.getString("department"));
user.setCreatedAt(rs.getTimestamp("created_at").toInstant());
return user;
});
} catch (EmptyResultDataAccessException e) {
return null;
}
}
private List<User> loadUsersFromDatabase(String department) {
String sql = "SELECT id, name, email, department, created_at FROM users WHERE department = ?";
return jdbcTemplate.query(sql, new Object[]{department}, (rs, rowNum) -> {
User user = new User();
user.setId(rs.getLong("id"));
user.setName(rs.getString("name"));
user.setEmail(rs.getString("email"));
user.setDepartment(rs.getString("department"));
user.setCreatedAt(rs.getTimestamp("created_at").toInstant());
return user;
});
}
private User saveToDatabase(User user) {
if (user.getId() == null) {
// Insert
String sql = "INSERT INTO users (name, email, department) VALUES (?, ?, ?)";
KeyHolder keyHolder = new GeneratedKeyHolder();
jdbcTemplate.update(connection -> {
PreparedStatement ps = connection.prepareStatement(sql, new String[]{"id"});
ps.setString(1, user.getName());
ps.setString(2, user.getEmail());
ps.setString(3, user.getDepartment());
return ps;
}, keyHolder);
user.setId(keyHolder.getKey().longValue());
return user;
} else {
// Update
String sql = "UPDATE users SET name = ?, email = ?, department = ? WHERE id = ?";
jdbcTemplate.update(sql, user.getName(), user.getEmail(), user.getDepartment(), user.getId());
return user;
}
}
private void deleteFromDatabase(Long id) {
String sql = "DELETE FROM users WHERE id = ?";
jdbcTemplate.update(sql, id);
}
private void invalidateUserCache(Long userId) {
String cacheKey = "user:" + userId;
userCache.delete(cacheKey);
metrics.recordCacheInvalidation("user");
}
private void invalidateUserListCaches() {
// Invalidate all user list caches (simplified implementation)
// In production, you might want more granular invalidation
userListCache.delete("users:dept:engineering");
userListCache.delete("users:dept:sales");
userListCache.delete("users:dept:marketing");
metrics.recordCacheInvalidation("user_list");
}
}
Advanced Cache-Aside Patterns
Read-Through with Fallback
@Component
@Slf4j
public class ReadThroughCacheService<T> {
private final CacheStore<T> cache;
private final DataLoader<T> dataLoader;
private final CircuitBreaker circuitBreaker;
public ReadThroughCacheService(CacheStore<T> cache,
DataLoader<T> dataLoader,
CircuitBreaker circuitBreaker) {
this.cache = cache;
this.dataLoader = dataLoader;
this.circuitBreaker = circuitBreaker;
}
public T get(String key) {
return get(key, Duration.ofMinutes(30));
}
public T get(String key, Duration ttl) {
// Step 1: Try cache
T value = cache.get(key);
if (value != null) {
return value;
}
// Step 2: Cache miss - load with circuit breaker
try {
value = circuitBreaker.executeSupplier(() -> dataLoader.load(key));
if (value != null) {
// Step 3: Populate cache
cache.set(key, value, ttl);
}
return value;
} catch (Exception e) {
log.error("Failed to load data for key: {}", key, e);
return null;
}
}
public Map<String, T> bulkGet(Set<String> keys) {
Map<String, T> result = new HashMap<>();
Set<String> missingKeys = new HashSet<>();
// Step 1: Get all available from cache
for (String key : keys) {
T value = cache.get(key);
if (value != null) {
result.put(key, value);
} else {
missingKeys.add(key);
}
}
// Step 2: Load missing keys from source
if (!missingKeys.isEmpty()) {
Map<String, T> loadedData = dataLoader.bulkLoad(missingKeys);
result.putAll(loadedData);
// Step 3: Populate cache with loaded data
loadedData.forEach((key, value) ->
cache.set(key, value, Duration.ofMinutes(30)));
}
return result;
}
}
// Data loader interface
public interface DataLoader<T> {
T load(String key);
Map<String, T> bulkLoad(Set<String> keys);
}
@Component
public class UserDataLoader implements DataLoader<User> {
private final UserRepository userRepository;
public UserDataLoader(UserRepository userRepository) {
this.userRepository = userRepository;
}
@Override
public User load(String key) {
Long userId = extractUserId(key);
return userRepository.findById(userId);
}
@Override
public Map<String, User> bulkLoad(Set<String> keys) {
Set<Long> userIds = keys.stream()
.map(this::extractUserId)
.collect(Collectors.toSet());
Map<Long, User> users = userRepository.findByIds(userIds);
return users.entrySet().stream()
.collect(Collectors.toMap(
entry -> "user:" + entry.getKey(),
Map.Entry::getValue
));
}
private Long extractUserId(String key) {
return Long.parseLong(key.replace("user:", ""));
}
}
Write-Behind Caching
@Component
@Slf4j
public class WriteBehindCacheService<T> {
private final CacheStore<T> cache;
private final DataWriter<T> dataWriter;
private final ExecutorService writeExecutor;
private final Queue<WriteTask<T>> writeQueue;
private final AtomicBoolean processing;
public WriteBehindCacheService(CacheStore<T> cache, DataWriter<T> dataWriter) {
this.cache = cache;
this.dataWriter = dataWriter;
this.writeExecutor = Executors.newSingleThreadExecutor();
this.writeQueue = new ConcurrentLinkedQueue<>();
this.processing = new AtomicBoolean(false);
}
public void set(String key, T value) {
set(key, value, Duration.ofMinutes(30));
}
public void set(String key, T value, Duration ttl) {
// Update cache immediately
cache.set(key, value, ttl);
// Queue write to persistent storage
WriteTask<T> task = new WriteTask<>(key, value, System.currentTimeMillis());
writeQueue.offer(task);
// Trigger background processing if not already running
if (processing.compareAndSet(false, true)) {
writeExecutor.submit(this::processWriteQueue);
}
}
private void processWriteQueue() {
try {
while (!writeQueue.isEmpty()) {
WriteTask<T> task = writeQueue.poll();
if (task != null) {
try {
dataWriter.write(task.getKey(), task.getValue());
log.debug("Successfully wrote data for key: {}", task.getKey());
} catch (Exception e) {
log.error("Failed to write data for key: {}", task.getKey(), e);
// Optionally retry or move to dead letter queue
}
}
}
} finally {
processing.set(false);
// Check if new tasks arrived while processing
if (!writeQueue.isEmpty() && processing.compareAndSet(false, true)) {
writeExecutor.submit(this::processWriteQueue);
}
}
}
@PreDestroy
public void shutdown() {
writeExecutor.shutdown();
try {
if (!writeExecutor.awaitTermination(30, TimeUnit.SECONDS)) {
writeExecutor.shutdownNow();
}
} catch (InterruptedException e) {
writeExecutor.shutdownNow();
Thread.currentThread().interrupt();
}
}
private static class WriteTask<T> {
private final String key;
private final T value;
private final long timestamp;
public WriteTask(String key, T value, long timestamp) {
this.key = key;
this.value = value;
this.timestamp = timestamp;
}
public String getKey() { return key; }
public T getValue() { return value; }
public long getTimestamp() { return timestamp; }
}
}
public interface DataWriter<T> {
void write(String key, T value);
}
Distributed Caching with Redis
Redis Cache Implementation
@Component
public class RedisCacheStore<T> implements CacheStore<T> {
private final RedisTemplate<String, Object> redisTemplate;
private final ObjectMapper objectMapper;
private final Class<T> type;
public RedisCacheStore(RedisTemplate<String, Object> redisTemplate, Class<T> type) {
this.redisTemplate = redisTemplate;
this.objectMapper = new ObjectMapper();
this.type = type;
}
@Override
public T get(String key) {
try {
Object value = redisTemplate.opsForValue().get(key);
if (value == null) {
return null;
}
// Handle serialization based on Redis configuration
if (value instanceof String) {
return objectMapper.readValue((String) value, type);
} else {
return objectMapper.convertValue(value, type);
}
} catch (Exception e) {
System.err.println("Redis get error for key: " + key + ", error: " + e.getMessage());
return null;
}
}
@Override
public void set(String key, T value) {
set(key, value, Duration.ofMinutes(30));
}
@Override
public void set(String key, T value, Duration ttl) {
try {
String jsonValue = objectMapper.writeValueAsString(value);
redisTemplate.opsForValue().set(key, jsonValue, ttl);
} catch (Exception e) {
System.err.println("Redis set error for key: " + key + ", error: " + e.getMessage());
}
}
@Override
public void delete(String key) {
try {
redisTemplate.delete(key);
} catch (Exception e) {
System.err.println("Redis delete error for key: " + key + ", error: " + e.getMessage());
}
}
@Override
public boolean exists(String key) {
try {
Boolean exists = redisTemplate.hasKey(key);
return exists != null && exists;
} catch (Exception e) {
System.err.println("Redis exists error for key: " + key + ", error: " + e.getMessage());
return false;
}
}
@Override
public Set<String> getKeys(String pattern) {
try {
return redisTemplate.keys(pattern);
} catch (Exception e) {
System.err.println("Redis keys error for pattern: " + pattern + ", error: " + e.getMessage());
return Collections.emptySet();
}
}
public boolean acquireLock(String lockKey, String requestId, Duration timeout) {
try {
return Boolean.TRUE.equals(redisTemplate.opsForValue()
.setIfAbsent(lockKey, requestId, timeout));
} catch (Exception e) {
System.err.println("Redis lock acquisition error: " + e.getMessage());
return false;
}
}
public void releaseLock(String lockKey, String requestId) {
try {
// Only release if we own the lock
Object currentOwner = redisTemplate.opsForValue().get(lockKey);
if (requestId.equals(currentOwner)) {
redisTemplate.delete(lockKey);
}
} catch (Exception e) {
System.err.println("Redis lock release error: " + e.getMessage());
}
}
}
Cache-Aside with Distributed Lock
@Component
@Slf4j
public class DistributedCacheAsideService<T> {
private final CacheStore<T> cache;
private final DataLoader<T> dataLoader;
private final RedisCacheStore<T> redisCache;
public DistributedCacheAsideService(CacheStore<T> cache,
DataLoader<T> dataLoader,
RedisCacheStore<T> redisCache) {
this.cache = cache;
this.dataLoader = dataLoader;
this.redisCache = redisCache;
}
public T getWithLock(String key) {
return getWithLock(key, Duration.ofMinutes(30));
}
public T getWithLock(String key, Duration ttl) {
// Step 1: Try cache
T value = cache.get(key);
if (value != null) {
return value;
}
String lockKey = key + ":lock";
String requestId = UUID.randomUUID().toString();
// Step 2: Try to acquire lock
boolean lockAcquired = redisCache.acquireLock(lockKey, requestId, Duration.ofSeconds(10));
if (!lockAcquired) {
// Another thread is loading the data, wait and retry cache
return retryGet(key, 5, 100); // Retry 5 times with 100ms delay
}
try {
// Double-check cache after acquiring lock (other thread might have populated it)
value = cache.get(key);
if (value != null) {
return value;
}
// Step 3: Load from data source
value = dataLoader.load(key);
if (value != null) {
// Step 4: Populate cache
cache.set(key, value, ttl);
}
return value;
} finally {
// Step 5: Release lock
redisCache.releaseLock(lockKey, requestId);
}
}
private T retryGet(String key, int maxRetries, long retryDelayMs) {
for (int i = 0; i < maxRetries; i++) {
try {
Thread.sleep(retryDelayMs);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
T value = cache.get(key);
if (value != null) {
return value;
}
}
// Final attempt to load directly (bypass cache)
return dataLoader.load(key);
}
}
Metrics and Monitoring
Comprehensive Metrics Collection
@Component
public class CacheMetricsCollector {
private final MeterRegistry meterRegistry;
private final Map<String, Counter> hitCounters = new ConcurrentHashMap<>();
private final Map<String, Counter> missCounters = new ConcurrentHashMap<>();
private final Map<String, Counter> setCounters = new ConcurrentHashMap<>();
private final Map<String, Counter> invalidationCounters = new ConcurrentHashMap<>();
private final Map<String, Timer> operationTimers = new ConcurrentHashMap<>();
public CacheMetricsCollector(MeterRegistry meterRegistry) {
this.meterRegistry = meterRegistry;
}
public void recordCacheHit(String cacheName) {
Counter counter = hitCounters.computeIfAbsent(cacheName,
name -> Counter.builder("cache.hits")
.tag("cache", name)
.register(meterRegistry));
counter.increment();
}
public void recordCacheMiss(String cacheName) {
Counter counter = missCounters.computeIfAbsent(cacheName,
name -> Counter.builder("cache.misses")
.tag("cache", name)
.register(meterRegistry));
counter.increment();
}
public void recordCacheSet(String cacheName) {
Counter counter = setCounters.computeIfAbsent(cacheName,
name -> Counter.builder("cache.sets")
.tag("cache", name)
.register(meterRegistry));
counter.increment();
}
public void recordCacheInvalidation(String cacheName) {
Counter counter = invalidationCounters.computeIfAbsent(cacheName,
name -> Counter.builder("cache.invalidations")
.tag("cache", name)
.register(meterRegistry));
counter.increment();
}
public void recordOperationTime(String operation, long durationMs) {
Timer timer = operationTimers.computeIfAbsent(operation,
op -> Timer.builder("cache.operation.duration")
.tag("operation", op)
.register(meterRegistry));
timer.record(durationMs, TimeUnit.MILLISECONDS);
}
public void recordCacheSize(String cacheName, long size) {
Gauge.builder("cache.size", () -> size)
.tag("cache", cacheName)
.register(meterRegistry);
}
public double getHitRatio(String cacheName) {
double hits = getCounterValue(hitCounters.get(cacheName));
double misses = getCounterValue(missCounters.get(cacheName));
double total = hits + misses;
return total > 0 ? hits / total : 0.0;
}
private double getCounterValue(Counter counter) {
return counter != null ? counter.count() : 0.0;
}
}
Spring Boot Configuration
Cache Configuration
@Configuration
@EnableCaching
@EnableConfigurationProperties(CacheProperties.class)
public class CacheConfig {
@Bean
public CacheManager cacheManager(CacheProperties properties) {
CaffeineCacheManager cacheManager = new CaffeineCacheManager();
cacheManager.setCaffeine(Caffeine.newBuilder()
.maximumSize(properties.getMaximumSize())
.expireAfterWrite(properties.getExpireAfterWrite())
.recordStats());
return cacheManager;
}
@Bean
public CacheStore<User> userCacheStore() {
return new GenericCacheStore<>(User.class);
}
@Bean
public CacheStore<List<User>> userListCacheStore() {
return new GenericCacheStore<>(List.class);
}
@Bean
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory connectionFactory) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
template.setConnectionFactory(connectionFactory);
template.setKeySerializer(new StringRedisSerializer());
template.setValueSerializer(new GenericJackson2JsonRedisSerializer());
return template;
}
@Bean
public RedisCacheStore<User> redisUserCacheStore(RedisTemplate<String, Object> redisTemplate) {
return new RedisCacheStore<>(redisTemplate, User.class);
}
}
@ConfigurationProperties(prefix = "app.cache")
@Data
public class CacheProperties {
private long maximumSize = 10000;
private Duration expireAfterWrite = Duration.ofMinutes(30);
private Duration expireAfterAccess = Duration.ofMinutes(10);
private boolean recordStats = true;
private Redis redis = new Redis();
@Data
public static class Redis {
private Duration defaultTtl = Duration.ofHours(1);
private String keyPrefix = "app:cache:";
private boolean clusterEnabled = false;
}
}
Service Layer Implementation
Cached Service Example
@Service
@Slf4j
public class CachedUserService {
private final UserRepository userRepository;
private final CacheStore<User> userCache;
private final CacheStore<List<User>> userListCache;
private final CacheMetricsCollector metrics;
private final DistributedCacheAsideService<User> distributedCacheService;
public CachedUserService(UserRepository userRepository,
CacheStore<User> userCache,
CacheStore<List<User>> userListCache,
CacheMetricsCollector metrics,
DistributedCacheAsideService<User> distributedCacheService) {
this.userRepository = userRepository;
this.userCache = userCache;
this.userListCache = userListCache;
this.metrics = metrics;
this.distributedCacheService = distributedCacheService;
}
@Cacheable(value = "users", key = "#id")
public User getUserById(Long id) {
String cacheKey = "user:" + id;
return distributedCacheService.getWithLock(cacheKey);
}
public List<User> getUsersByDepartment(String department) {
String cacheKey = "users:dept:" + department.toLowerCase();
@SuppressWarnings("unchecked")
List<User> users = (List<User>) userListCache.get(cacheKey);
if (users != null) {
return users;
}
users = userRepository.findByDepartment(department);
if (users != null && !users.isEmpty()) {
userListCache.set(cacheKey, users, Duration.ofMinutes(30));
}
return users;
}
@CacheEvict(value = "users", key = "#user.id")
public User updateUser(User user) {
User updatedUser = userRepository.save(user);
// Invalidate related caches
invalidateUserCaches(updatedUser.getId());
return updatedUser;
}
@CacheEvict(value = "users", key = "#id")
public void deleteUser(Long id) {
userRepository.deleteById(id);
invalidateUserCaches(id);
}
public User getUserWithFallback(Long id) {
try {
return getUserById(id);
} catch (Exception e) {
log.warn("Cache operation failed, falling back to direct database access for user: {}", id, e);
return userRepository.findById(id).orElse(null);
}
}
private void invalidateUserCaches(Long userId) {
String userKey = "user:" + userId;
userCache.delete(userKey);
// Invalidate all department lists (simplified)
userListCache.delete("users:dept:engineering");
userListCache.delete("users:dept:sales");
userListCache.delete("users:dept:marketing");
}
// Bulk operations
public Map<Long, User> getUsersByIds(Set<Long> userIds) {
Map<String, Long> keyToIdMap = userIds.stream()
.collect(Collectors.toMap(id -> "user:" + id, id -> id));
Map<String, User> cachedUsers = bulkGetFromCache(keyToIdMap.keySet());
Set<Long> missingIds = userIds.stream()
.filter(id -> !cachedUsers.containsKey("user:" + id))
.collect(Collectors.toSet());
if (!missingIds.isEmpty()) {
Map<Long, User> loadedUsers = userRepository.findByIds(missingIds);
cacheUsers(loadedUsers);
cachedUsers.putAll(loadedUsers.entrySet().stream()
.collect(Collectors.toMap(entry -> "user:" + entry.getKey(), Map.Entry::getValue)));
}
return cachedUsers.entrySet().stream()
.collect(Collectors.toMap(
entry -> keyToIdMap.get(entry.getKey()),
Map.Entry::getValue
));
}
private Map<String, User> bulkGetFromCache(Set<String> keys) {
Map<String, User> result = new HashMap<>();
for (String key : keys) {
User user = userCache.get(key);
if (user != null) {
result.put(key, user);
}
}
return result;
}
private void cacheUsers(Map<Long, User> users) {
users.forEach((id, user) ->
userCache.set("user:" + id, user, Duration.ofMinutes(30)));
}
}
Testing
Comprehensive Test Suite
@SpringBootTest
@TestPropertySource(properties = {
"app.cache.maximum-size=1000",
"app.cache.expire-after-write=5m"
})
public class CacheAsidePatternTest {
@Autowired
private CachedUserService cachedUserService;
@Autowired
private UserRepository userRepository;
@Autowired
private CacheManager cacheManager;
@MockBean
private MetricsCollector metricsCollector;
@Test
void testCacheHit() {
User user = new User(1L, "John Doe", "[email protected]", "Engineering");
userRepository.save(user);
// First call - should cache miss and load from database
User result1 = cachedUserService.getUserById(1L);
assertNotNull(result1);
// Second call - should cache hit
User result2 = cachedUserService.getUserById(1L);
assertNotNull(result2);
verify(metricsCollector, atLeastOnce()).recordCacheHit(anyString());
}
@Test
void testCacheEviction() {
User user = new User(1L, "John Doe", "[email protected]", "Engineering");
userRepository.save(user);
// Populate cache
cachedUserService.getUserById(1L);
// Update user - should evict cache
user.setName("John Smith");
cachedUserService.updateUser(user);
// Should trigger cache miss
User updatedUser = cachedUserService.getUserById(1L);
assertEquals("John Smith", updatedUser.getName());
}
@Test
void testCacheFallback() {
// Simulate cache failure
doThrow(new RuntimeException("Cache unavailable"))
.when(metricsCollector).recordCacheHit(anyString());
User user = new User(1L, "John Doe", "[email protected]", "Engineering");
userRepository.save(user);
// Should fall back to database
User result = cachedUserService.getUserWithFallback(1L);
assertNotNull(result);
}
@Test
void testBulkOperations() {
Set<Long> userIds = Set.of(1L, 2L, 3L);
userRepository.save(new User(1L, "User1", "[email protected]", "Engineering"));
userRepository.save(new User(2L, "User2", "[email protected]", "Sales"));
userRepository.save(new User(3L, "User3", "[email protected]", "Marketing"));
Map<Long, User> users = cachedUserService.getUsersByIds(userIds);
assertEquals(3, users.size());
assertTrue(users.containsKey(1L));
assertTrue(users.containsKey(2L));
assertTrue(users.containsKey(3L));
}
}
Best Practices and Patterns
Cache Key Management
@Component
public class CacheKeyGenerator {
public String generateKey(String prefix, Object... parts) {
StringBuilder key = new StringBuilder(prefix);
for (Object part : parts) {
key.append(":").append(part.toString().toLowerCase().replace(" ", "_"));
}
return key.toString();
}
public String generateUserKey(Long userId) {
return generateKey("user", userId);
}
public String generateUserListKey(String department) {
return generateKey("users", "dept", department);
}
public String generateUserSearchKey(String query, int page, int size) {
return generateKey("users", "search", query, "page", page, "size", size);
}
}
// Cache configuration with TTL strategies
@Component
public class CacheTTLManager {
private final Map<String, Duration> ttlConfig;
public CacheTTLManager() {
this.ttlConfig = Map.of(
"user", Duration.ofHours(1),
"user_list", Duration.ofMinutes(30),
"user_search", Duration.ofMinutes(10),
"product", Duration.ofHours(2),
"product_list", Duration.ofMinutes(15)
);
}
public Duration getTTL(String cacheType) {
return ttlConfig.getOrDefault(cacheType, Duration.ofMinutes(30));
}
public Duration getShortTTL(String cacheType) {
return Duration.ofMinutes(5); // For frequently changing data
}
public Duration getLongTTL(String cacheType) {
return Duration.ofHours(24); // For relatively static data
}
}
Conclusion
The Cache-Aside pattern provides:
- Fine-grained control over cache operations
- Lazy loading - data is cached only when requested
- Cache failure resilience - system continues working without cache
- Flexible invalidation strategies
- Distributed caching support with proper locking
- Comprehensive monitoring and metrics
This implementation demonstrates production-ready cache-aside patterns with proper error handling, metrics collection, distributed locking, and Spring Boot integration. The pattern is particularly effective for read-heavy workloads and can significantly improve application performance while maintaining data consistency.