The Truth About Redis Cache: Common Mistakes That Kill Performance

Discover common Redis caching mistakes that hurt performance, and learn how to implement caching strategies that actually speed up your application.

Think you're using Redis effectively? Here's why your caching strategy might be hurting more than helping, and how to fix it.


Common Redis Mistakes

1. Caching Everything

// ❌ BAD: Caching without strategy
async function getUserData(userId) {
  const cached = await redis.get(`user:${userId}`);
  if (cached) return JSON.parse(cached);
  
  const user = await db.users.findById(userId);
  await redis.set(`user:${userId}`, JSON.stringify(user));
  return user;
}
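
For contrast, here is a minimal sketch of a more deliberate version: cache selectively, skip empty results, and always attach a TTL (the ioredis-style set(key, value, 'EX', seconds) call is assumed, matching the other snippets in this post).

// ✅ BETTER: cache selectively, skip misses, and always set a TTL
async function getUserData(userId) {
  const cached = await redis.get(`user:${userId}`);
  if (cached) return JSON.parse(cached);

  const user = await db.users.findById(userId);
  if (!user) return null; // don't cache "not found" unless you mean to

  // Expire after an hour so stale profiles age out on their own
  await redis.set(`user:${userId}`, JSON.stringify(user), 'EX', 3600);
  return user;
}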

2. Wrong TTL Strategy

// ❌ BAD: Same TTL for everything
await redis.set('user:profile', profile, 'EX', 3600);  // 1 hour
await redis.set('user:settings', settings, 'EX', 3600); // 1 hour
await redis.set('user:posts', posts, 'EX', 3600);      // 1 hour

// ✅ BETTER: TTL based on data type
await redis.set('user:profile', profile, 'EX', 3600);     // 1 hour
await redis.set('user:settings', settings, 'EX', 86400);  // 1 day
await redis.set('user:posts', posts, 'EX', 300);         // 5 minutes
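
One refinement worth layering on top of per-type TTLs (a sketch, not something from the snippets above): add a little random jitter so that keys written together don't all expire in the same second and stampede the database when they do.

// Hypothetical helper: stretch each TTL by up to 10% at random
function jitteredTTL(baseSeconds) {
  return baseSeconds + Math.floor(baseSeconds * 0.1 * Math.random());
}

await redis.set('user:posts', posts, 'EX', jitteredTTL(300));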

Smart Caching Patterns

1. Cache Aside Pattern

class UserService {
  async getUser(userId) {
    // Try cache first
    const cached = await this.cache.get(`user:${userId}`);
    if (cached) {
      metrics.increment('cache.hit');
      return JSON.parse(cached);
    }

    // Cache miss: get from DB
    metrics.increment('cache.miss');
    const user = await this.db.users.findById(userId);
    if (!user) return null;

    // Store in cache with appropriate TTL
    await this.cache.set(
      `user:${userId}`, 
      JSON.stringify(user), 
      'EX', 
      this.getUserTTL(user)
    );

    return user;
  }

  getUserTTL(user) {
    // Dynamic TTL based on user properties
    if (user.isAdmin) return 300; // 5 minutes for admins
    if (user.isPremium) return 1800; // 30 minutes for premium
    return 3600; // 1 hour for regular users
  }
}
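
A minimal usage sketch, under the assumption that the class is wired up with an ioredis client and a data-access layer; construction is left implicit above, and metrics is taken to be a module-level stats client.

const Redis = require('ioredis');

// Hypothetical wiring; adapt to however your app builds services
async function loadUser(db, userId) {
  const service = new UserService();
  service.cache = new Redis(); // ioredis client
  service.db = db;             // data layer exposing users.findById
  return service.getUser(userId);
}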

2. Write-Through Cache

class UserRepository {
  async updateUser(userId, data) {
    // Queue cache operations atomically with MULTI (the DB update below is not part of it)
    const multi = this.redis.multi();
    
    // Update DB
    const user = await this.db.users.update(userId, data);
    
    // Update cache with new data
    multi.set(
      `user:${userId}`, 
      JSON.stringify(user), 
      'EX', 
      this.getTTL(user)
    );
    
    // Invalidate related caches
    multi.del(`userPosts:${userId}`);
    multi.del(`userStats:${userId}`);
    
    // Execute all cache operations
    await multi.exec();
    
    return user;
  }
}
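
Worth noting: the MULTI above only makes the cache operations atomic with each other. If the process dies between the DB update and exec(), the cache keeps the old value until its TTL expires. A simpler and often safer variant, sketched below assuming the same repository shape, just deletes the cached entries on write and lets the next read repopulate them via cache-aside.

// Invalidate-on-write variant: cheaper, and nothing stale can be written back
async function updateUserInvalidate(repo, userId, data) {
  const user = await repo.db.users.update(userId, data);
  await repo.redis
    .multi()
    .del(`user:${userId}`)
    .del(`userPosts:${userId}`)
    .del(`userStats:${userId}`)
    .exec();
  return user;
}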

Memory Management

1. Key Expiration Strategy

// ❌ BAD: Keys without expiration
await redis.set('user:stats', stats);

// ✅ BETTER: Always set expiration
await redis.set('user:stats', stats, 'EX', 3600);

// ✅ ALSO: cap memory and choose an eviction policy as a safety net (redis-cli / redis.conf)
CONFIG SET maxmemory 2gb
CONFIG SET maxmemory-policy allkeys-lru
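
If you don't manage redis.conf directly, the same settings can be applied from application code; a sketch assuming an ioredis client, which passes CONFIG through as redis.config(...). Keep in mind that many managed Redis services restrict CONFIG SET.

// Same settings applied from application code (self-hosted Redis assumed)
await redis.config('SET', 'maxmemory', '2gb');
await redis.config('SET', 'maxmemory-policy', 'allkeys-lru');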

2. Smart Key Design

// ❌ BAD: Long key names
'user:1234:preferences:theme:settings:layout:dashboard'

// ✅ BETTER: Short, abbreviated keys
'u:1234:pref'

// ✅ BEST: Structured keys for scanning
'org:123:u:456:pref'
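
The structured form pays off when you need to find or invalidate related keys: a cursor-based SCAN with a MATCH pattern can walk them without blocking the server the way KEYS would. A sketch using ioredis's scanStream helper (a plain SCAN loop works the same way):

// Walk every cached preference for org 123 without blocking Redis
const stream = redis.scanStream({ match: 'org:123:u:*:pref', count: 100 });

stream.on('data', (keys) => {
  for (const key of keys) {
    // e.g. queue the key for invalidation
    console.log('found', key);
  }
});
stream.on('end', () => console.log('scan complete'));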

Performance Optimization

1. Pipelining

// ❌ BAD: Multiple separate calls
const user = await redis.get('user:123');
const posts = await redis.get('posts:123');
const stats = await redis.get('stats:123');

// ✅ BETTER: Use pipelining
const pipeline = redis.pipeline();
pipeline.get('user:123');
pipeline.get('posts:123');
pipeline.get('stats:123');
const results = await pipeline.exec();
// ioredis returns an [error, value] pair for each queued command
const [user, posts, stats] = results.map(([err, value]) => value);
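
When every call in the batch is a plain GET, there's an even simpler option: a single MGET returns all the values in order, with null for any missing key.

// ✅ ALSO: one round trip, no pipeline bookkeeping
const [user, posts, stats] = await redis.mget('user:123', 'posts:123', 'stats:123');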

2. Batch Operations

// ❌ BAD: Multiple SET operations
for (const user of users) {
  await redis.set(`user:${user.id}`, JSON.stringify(user));
}

// ✅ BETTER: Use MSET
const userMap = users.reduce((map, user) => {
  map[`user:${user.id}`] = JSON.stringify(user);
  return map;
}, {});
await redis.mset(userMap);
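
One caveat: MSET can't attach TTLs, so keys written this way live until evicted. If each entry needs an expiry, the usual compromise is a single pipeline of SET ... EX calls, which is still one round trip.

// ✅ ALSO: batch writes that need per-key TTLs
const pipeline = redis.pipeline();
for (const user of users) {
  pipeline.set(`user:${user.id}`, JSON.stringify(user), 'EX', 3600);
}
await pipeline.exec();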

Monitoring and Debugging

1. Cache Hit Ratio

class CacheMetrics {
  constructor() {
    this.hits = 0;
    this.misses = 0;
  }

  recordHit() {
    this.hits++;
  }

  recordMiss() {
    this.misses++;
  }

  getHitRatio() {
    const total = this.hits + this.misses;
    return total ? this.hits / total : 0;
  }
}
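
To make the number actionable, report it somewhere you'll actually see it. A minimal sketch that logs the ratio once a minute (the interval and logging target are placeholders):

const cacheMetrics = new CacheMetrics();

// Call recordHit()/recordMiss() from your cache wrapper, then report periodically
setInterval(() => {
  const pct = (cacheMetrics.getHitRatio() * 100).toFixed(1);
  console.log(`cache hit ratio: ${pct}% (hits=${cacheMetrics.hits}, misses=${cacheMetrics.misses})`);
}, 60000);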

2. Memory Usage Tracking

async function getRedisMemoryStats() {
  // INFO returns a raw string, so parse its "key:value" lines into an object
  const raw = await redis.info('memory');
  const info = Object.fromEntries(
    raw.split('\r\n')
      .filter((line) => line && !line.startsWith('#'))
      .map((line) => line.split(':'))
  );
  return {
    used_memory: info.used_memory_human,
    peak_memory: info.used_memory_peak_human,
    fragmentation: info.mem_fragmentation_ratio
  };
}

Best Practices

1. Serialization

// ❌ BAD: JSON for everything
await redis.set('user:123', JSON.stringify(bigObject));

// ✅ BETTER: Use a compact binary format for large, hot objects
const msgpack = require('msgpack-lite');
await redis.set('user:123', msgpack.encode(bigObject));

// Reading it back needs the raw bytes, not a string (ioredis: getBuffer)
const bigObjectAgain = msgpack.decode(await redis.getBuffer('user:123'));

2. Error Handling

class RedisCacheManager {
  async get(key) {
    try {
      const value = await this.redis.get(key);
      return value ? JSON.parse(value) : null;
    } catch (error) {
      this.logger.error('Redis get error', { key, error });
      return null; // Graceful degradation
    }
  }
}
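
The same graceful-degradation idea belongs on the write path too, and it helps to bound how long any single Redis call can stall a request. A sketch assuming ioredis, whose constructor accepts a commandTimeout in milliseconds, and the same logger shape as above:

const Redis = require('ioredis');

// Fail fast rather than letting a slow Redis hold up every request
const redis = new Redis({ commandTimeout: 100 });

// Companion write path: a failed cache write is logged, never rethrown
async function cacheSet(redisClient, logger, key, value, ttlSeconds) {
  try {
    await redisClient.set(key, JSON.stringify(value), 'EX', ttlSeconds);
  } catch (error) {
    logger.error('Redis set error', { key, error });
  }
}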

Conclusion

Effective Redis caching requires:

  • Thoughtful TTL strategies
  • Proper memory management
  • Smart key design
  • Performance optimization
  • Monitoring and metrics

Remember: Caching is an optimization, not a silver bullet. Use it wisely.