Why Your Redis Cache Is Probably Wrong

Redis caching is deceptively complex. Discover the patterns and practices for robust cache implementations.

Redis is the go-to solution for caching, but most implementations I see are fundamentally flawed. Here's why, and how to fix it.

Common Anti-Patterns

1. The Naive Cache

// ❌ BAD: Basic cache implementation
class UserService {
  // Anti-pattern: cache-aside read with no key versioning and no TTL.
  async getUser(id: string): Promise<User> {
    // Simple key, no versioning — after a schema change this serves the
    // old object shape forever
    const cached = await redis.get(`user:${id}`);
    if (cached) {
      return JSON.parse(cached);
    }

    const user = await db.users.findById(id);
    // No TTL strategy — the entry never expires, so stale data lives
    // until something explicitly deletes it
    await redis.set(`user:${id}`, JSON.stringify(user));
    return user;
  }
}

2. Cache Invalidation Issues

// ❌ BAD: Manual invalidation
class UserController {
  // Anti-pattern: invalidation is manual and not atomic with the DB write.
  async updateUser(id: string, data: UpdateUserDTO) {
    await db.users.update(id, data);
    // What if this fails? The DB is already updated, but the cache keeps
    // serving the old user until something else evicts it.
    await redis.del(`user:${id}`);
    // What about related caches? Every derived cache needs another del()
    // added here by hand — easy to forget as the system grows.
    await redis.del(`userPosts:${id}`);
  }
}

3. Race Conditions

// ❌ BAD: Concurrent updates
async function updateUserCount() {
  // Anti-pattern: a read-modify-write across two network round-trips is
  // not atomic — two processes can read the same value and both write
  // value+1, losing an increment. (The fix is a single atomic INCR.)
  const count = await redis.get('user_count');
  // Race condition: multiple processes might read same value
  // NOTE(review): get() returns null for a missing key, so parseInt(count)
  // is NaN on first use — seed the key or handle null explicitly.
  await redis.set('user_count', parseInt(count) + 1);
}

Better Patterns

1. Smart Cache Keys

// ✅ BETTER: Versioned cache keys
// Builds namespaced, versioned Redis keys. Bumping `version` effectively
// invalidates every key minted by this instance, since old entries are
// simply never read again.
class CacheKey {
  constructor(
    private prefix: string,
    private version: number = 1
  ) {}

  /** Key for a single entity, e.g. "user:v2:42". */
  forEntity(id: string): string {
    return [this.prefix, `v${this.version}`, id].join(':');
  }

  /** Key for a parameterized query, e.g. "user:v2:query:<hash>". */
  forQuery(params: Record<string, any>): string {
    const digest = createHash(params);
    return [this.prefix, `v${this.version}`, 'query', digest].join(':');
  }
}

class UserCache {
  private keys = new CacheKey('user', 2);

  /**
   * Read a user from cache; null on miss.
   *
   * set() stores JSON text, so a hit must be deserialized — returning the
   * raw string (as the original did) violates the Promise<User | null>
   * contract.
   */
  async get(id: string): Promise<User | null> {
    const cached = await redis.get(this.keys.forEntity(id));
    return cached ? (JSON.parse(cached) as User) : null;
  }

  /** Store a user as JSON with a 1-hour TTL to bound staleness. */
  async set(user: User): Promise<void> {
    const key = this.keys.forEntity(user.id);
    await redis.setex(key, 3600, JSON.stringify(user));
  }
}

2. Cache-Aside Pattern

// ✅ BETTER: Robust cache-aside implementation
class CacheAside<T> {
  constructor(
    private redis: Redis,
    private source: DataSource<T>,
    private options: CacheOptions
  ) {}

  /**
   * Cache-aside read guarded by a distributed lock, so only one process
   * loads a missing key from the source while the others wait and retry.
   *
   * NOTE(review): the wait path recurses with no depth limit or backoff
   * cap — a stuck lock holder means unbounded retries until the lock's
   * 5 s TTL expires. Confirm that bound is acceptable for this workload.
   */
  async get(key: string): Promise<T> {
    const cached = await this.redis.get(key);
    if (cached) {
      return this.deserialize(cached);
    }

    // Use mutex to prevent thundering herd
    const mutex = await this.getMutex(key);
    if (!mutex.acquired) {
      // Another process is loading, wait and retry
      await sleep(100);
      return this.get(key);
    }

    try {
      const data = await this.source.get(key);
      await this.set(key, data);
      return data;
    } finally {
      // Always release, even if the source read or cache write throws.
      await mutex.release();
    }
  }

  // Acquire a key-scoped distributed lock with a 5-second TTL.
  // NOTE(review): many redlock clients throw on acquisition failure rather
  // than resolving with { acquired: false } — verify against the client in
  // use, or the !mutex.acquired branch above may be dead code.
  private async getMutex(key: string): Promise<Mutex> {
    return redlock.lock(`mutex:${key}`, 5000);
  }
}

3. Write-Through Cache

// ✅ BETTER: Consistent write-through caching
class UserRepository {
  constructor(
    private db: Database,
    private cache: Redis,
    private events: EventBus
  ) {}

  /**
   * Write-through update: persist first, then refresh the cache and notify.
   *
   * The cache is written only AFTER the transaction commits. Writing it
   * inside the transaction (as the original did) meant a failed commit
   * rolled back the database but left uncommitted phantom data in the
   * cache for other readers.
   *
   * @param id   user id to update
   * @param data fields to apply
   * @returns    the updated, committed user row
   * @throws     rethrows any DB error after rolling back the transaction
   */
  async update(id: string, data: UpdateUserDTO): Promise<User> {
    const tx = await this.db.transaction();

    let user: User;
    try {
      user = await tx.users.update(id, data);
      await tx.commit();
    } catch (error) {
      await tx.rollback();
      throw error;
    }

    // Refresh the entity and its version marker in one round trip.
    await this.cache.multi()
      .set(`user:${id}`, JSON.stringify(user))
      .set(`user:${id}:version`, user.version)
      .exec();

    // Let other services invalidate their own derived caches.
    await this.events.publish('user.updated', {
      id,
      version: user.version
    });

    return user;
  }
}

Advanced Patterns

1. Cache Stampede Prevention

// ✅ BETTER: Probabilistic early recomputation
class StampedePreventingCache {
  // Aggressiveness of early recomputation: higher beta = refresh sooner.
  private beta = 1.0;

  /**
   * Read with probabilistic early expiration: as an entry nears its TTL,
   * the chance of a background refresh rises smoothly toward 1, so one
   * process refreshes early instead of many stampeding at expiry.
   */
  async get(key: string): Promise<CacheEntry> {
    const entry = await this.redis.hgetall(key);

    if (!entry.value) {
      return this.computeValue(key);
    }

    // hgetall returns string fields — coerce explicitly instead of
    // relying on implicit coercion inside the arithmetic.
    const age = Date.now() - Number(entry.updated);
    const remainingTtl = Number(entry.ttl) - age;

    if (this.shouldRecompute(remainingTtl)) {
      // Fire-and-forget refresh; readers keep the current value meanwhile.
      this.computeValue(key).catch(console.error);
    }

    return entry.value;
  }

  /**
   * Probability of refresh approaches 1 as remaining TTL approaches 0.
   * (The original also took an `age` parameter it never used.)
   */
  private shouldRecompute(remainingTtl: number): boolean {
    const probability = Math.exp(-this.beta * remainingTtl);
    return Math.random() < probability;
  }
}

2. Hierarchical Cache

// ✅ BETTER: Multi-level caching
class HierarchicalCache {
  constructor(
    private l1: Cache,  // Local memory
    private l2: Cache,  // Redis
    private l3: Cache   // Database
  ) {}

  /**
   * Read through three tiers, back-filling the faster tiers on a hit.
   *
   * Uses `!= null` rather than truthiness so legitimate falsy values
   * (0, '', false) stored in a tier are not misread as misses, and a
   * null from the source is never back-filled (which would pin the miss
   * into L1/L2 until it expired).
   */
  async get(key: string): Promise<any> {
    // Try L1 (memory)
    const l1Result = await this.l1.get(key);
    if (l1Result != null) return l1Result;

    // Try L2 (Redis)
    const l2Result = await this.l2.get(key);
    if (l2Result != null) {
      await this.l1.set(key, l2Result);
      return l2Result;
    }

    // Load from L3 (DB); populate both faster tiers in parallel on a hit.
    const l3Result = await this.l3.get(key);
    if (l3Result != null) {
      await Promise.all([
        this.l1.set(key, l3Result),
        this.l2.set(key, l3Result)
      ]);
    }

    return l3Result;
  }
}

3. Intelligent TTL

// ✅ BETTER: Dynamic TTL based on access patterns
class AdaptiveTTLCache {
  /** Store a value whose TTL adapts to how the key is actually used. */
  async set(key: string, value: any): Promise<void> {
    const pattern = await this.getAccessPattern(key);
    const ttlSeconds = this.calculateTTL(pattern);

    await this.redis
      .multi()
      .set(key, JSON.stringify(value))
      .expire(key, ttlSeconds)
      .exec();
  }

  /** Hot keys keep entries longest; churny keys expire fastest. */
  private calculateTTL(pattern: AccessPattern): number {
    if (pattern.frequency === 'high') return 3600;  // 1 hour
    if (pattern.updates === 'frequent') return 300; // 5 minutes
    return 1800;                                    // 30 minutes
  }
}

Best Practices

1. Error Handling

class ResilientCache {
  /**
   * Read from Redis, degrading gracefully: on any cache error, record a
   * metric, possibly trip the circuit breaker, and serve from the
   * fallback source instead of failing the request.
   */
  async get(key: string): Promise<any> {
    try {
      const value = await this.redis.get(key);
      return value;
    } catch (error) {
      this.metrics.incrementError('cache_read');

      // Circuit breaker: stop hammering a flapping Redis so cache errors
      // don't add latency to every request.
      if (this.shouldDisableCache(error)) {
        await this.temporarilyDisableCache();
      }

      // Serve from the authoritative source instead.
      return this.getFallback(key);
    }
  }
}

2. Monitoring

class MonitoredCache {
  private metrics = new CacheMetrics();

  /** Cache read instrumented with latency and hit/miss metrics. */
  async get(key: string): Promise<any> {
    const timer = this.metrics.startTimer();

    try {
      const result = await this.redis.get(key);
      this.metrics.recordHit(key, result != null);
      return result;
    } finally {
      // Latency is recorded for hits, misses, and errors alike.
      timer.end();
    }
  }
}

// Metric sinks for cache instrumentation: hit ratio, read latency, and
// overall size.
class CacheMetrics {
  private hitRate = new Rate('cache_hit_rate');
  private latency = new Histogram('cache_latency');
  private size = new Gauge('cache_size');

  /** Fold one lookup outcome into the hit-rate series (1 = hit, 0 = miss). */
  recordHit(key: string, hit: boolean) {
    const sample = hit ? 1 : 0;
    this.hitRate.record(sample);
  }
}

Conclusion

For effective Redis caching:

  • Use versioned cache keys
  • Implement proper invalidation strategies
  • Handle race conditions
  • Prevent cache stampedes
  • Monitor and measure everything

Remember: Caching is easy to implement but hard to get right.