Skip to content
Go back

Implementing Redis Caching Strategies in Node.js

Implementing Redis Caching Strategies in Node.js

Introduction

Redis caching improves application performance by storing frequently accessed data in memory. This guide covers common caching patterns and implementation strategies.

Prerequisites

- Node.js 16+ and npm installed
- A Redis server running locally, or reachable via the REDIS_URL environment variable

Step 1: Install Redis Client

npm install redis

Step 2: Redis Connection Setup

Create lib/redis.ts:

import { createClient, RedisClientType } from 'redis';

/**
 * Thin wrapper around a single node-redis v4 client.
 *
 * Connection is lazy: every public method calls connect() first, and a
 * memoized promise guarantees client.connect() runs at most once even
 * when several callers race on a cold client.
 */
class RedisCache {
  private client: RedisClientType;
  // In-flight (or completed) connect() promise; null until first attempt.
  private connecting: Promise<unknown> | null = null;

  constructor() {
    this.client = createClient({
      url: process.env.REDIS_URL || 'redis://localhost:6379'
    });

    this.client.on('error', (err) => console.error('Redis error:', err));
    // 'ready' fires once commands can actually be served;
    // 'connect' only means the socket opened.
    this.client.on('ready', () => console.log('Connected to Redis'));
  }

  /**
   * Connect exactly once; concurrent callers share the same attempt.
   * A failed attempt resets the memo so the next call can retry.
   */
  async connect(): Promise<void> {
    if (!this.connecting) {
      this.connecting = this.client.connect().catch((err) => {
        this.connecting = null; // allow a retry after failure
        throw err;
      });
    }
    await this.connecting;
  }

  /** Get a string value, or null when the key does not exist. */
  async get(key: string): Promise<string | null> {
    await this.connect();
    return await this.client.get(key);
  }

  /** Set a string value, optionally with a TTL in seconds (SETEX). */
  async set(key: string, value: string, ttlSeconds?: number): Promise<void> {
    await this.connect();
    if (ttlSeconds) {
      await this.client.setEx(key, ttlSeconds, value);
    } else {
      await this.client.set(key, value);
    }
  }

  /** Delete a key; returns the number of keys removed (0 or 1). */
  async del(key: string): Promise<number> {
    await this.connect();
    return await this.client.del(key);
  }

  /** EXISTS: 1 if the key exists, 0 otherwise. */
  async exists(key: string): Promise<number> {
    await this.connect();
    return await this.client.exists(key);
  }

  /**
   * Delete every key matching a glob pattern.
   *
   * Uses cursor-based SCAN instead of KEYS: KEYS is O(N) over the whole
   * keyspace and blocks the Redis server, which is unsafe in production.
   * Deletes are issued in batches of up to 100 keys.
   */
  async flushPattern(pattern: string): Promise<void> {
    await this.connect();
    let batch: string[] = [];
    for await (const key of this.client.scanIterator({ MATCH: pattern, COUNT: 100 })) {
      batch.push(key);
      if (batch.length >= 100) {
        await this.client.del(batch);
        batch = [];
      }
    }
    if (batch.length > 0) {
      await this.client.del(batch);
    }
  }
}

export const redis = new RedisCache();

Step 3: Cache-Aside Pattern

This is the most common pattern: check the cache first, and fall back to the database on a miss.

import { redis } from '../lib/redis';
import { getUserFromDB, updateUserInDB } from '../db/users';

/** TTL for cached user records, in seconds (1 hour). */
const USER_CACHE_TTL = 3600;

export class UserService {
  private getCacheKey(userId: string): string {
    return `user:${userId}`;
  }

  /**
   * Cache-aside read: try Redis first, fall back to the database on a
   * miss, then populate the cache for the next reader.
   *
   * Cache failures (Redis outage, corrupt cached JSON) are logged and
   * treated as misses, so a cache problem never breaks reads — the
   * database remains the source of truth.
   */
  async getUser(userId: string) {
    const cacheKey = this.getCacheKey(userId);

    try {
      const cached = await redis.get(cacheKey);
      if (cached) {
        console.log('Cache hit for user:', userId);
        return JSON.parse(cached);
      }
    } catch (error) {
      // Degrade to a miss instead of failing the whole read.
      console.error('Cache read failed for user:', userId, error);
    }

    console.log('Cache miss for user:', userId);
    const user = await getUserFromDB(userId);

    if (user) {
      // Best-effort populate: a failed write just means a miss next time.
      await redis
        .set(cacheKey, JSON.stringify(user), USER_CACHE_TTL)
        .catch((error) => console.error('Cache write failed:', error));
    }

    return user;
  }

  /**
   * Write path: update the database, then invalidate (not update) the
   * cached copy. Invalidation avoids racing writers caching stale data;
   * the next read repopulates the entry.
   */
  async updateUser(userId: string, userData: any) {
    const updatedUser = await updateUserInDB(userId, userData);

    const cacheKey = this.getCacheKey(userId);
    await redis.del(cacheKey);

    return updatedUser;
  }
}

Step 4: Write-Through Pattern

Writes to the database first, then updates the cache, keeping the two stores consistent:

import { redis } from '../lib/redis';

export class WriteThoughService {
  async setUserData(userId: string, data: any) {
    const cacheKey = `user:${userId}`;
    
    try {
      // Write to database first
      await updateUserInDB(userId, data);
      
      // Then update cache
      await redis.set(cacheKey, JSON.stringify(data), 3600);
      
      return data;
    } catch (error) {
      // If database write fails, don't cache invalid data
      console.error('Write-through failed:', error);
      throw error;
    }
  }
}

Step 5: Write-Behind (Write-Back) Pattern

Updates cache immediately, database asynchronously:

import { redis } from '../lib/redis';

export class WriteBehindService {
  private writeQueue: Map<string, any> = new Map();
  private batchTimeout: NodeJS.Timeout | null = null;

  async setUserData(userId: string, data: any) {
    const cacheKey = `user:${userId}`;
    
    // Update cache immediately
    await redis.set(cacheKey, JSON.stringify(data), 3600);
    
    // Queue for batch database write
    this.writeQueue.set(userId, data);
    this.scheduleBatchWrite();
    
    return data;
  }

  private scheduleBatchWrite() {
    if (this.batchTimeout) return;
    
    this.batchTimeout = setTimeout(async () => {
      await this.flushWriteQueue();
      this.batchTimeout = null;
    }, 5000); // Batch writes every 5 seconds
  }

  private async flushWriteQueue() {
    const updates = Array.from(this.writeQueue.entries());
    this.writeQueue.clear();
    
    // Batch update database
    try {
      await Promise.all(
        updates.map(([userId, data]) => updateUserInDB(userId, data))
      );
      console.log(`Flushed ${updates.length} updates to database`);
    } catch (error) {
      console.error('Batch write failed:', error);
      // Re-queue failed updates or handle appropriately
    }
  }
}

Step 6: Cache Warming

Pre-populate cache with frequently accessed data:

import { redis } from '../lib/redis';
import { getPopularUsers, getTrendingPosts } from '../db/queries';

/** Pre-populates the cache with frequently accessed records. */
export class CacheWarmer {
  /** Warm the 100 most popular users with a 2-hour TTL. */
  async warmUserCache() {
    console.log('Warming user cache...');

    const popularUsers = await getPopularUsers(100);

    // allSettled: one failed SET must not abort the whole warm-up.
    const results = await Promise.allSettled(
      popularUsers.map((user) =>
        redis.set(`user:${user.id}`, JSON.stringify(user), 7200) // 2 hours
      )
    );
    const ok = results.filter((r) => r.status === 'fulfilled').length;
    console.log(`Warmed cache for ${ok}/${popularUsers.length} users`);
  }

  /** Warm the 50 trending posts with a 1-hour TTL. */
  async warmPostCache() {
    console.log('Warming post cache...');

    const trendingPosts = await getTrendingPosts(50);

    const results = await Promise.allSettled(
      trendingPosts.map((post) =>
        redis.set(`post:${post.id}`, JSON.stringify(post), 3600)
      )
    );
    const ok = results.filter((r) => r.status === 'fulfilled').length;
    console.log(`Warmed cache for ${ok}/${trendingPosts.length} posts`);
  }

  /**
   * Warm the cache immediately, then every 6 hours.
   *
   * Fixes two issues in the original: it never warmed until the first
   * interval elapsed, and the interval handle was discarded so callers
   * could neither stop it nor let the process exit.
   *
   * @returns the interval handle so callers can clearInterval() on shutdown
   */
  startCacheWarmingSchedule(): NodeJS.Timeout {
    const warmAll = () => {
      this.warmUserCache().catch(console.error);
      this.warmPostCache().catch(console.error);
    };

    warmAll(); // first warm-up runs right away

    const timer = setInterval(warmAll, 6 * 60 * 60 * 1000);
    timer.unref(); // don't keep the process alive just for cache warming
    return timer;
  }
}

Step 7: Cache Invalidation Patterns

import { redis } from '../lib/redis';

/** Helpers for removing groups of related cache entries. */
export class CacheInvalidator {
  /** Tag-based invalidation: drop every key containing a ":tag:" segment. */
  async invalidateByTag(tag: string) {
    await redis.flushPattern(`*:${tag}:*`);
  }

  /** Remove a user's record plus every cache entry derived from it. */
  async invalidateUserRelatedData(userId: string) {
    const jobs = [
      `user:${userId}`,
      `user:${userId}:*`,
      `posts:user:${userId}`,
      `friends:${userId}:*`
    ].map((pattern) => redis.flushPattern(pattern));

    await Promise.all(jobs);
  }

  /** Cascade: remove a post together with its comments, likes and stats. */
  async invalidatePostAndRelated(postId: string) {
    const tasks: Promise<unknown>[] = [
      redis.del(`post:${postId}`),
      redis.flushPattern(`comments:post:${postId}:*`),
      redis.flushPattern(`likes:post:${postId}:*`),
      redis.del(`post:${postId}:stats`)
    ];
    await Promise.all(tasks);
  }
}

Step 8: Advanced Caching Middleware

import { Request, Response, NextFunction } from 'express';
import { redis } from '../lib/redis';

/**
 * Express middleware that caches JSON responses for GET requests.
 *
 * Only GET is cached: the original keyed on method and would happily
 * replay a stored POST/PUT/DELETE response, silently skipping the
 * request's side effect. Cache errors never fail the route — the
 * request simply falls through to the handler.
 *
 * @param ttl cache lifetime in seconds (default 300)
 */
export function cacheMiddleware(ttl: number = 300) {
  return async (req: Request, res: Response, next: NextFunction) => {
    // Non-GET requests have side effects and must not be served from cache.
    if (req.method !== 'GET') {
      return next();
    }

    const cacheKey = `route:${req.method}:${req.originalUrl}`;

    try {
      const cached = await redis.get(cacheKey);
      if (cached) {
        return res.json(JSON.parse(cached));
      }

      // Intercept res.json so the outgoing body can be cached.
      const originalJson = res.json.bind(res);
      res.json = (body: any) => {
        // Cache successful responses only
        if (res.statusCode === 200) {
          redis.set(cacheKey, JSON.stringify(body), ttl).catch(console.error);
        }
        return originalJson(body);
      };

      next();
    } catch (error) {
      console.error('Cache middleware error:', error);
      next();
    }
  };
}

Summary

Redis caching strategies in Node.js include cache-aside, write-through, and write-behind patterns. Implement appropriate TTL, invalidation, and warming strategies based on your application’s data access patterns and consistency requirements.


Share this post on:

Previous Post
Building gRPC Services with Node.js and TypeScript
Next Post
Automated PostgreSQL Backups and Point-in-Time Recovery