# Redis Caching Strategies Complete Guide
This guide shows how to implement high-performance caching with Redis in Node.js applications, covering the cache-aside and write-through patterns, session management, rate limiting, distributed locking, and real-time pub/sub.
## Redis Client Setup
### Configuration with ioredis
```typescript
// lib/redis.ts
import Redis from "ioredis";

const getRedisConfig = () => ({
  host: process.env.REDIS_HOST || "localhost",
  port: parseInt(process.env.REDIS_PORT || "6379", 10),
  password: process.env.REDIS_PASSWORD,
  maxRetriesPerRequest: 3,
  enableReadyCheck: true,
  lazyConnect: true,
});

// Singleton pattern for connection reuse (avoids piling up connections on hot reload)
const globalForRedis = globalThis as unknown as { redis: Redis | undefined };

export const redis = globalForRedis.redis ?? new Redis(getRedisConfig());

if (process.env.NODE_ENV !== "production") {
  globalForRedis.redis = redis;
}

// Connection event handlers
redis.on("error", (err) => console.error("Redis error:", err));
redis.on("connect", () => console.log("Redis connected"));
redis.on("ready", () => console.log("Redis ready"));

// Graceful shutdown
process.on("SIGTERM", async () => {
  await redis.quit();
});
```
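Because `lazyConnect` is enabled, no socket is opened until the first command runs. A small health-check helper is handy for readiness probes; the sketch below is illustrative, and the function name and return shape are assumptions rather than part of the setup above.

```typescript
// lib/redis-health.ts — minimal readiness check (sketch)
import { redis } from "./redis";

export async function checkRedisHealth(): Promise<{ healthy: boolean; latencyMs?: number }> {
  const start = Date.now();
  try {
    // PING triggers the lazy connection on first use and round-trips to the server
    await redis.ping();
    return { healthy: true, latencyMs: Date.now() - start };
  } catch {
    return { healthy: false };
  }
}
```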
## Caching Patterns
### Cache-Aside Pattern
```typescript
// lib/cache.ts
import { redis } from "./redis";
import { db } from "./db";

interface CacheOptions {
  ttl?: number; // seconds
  prefix?: string;
}

export async function cacheAside<T>(
  key: string,
  fetcher: () => Promise<T>,
  options: CacheOptions = {}
): Promise<T> {
  const { ttl = 3600, prefix = "cache" } = options;
  const cacheKey = `${prefix}:${key}`;

  // Try to get from cache
  const cached = await redis.get(cacheKey);
  if (cached) {
    return JSON.parse(cached) as T;
  }

  // Fetch fresh data
  const data = await fetcher();

  // Store in cache (non-blocking)
  redis.setex(cacheKey, ttl, JSON.stringify(data)).catch(console.error);

  return data;
}

// Usage example
export async function getUser(userId: string) {
  return cacheAside(
    `user:${userId}`,
    async () => {
      const user = await db.user.findUnique({ where: { id: userId } });
      return user;
    },
    { ttl: 300 } // 5 minutes
  );
}

// Cache invalidation
// Note: KEYS is O(N) and blocks Redis; for large keyspaces prefer the
// SCAN-based variant shown below
export async function invalidateCache(pattern: string) {
  const keys = await redis.keys(`cache:${pattern}`);
  if (keys.length > 0) {
    await redis.del(...keys);
  }
}
```
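`KEYS` scans the entire keyspace in a single blocking call, which can stall Redis on large datasets. For production invalidation, a `SCAN`-based variant is usually safer. Here is a minimal sketch using ioredis's `scanStream`; it assumes Redis 4+ for `UNLINK`.

```typescript
// lib/cache-invalidate.ts — SCAN-based alternative to KEYS (sketch)
import { redis } from "./redis";

export async function invalidateCacheByScan(pattern: string): Promise<number> {
  let deleted = 0;

  // scanStream iterates the keyspace in small batches without blocking Redis
  const stream = redis.scanStream({ match: `cache:${pattern}`, count: 100 });

  for await (const keys of stream as AsyncIterable<string[]>) {
    if (keys.length > 0) {
      // UNLINK frees memory asynchronously, so large batches stay non-blocking
      deleted += await redis.unlink(...keys);
    }
  }

  return deleted;
}
```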
### Write-Through Pattern
```typescript
// lib/write-through.ts
import { redis } from "./redis";
import { db } from "./db";
// Adjust to wherever your ORM exports the model type (e.g., "@prisma/client")
import type { User } from "./db";

export async function updateUserWithCache(
  userId: string,
  data: Partial<User>
): Promise<User> {
  // Update database first (source of truth)
  const user = await db.user.update({
    where: { id: userId },
    data,
  });

  // Update cache immediately so readers never see stale data
  await redis.setex(
    `cache:user:${userId}`,
    300,
    JSON.stringify(user)
  );

  return user;
}

// Batch write-through
export async function batchUpdateWithCache(
  updates: Array<{ id: string; data: Record<string, unknown> }>
) {
  const pipeline = redis.pipeline();

  for (const { id, data } of updates) {
    const updated = await db.record.update({
      where: { id },
      data,
    });
    pipeline.setex(`cache:record:${id}`, 300, JSON.stringify(updated));
  }

  // Cache writes are batched into a single round trip
  await pipeline.exec();
}
```
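To show where write-through fits, here is a sketch of a request handler that updates a user and also invalidates derived caches. The handler shape and the `users:list*` key pattern are assumptions for illustration, not part of the helpers above.

```typescript
// Example usage of the write-through helper (sketch; handler shape is hypothetical)
import { updateUserWithCache } from "./write-through";
import { invalidateCache } from "./cache";

export async function handleUserUpdate(
  userId: string,
  changes: Parameters<typeof updateUserWithCache>[1]
) {
  // Write-through keeps the per-user cache entry in sync with the database
  const user = await updateUserWithCache(userId, changes);

  // Derived caches (e.g., cached user lists) still need explicit invalidation
  await invalidateCache("users:list*");

  return user;
}
```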
## Session Management
### Secure Session Store
```typescript
// lib/session.ts
import { redis } from "./redis";
import { nanoid } from "nanoid";

interface Session {
  userId: string;
  email: string;
  role: string;
  createdAt: number;
  lastAccess: number;
}

const SESSION_TTL = 60 * 60 * 24 * 7; // 7 days
const SESSION_PREFIX = "session";

export async function createSession(user: {
  id: string;
  email: string;
  role: string;
}): Promise<string> {
  const sessionId = nanoid(32);
  const session: Session = {
    userId: user.id,
    email: user.email,
    role: user.role,
    createdAt: Date.now(),
    lastAccess: Date.now(),
  };

  await redis.setex(
    `${SESSION_PREFIX}:${sessionId}`,
    SESSION_TTL,
    JSON.stringify(session)
  );

  // Track user sessions for multi-device management
  await redis.sadd(`user-sessions:${user.id}`, sessionId);
  await redis.expire(`user-sessions:${user.id}`, SESSION_TTL);

  return sessionId;
}

export async function getSession(sessionId: string): Promise<Session | null> {
  const data = await redis.get(`${SESSION_PREFIX}:${sessionId}`);
  if (!data) return null;

  const session = JSON.parse(data) as Session;

  // Update last access (sliding expiration)
  session.lastAccess = Date.now();
  await redis.setex(
    `${SESSION_PREFIX}:${sessionId}`,
    SESSION_TTL,
    JSON.stringify(session)
  );

  return session;
}

export async function destroySession(sessionId: string): Promise<void> {
  const session = await getSession(sessionId);
  if (session) {
    await redis.del(`${SESSION_PREFIX}:${sessionId}`);
    await redis.srem(`user-sessions:${session.userId}`, sessionId);
  }
}

// Logout from all devices
export async function destroyAllUserSessions(userId: string): Promise<void> {
  const sessionIds = await redis.smembers(`user-sessions:${userId}`);
  if (sessionIds.length > 0) {
    const keys = sessionIds.map((id) => `${SESSION_PREFIX}:${id}`);
    await redis.del(...keys, `user-sessions:${userId}`);
  }
}
```
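On login, the id returned by `createSession` would typically be stored in an httpOnly, secure cookie. Below is a sketch of a request-side helper that resolves that cookie back into a session; the cookie name `sid` is an assumption.

```typescript
// lib/auth.ts — request-side session lookup (sketch; cookie name "sid" is assumed)
import { getSession } from "./session";

export async function getSessionFromRequest(req: Request) {
  // Parse the session id out of the Cookie header (format: "sid=<id>; other=...")
  const cookieHeader = req.headers.get("cookie") ?? "";
  const match = cookieHeader.match(/(?:^|;\s*)sid=([^;]+)/);
  if (!match) return null;

  // Returns null for unknown or expired sessions
  return getSession(match[1]);
}
```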
## Rate Limiting
### Sliding Window Rate Limiter
```typescript
// lib/rate-limit.ts
import { redis } from "./redis";

interface RateLimitResult {
  allowed: boolean;
  remaining: number;
  resetAt: number;
}

export async function slidingWindowRateLimit(
  key: string,
  limit: number,
  windowMs: number
): Promise<RateLimitResult> {
  const now = Date.now();
  const windowStart = now - windowMs;
  const redisKey = `ratelimit:${key}`;
  const member = `${now}-${Math.random()}`;

  // Remove old entries, record this request, and count the current window
  const pipeline = redis.pipeline();
  pipeline.zremrangebyscore(redisKey, 0, windowStart);
  pipeline.zadd(redisKey, now, member);
  pipeline.zcard(redisKey);
  pipeline.expire(redisKey, Math.ceil(windowMs / 1000));

  const results = await pipeline.exec();
  const count = results?.[2]?.[1] as number;

  const allowed = count <= limit;
  const remaining = Math.max(0, limit - count);
  const resetAt = now + windowMs;

  // Roll back only this request's entry if over the limit,
  // so concurrent requests in the same millisecond are untouched
  if (!allowed) {
    await redis.zrem(redisKey, member);
  }

  return { allowed, remaining, resetAt };
}

// Usage in API route
export async function rateLimitMiddleware(
  userId: string,
  limit = 100,
  windowMs = 60000
): Promise<RateLimitResult> {
  return slidingWindowRateLimit(`user:${userId}`, limit, windowMs);
}
```
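A sketch of how the limiter might be enforced in a route handler, returning 429 with conventional rate-limit headers; the handler shape and header names are illustrative, not part of the limiter itself.

```typescript
// Example: enforcing the limiter in a request handler (sketch)
import { rateLimitMiddleware } from "./rate-limit";

export async function guardedHandler(userId: string): Promise<Response> {
  const result = await rateLimitMiddleware(userId, 100, 60_000);

  // Expose the limiter state to clients via response headers
  const headers = {
    "X-RateLimit-Remaining": String(result.remaining),
    "X-RateLimit-Reset": String(Math.ceil(result.resetAt / 1000)),
  };

  if (!result.allowed) {
    return new Response("Too Many Requests", { status: 429, headers });
  }

  // ...handle the request normally
  return new Response("OK", { status: 200, headers });
}
```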
## Distributed Locking
### Single-Instance Locking with SET NX
The snippet below implements a single-instance lock using `SET ... NX PX` with a unique token. The full Redlock algorithm builds on the same primitive but acquires the lock on a majority of independent Redis nodes.
```typescript
// lib/lock.ts
import { redis } from "./redis";
import { nanoid } from "nanoid";

const LOCK_TTL = 30000; // 30 seconds

export async function acquireLock(
  resource: string,
  ttl = LOCK_TTL
): Promise<string | null> {
  const lockId = nanoid();
  const key = `lock:${resource}`;

  // NX: only set if the key does not exist; PX: expiry in milliseconds
  const acquired = await redis.set(key, lockId, "PX", ttl, "NX");

  return acquired === "OK" ? lockId : null;
}

export async function releaseLock(
  resource: string,
  lockId: string
): Promise<boolean> {
  const key = `lock:${resource}`;

  // Lua script for atomic check-and-delete: only the lock holder can release
  const script = `
    if redis.call("get", KEYS[1]) == ARGV[1] then
      return redis.call("del", KEYS[1])
    else
      return 0
    end
  `;

  const result = await redis.eval(script, 1, key, lockId);
  return result === 1;
}

// Usage with auto-release
export async function withLock<T>(
  resource: string,
  fn: () => Promise<T>,
  ttl = LOCK_TTL
): Promise<T> {
  const lockId = await acquireLock(resource, ttl);
  if (!lockId) {
    throw new Error(`Could not acquire lock for ${resource}`);
  }

  try {
    // Note: if fn runs longer than ttl, the lock expires and another caller can acquire it
    return await fn();
  } finally {
    await releaseLock(resource, lockId);
  }
}
```
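A sketch of `withLock` in use, ensuring only one app instance runs a scheduled job at a time. The job name and the `generateReport` helper are hypothetical, and the work should stay shorter than the lock TTL since the lock is not extended automatically.

```typescript
// Example: only one instance runs the nightly report at a time (sketch)
import { withLock } from "./lock";

export async function runNightlyReport() {
  try {
    await withLock(
      "jobs:nightly-report",
      async () => {
        // Work protected by the lock; keep it shorter than the lock TTL
        await generateReport();
      },
      60_000
    );
  } catch (err) {
    // Another instance holds the lock, so it is safe to skip this run
    console.log("Skipping nightly report:", (err as Error).message);
  }
}

// Placeholder for the actual job body
async function generateReport() {
  /* ... */
}
```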
## Pub/Sub for Real-Time
```typescript
// lib/pubsub.ts
import Redis from "ioredis";

// A connection in subscriber mode cannot run regular commands,
// so publishing and subscribing use separate connections
const publisher = new Redis(process.env.REDIS_URL!);
const subscriber = new Redis(process.env.REDIS_URL!);

export async function publish(channel: string, message: unknown) {
  await publisher.publish(channel, JSON.stringify(message));
}

export function subscribe(
  channel: string,
  handler: (message: unknown) => void
) {
  subscriber.subscribe(channel);
  subscriber.on("message", (ch, msg) => {
    if (ch === channel) {
      handler(JSON.parse(msg));
    }
  });
}
```
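A sketch of pub/sub used for cross-instance cache invalidation; the channel name and event shape are assumptions chosen for illustration.

```typescript
// Example: broadcasting cache invalidations to every app instance (sketch)
import { publish, subscribe } from "./pubsub";
import { invalidateCache } from "./cache";

// Publisher side: call this after a write
export async function announceUserChange(userId: string) {
  await publish("cache-invalidation", { type: "user", id: userId });
}

// Subscriber side: run once at startup on every instance
export function listenForInvalidations() {
  subscribe("cache-invalidation", (message) => {
    const event = message as { type: string; id: string };
    if (event.type === "user") {
      // Each instance clears the matching cache entry
      invalidateCache(`user:${event.id}`).catch(console.error);
    }
  });
}
```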
This Redis caching guide covers essential patterns for sessions, rate limiting, distributed locking, and real-time pub/sub in production applications.