Redis Caching Service


Redis is the high-performance in-memory data store used by nself for caching, session management, and message queuing. This guide covers configuration, usage patterns, and integration strategies.

What is Redis?

Redis is an open-source, in-memory data structure store used as a database, cache, and message broker. In nself, Redis provides:

  • Caching: High-speed data caching to improve application performance
  • Session Storage: Scalable session management for web applications
  • Message Queues: Job queuing and background task processing
  • Pub/Sub: Real-time messaging and event streaming
  • Rate Limiting: API rate limiting and throttling

High Performance

Redis operates entirely in memory, which yields sub-millisecond response times for most operations.
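
To see this for yourself, here is a minimal sketch (assuming a reachable Redis instance and the node-redis v4 client) that times a single round trip:

// Measure one PING round trip (hypothetical standalone script)
import { createClient } from 'redis';

async function measureLatency() {
  const client = createClient({ url: 'redis://localhost:6379' });
  await client.connect();

  const start = process.hrtime.bigint();
  await client.ping();
  const elapsedUs = Number(process.hrtime.bigint() - start) / 1000;

  console.log(`PING round trip: ${elapsedUs.toFixed(0)}µs`);
  await client.disconnect();
}

measureLatency().catch(console.error);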

Configuration

Basic Configuration

# Redis settings in .env.local
REDIS_ENABLED=true
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=
REDIS_DB=0

# Connection settings
REDIS_MAX_CONNECTIONS=100
REDIS_CONNECTION_TIMEOUT=10000
REDIS_COMMAND_TIMEOUT=5000
REDIS_RETRY_ATTEMPTS=3

Production Configuration

# Production Redis settings
REDIS_PASSWORD=very-secure-password-here
REDIS_MAX_MEMORY=2gb
REDIS_MAXMEMORY_POLICY=allkeys-lru

# Persistence settings
REDIS_SAVE_ENABLED=true
REDIS_SAVE_INTERVAL=900 1 300 10 60 10000
REDIS_AOF_ENABLED=true
REDIS_AOF_FSYNC=everysec

# Security settings
REDIS_PROTECTED_MODE=yes
REDIS_BIND_ADDRESS=127.0.0.1
REDIS_RENAME_COMMANDS=CONFIG,EVAL,DEBUG

# SSL/TLS (Redis 6+)
REDIS_TLS_ENABLED=false
REDIS_TLS_CERT_PATH=/etc/ssl/redis.crt
REDIS_TLS_KEY_PATH=/etc/ssl/redis.key
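
If TLS is enabled, the client needs the certificate material as well. A sketch with node-redis v4 (the cert/key paths mirror the env vars above and would need to be mounted into the app container):

// TLS-enabled client (sketch; paths assume the mounts from the env vars above)
import { readFileSync } from 'fs';
import { createClient } from 'redis';

const tlsClient = createClient({
  socket: {
    host: process.env.REDIS_HOST,
    port: parseInt(process.env.REDIS_PORT || '6379'),
    tls: true,
    cert: readFileSync('/etc/ssl/redis.crt'),
    key: readFileSync('/etc/ssl/redis.key')
  },
  password: process.env.REDIS_PASSWORD
});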

Data Types and Usage Patterns

Basic Data Types

# String operations
SET user:1:name "John Doe"
GET user:1:name
SETEX session:abc123 3600 "user-data"
INCR page:views
DECR inventory:item:123

# Hash operations (perfect for objects)
HSET user:1 name "John Doe" email "john@example.com" age 30
HGET user:1 name
HGETALL user:1
HINCRBY user:1 login_count 1

# List operations (queues, stacks)
LPUSH queue:emails "email1@example.com"
RPOP queue:emails
LRANGE recent:activities 0 9
LTRIM recent:activities 0 99

# Set operations (unique collections)
SADD tags:post:1 "redis" "cache" "database"
SMEMBERS tags:post:1
SINTER tags:post:1 tags:post:2

# Sorted Set operations (rankings, leaderboards)
ZADD leaderboard 100 "player1" 200 "player2"
ZRANGE leaderboard 0 9 WITHSCORES
ZINCRBY leaderboard 10 "player1"
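
From application code, these commands map one-to-one onto client methods. A sketch with node-redis v4 (method names mirror the commands above; `client` is assumed to be a connected client):

// node-redis v4 equivalents of the commands above (sketch)
await client.set('user:1:name', 'John Doe');
await client.setEx('session:abc123', 3600, 'user-data');
await client.incr('page:views');

await client.hSet('user:1', { name: 'John Doe', email: 'john@example.com' });
const profile = await client.hGetAll('user:1');

await client.lPush('queue:emails', 'email1@example.com');
const nextEmail = await client.rPop('queue:emails');

await client.sAdd('tags:post:1', ['redis', 'cache', 'database']);

await client.zAdd('leaderboard', [{ score: 100, value: 'player1' }]);
const top10 = await client.zRangeWithScores('leaderboard', 0, 9);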

Integration with Applications

NestJS Integration

// Install the Redis client (node-redis v4+ ships its own TypeScript types)
npm install redis

// Redis service
import { Injectable, OnModuleInit } from '@nestjs/common';
import { createClient, RedisClientType } from 'redis';

@Injectable()
export class RedisService implements OnModuleInit {
  private client: RedisClientType;

  constructor() {
    this.client = createClient({
      url: `redis://:${process.env.REDIS_PASSWORD}@${process.env.REDIS_HOST}:${process.env.REDIS_PORT}`,
      database: parseInt(process.env.REDIS_DB || '0')
    });
  }

  // Connect once at startup so a failed connection surfaces immediately
  async onModuleInit() {
    await this.client.connect();
  }

  async set(key: string, value: string, ttl?: number): Promise<void> {
    if (ttl) {
      await this.client.setEx(key, ttl, value);
    } else {
      await this.client.set(key, value);
    }
  }

  async get(key: string): Promise<string | null> {
    return await this.client.get(key);
  }

  async del(key: string): Promise<number> {
    return await this.client.del(key);
  }

  async hSet(key: string, field: string, value: string): Promise<number> {
    return await this.client.hSet(key, field, value);
  }

  async hGetAll(key: string): Promise<Record<string, string>> {
    return await this.client.hGetAll(key);
  }

  async exists(key: string): Promise<number> {
    return await this.client.exists(key);
  }

  async expire(key: string, seconds: number): Promise<boolean> {
    return await this.client.expire(key, seconds);
  }

  // Used by the rate-limiting example below
  async increment(key: string): Promise<number> {
    return await this.client.incr(key);
  }

  // Used by the pub/sub example below
  async publish(channel: string, message: string): Promise<number> {
    return await this.client.publish(channel, message);
  }
}
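
To make the service injectable across the app, register and export it from a module. A minimal sketch of standard NestJS wiring:

// redis.module.ts (sketch)
import { Module } from '@nestjs/common';
import { RedisService } from './redis.service';

@Module({
  providers: [RedisService],
  exports: [RedisService],
})
export class RedisModule {}

// Consumers then inject it as usual:
// constructor(private readonly redis: RedisService) {}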

Python Integration

# Install Redis client
pip install redis

import redis
import json
import os
from typing import Optional

class RedisService:
    def __init__(self):
        self.client = redis.Redis(
            host=os.getenv('REDIS_HOST', 'localhost'),
            port=int(os.getenv('REDIS_PORT', 6379)),
            password=os.getenv('REDIS_PASSWORD', None),
            db=int(os.getenv('REDIS_DB', 0)),
            decode_responses=True
        )
    
    def set(self, key: str, value: str, ttl: Optional[int] = None):
        return self.client.set(key, value, ex=ttl)
    
    def get(self, key: str):
        return self.client.get(key)
    
    def delete(self, key: str):
        return self.client.delete(key)
    
    def set_json(self, key: str, data: dict, ttl: Optional[int] = None):
        return self.set(key, json.dumps(data), ttl)
    
    def get_json(self, key: str):
        data = self.get(key)
        return json.loads(data) if data else None
    
    def increment(self, key: str, amount: int = 1):
        return self.client.incr(key, amount)
    
    def hash_set(self, key: str, field: str, value: str):
        return self.client.hset(key, field, value)
    
    def hash_get_all(self, key: str):
        return self.client.hgetall(key)

Go Integration

package redis

import (
    "context"
    "encoding/json"
    "time"

    "github.com/go-redis/redis/v8"
)

type RedisService struct {
    client *redis.Client
}

func NewRedisService(addr, password string, db int) *RedisService {
    rdb := redis.NewClient(&redis.Options{
        Addr:     addr,
        Password: password,
        DB:       db,
    })

    return &RedisService{client: rdb}
}

func (r *RedisService) Set(ctx context.Context, key string, value interface{}, expiration time.Duration) error {
    return r.client.Set(ctx, key, value, expiration).Err()
}

func (r *RedisService) Get(ctx context.Context, key string) (string, error) {
    return r.client.Get(ctx, key).Result()
}

func (r *RedisService) Delete(ctx context.Context, key string) error {
    return r.client.Del(ctx, key).Err()
}

func (r *RedisService) SetJSON(ctx context.Context, key string, data interface{}, expiration time.Duration) error {
    jsonData, err := json.Marshal(data)
    if err != nil {
        return err
    }
    return r.client.Set(ctx, key, jsonData, expiration).Err()
}

func (r *RedisService) GetJSON(ctx context.Context, key string, dest interface{}) error {
    val, err := r.client.Get(ctx, key).Result()
    if err != nil {
        return err
    }
    return json.Unmarshal([]byte(val), dest)
}

Caching Strategies

Cache-Aside Pattern

With cache-aside, the application checks the cache first, falls back to the database on a miss, and stores the result in the cache for subsequent reads.

// Cache-aside implementation
@Injectable()
export class UserService {
  constructor(
    private readonly redisService: RedisService,
    private readonly userRepository: UserRepository
  ) {}

  async getUser(id: string): Promise<User> {
    const cacheKey = `user:${id}`;
    
    // Try to get from cache first
    const cachedUser = await this.redisService.get(cacheKey);
    if (cachedUser) {
      return JSON.parse(cachedUser);
    }

    // If not in cache, get from database
    const user = await this.userRepository.findById(id);
    if (user) {
      // Store in cache for 1 hour
      await this.redisService.set(cacheKey, JSON.stringify(user), 3600);
    }

    return user;
  }

  async updateUser(id: string, userData: Partial<User>): Promise<User> {
    const user = await this.userRepository.update(id, userData);
    
    // Invalidate cache
    await this.redisService.del(`user:${id}`);
    
    return user;
  }
}
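
One common hardening step: add jitter to cache TTLs so a burst of entries written together doesn't expire (and refill) at the same moment. A sketch:

// Randomize TTLs slightly to spread out expirations (sketch)
function ttlWithJitter(baseSeconds: number, jitterRatio = 0.1): number {
  return baseSeconds + Math.floor(baseSeconds * jitterRatio * Math.random());
}

// e.g. in getUser: this.redisService.set(cacheKey, JSON.stringify(user), ttlWithJitter(3600));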

Write-Through Caching

With write-through, every write updates the database and the cache together, so reads of recently written keys never miss.

// Write-through caching
async createUser(userData: CreateUserDto): Promise<User> {
  // Save to database
  const user = await this.userRepository.create(userData);
  
  // Immediately cache the new user
  const cacheKey = `user:${user.id}`;
  await this.redisService.set(cacheKey, JSON.stringify(user), 3600);
  
  return user;
}

Cache Warming

// Warm up cache with frequently accessed data
// (@Cron is provided by the @nestjs/schedule package)
@Cron('0 0 * * *') // Run daily at midnight
async warmUpCache() {
  const popularUsers = await this.userRepository.findPopular(100);
  
  for (const user of popularUsers) {
    const cacheKey = `user:${user.id}`;
    await this.redisService.set(cacheKey, JSON.stringify(user), 86400); // 24 hours
  }
  
  console.log(`Warmed up cache with ${popularUsers.length} users`);
}

Session Management

Express Session Store

// Install session store
npm install express-session connect-redis

// Configure Redis session store
import session from 'express-session';
import RedisStore from 'connect-redis';
import { createClient } from 'redis';

const redisClient = createClient({
  url: `redis://:${process.env.REDIS_PASSWORD}@${process.env.REDIS_HOST}:${process.env.REDIS_PORT}`
});
redisClient.connect().catch(console.error);

app.use(session({
  store: new RedisStore({ client: redisClient }),
  secret: process.env.SESSION_SECRET,
  resave: false,
  saveUninitialized: false,
  cookie: {
    secure: process.env.NODE_ENV === 'production',
    httpOnly: true,
    maxAge: 24 * 60 * 60 * 1000 // 24 hours
  }
}));

// Session usage
app.post('/login', (req, res) => {
  const user = authenticateUser(req.body);
  if (user) {
    req.session.userId = user.id;
    req.session.role = user.role;
    res.json({ success: true });
  } else {
    res.status(401).json({ error: 'Invalid credentials' });
  }
});

app.post('/logout', (req, res) => {
  req.session.destroy((err) => {
    if (err) {
      return res.status(500).json({ error: 'Could not log out' });
    }
    res.json({ success: true });
  });
});
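
Once sessions live in Redis, protecting routes is just a matter of checking the session on each request. A sketch of a small guard middleware:

// Gate routes on the Redis-backed session (sketch)
function requireAuth(req, res, next) {
  if (!req.session?.userId) {
    return res.status(401).json({ error: 'Not authenticated' });
  }
  next();
}

app.get('/profile', requireAuth, (req, res) => {
  res.json({ userId: req.session.userId, role: req.session.role });
});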

Message Queues and Background Jobs

BullMQ Integration

// BullMQ uses Redis as its backing store
npm install bullmq

// Job queue setup
import { Queue, Worker, Job } from 'bullmq';

const emailQueue = new Queue('email', {
  connection: {
    host: process.env.REDIS_HOST,
    port: parseInt(process.env.REDIS_PORT || '6379'),
    password: process.env.REDIS_PASSWORD
  }
});

// Add jobs to queue
export class EmailService {
  async sendWelcomeEmail(userId: string, email: string) {
    await emailQueue.add('welcome-email', {
      userId,
      email,
      template: 'welcome'
    });
  }

  async sendPasswordReset(email: string, resetToken: string) {
    await emailQueue.add('password-reset', {
      email,
      resetToken
    }, {
      attempts: 3,
      backoff: {
        type: 'exponential',
        delay: 2000
      }
    });
  }
}

// Worker to process jobs
const emailWorker = new Worker('email', async (job: Job) => {
  switch (job.name) {
    case 'welcome-email':
      await processWelcomeEmail(job.data);
      break;
    case 'password-reset':
      await processPasswordReset(job.data);
      break;
  }
}, {
  connection: {
    host: process.env.REDIS_HOST,
    port: parseInt(process.env.REDIS_PORT || '6379'),
    password: process.env.REDIS_PASSWORD
  },
  concurrency: 5
});
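
Workers also emit lifecycle events, which are handy for logging and metrics. A sketch using BullMQ's event API:

// Observe job outcomes (sketch)
emailWorker.on('completed', (job) => {
  console.log(`Job ${job.id} (${job.name}) completed`);
});

emailWorker.on('failed', (job, err) => {
  console.error(`Job ${job?.id} failed:`, err.message);
});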

Pub/Sub Messaging

Real-time Notifications

// Publisher service
@Injectable()
export class NotificationService {
  constructor(private readonly redisService: RedisService) {}

  async publishNotification(userId: string, notification: any) {
    const channel = `notifications:${userId}`;
    await this.redisService.publish(channel, JSON.stringify(notification));
  }

  async publishToAll(notification: any) {
    await this.redisService.publish('notifications:all', JSON.stringify(notification));
  }
}

// Subscriber service
@Injectable()
export class NotificationSubscriber {
  private subscriber: RedisClientType;

  constructor() {
    // Pub/sub needs its own connection: a client in subscriber mode
    // cannot issue regular commands
    this.subscriber = createClient({
      url: `redis://:${process.env.REDIS_PASSWORD}@${process.env.REDIS_HOST}:${process.env.REDIS_PORT}`
    });
    this.subscriber.connect().then(() => this.setupSubscriptions());
  }

  private async setupSubscriptions() {
    // Subscribe to all user notifications
    await this.subscriber.pSubscribe('notifications:*', (message, channel) => {
      const userId = channel.split(':')[1];
      const notification = JSON.parse(message);
      
      // Send via WebSocket, push notification, etc.
      this.sendToUser(userId, notification);
    });
  }

  private sendToUser(userId: string, notification: any) {
    // Implementation depends on your WebSocket/SSE setup
    console.log(`Sending to user ${userId}:`, notification);
  }
}

Rate Limiting

API Rate Limiting

// Rate limiting middleware
@Injectable()
export class RateLimitService {
  constructor(private readonly redisService: RedisService) {}

  async checkRateLimit(identifier: string, limit: number, window: number): Promise<{ allowed: boolean; remaining: number }> {
    // Note: this read-then-increment sequence is not atomic under
    // concurrency; see the INCR-based variant sketched after this class
    const key = `rate_limit:${identifier}`;
    const current = await this.redisService.get(key);
    
    if (current === null) {
      await this.redisService.set(key, '1', window);
      return { allowed: true, remaining: limit - 1 };
    }
    
    const currentCount = parseInt(current);
    if (currentCount >= limit) {
      return { allowed: false, remaining: 0 };
    }
    
    await this.redisService.increment(key);
    return { allowed: true, remaining: limit - currentCount - 1 };
  }
}
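
Because the GET-then-increment sequence above can race under concurrent requests, an INCR-first variant is often preferred: INCR is atomic, and the key receives its expiry on the first increment of each window. A sketch:

// Atomic fixed-window limiter (sketch; takes the raw node-redis client)
async function atomicRateLimit(client: RedisClientType, identifier: string, limit: number, windowSec: number) {
  const key = `rate_limit:${identifier}`;
  const count = await client.incr(key);
  if (count === 1) {
    await client.expire(key, windowSec); // first hit in this window sets the expiry
  }
  return { allowed: count <= limit, remaining: Math.max(0, limit - count) };
}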

// Rate limiting decorator
// (HttpException and HttpStatus are imported from @nestjs/common)
export function RateLimit(limit: number = 100, window: number = 60) {
  return function(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
    const originalMethod = descriptor.value;
    
    descriptor.value = async function(...args: any[]) {
      const req = args[0]; // Assume first arg is request
      const identifier = req.ip || req.user?.id || 'anonymous';
      
      // Assumes a shared redisService instance is in scope (e.g., a module-level singleton)
      const rateLimitService = new RateLimitService(redisService);
      const result = await rateLimitService.checkRateLimit(identifier, limit, window);
      
      if (!result.allowed) {
        throw new HttpException('Rate limit exceeded', HttpStatus.TOO_MANY_REQUESTS);
      }
      
      // Set rate limit headers
      const res = args[1];
      res.setHeader('X-RateLimit-Remaining', result.remaining.toString());
      
      return originalMethod.apply(this, args);
    };
    
    return descriptor;
  };
}

// Usage
@Get('api/data')
@RateLimit(50, 300) // 50 requests per 5 minutes
async getData(@Req() req, @Res() res) {
  return this.dataService.getData();
}

Advanced Redis Features

Lua Scripts

// Atomic increment with maximum value
const incrementScript = `
  local current = redis.call('GET', KEYS[1])
  local max_value = tonumber(ARGV[1])
  local increment = tonumber(ARGV[2])
  
  if current == false then
    current = 0
  else
    current = tonumber(current)
  end
  
  if current + increment <= max_value then
    redis.call('SET', KEYS[1], current + increment)
    return current + increment
  else
    return current
  end
`;

// Use the script
async incrementWithMax(key: string, maxValue: number, increment: number = 1): Promise<number> {
  return await this.client.eval(incrementScript, {
    keys: [key],
    arguments: [maxValue.toString(), increment.toString()]
  }) as number;
}
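
Calling it then looks like any other method. For example, to bump a hypothetical per-user quota counter capped at 100:

// Returns the new value, or the current value if the cap was reached (sketch)
const quota = await this.incrementWithMax('quota:user:1', 100, 1);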

Redis Streams

// Event sourcing with Redis Streams
class EventStore {
  async appendEvent(streamKey: string, eventData: any): Promise<string> {
    return await this.client.xAdd(streamKey, '*', eventData);
  }
  
  async readEvents(streamKey: string, start: string = '0', end: string = '+') {
    return await this.client.xRange(streamKey, start, end);
  }
  
  async subscribeToStream(streamKey: string, consumerGroup: string, consumer: string) {
    // Create consumer group if it doesn't exist
    try {
      await this.client.xGroupCreate(streamKey, consumerGroup, '0', { MKSTREAM: true });
    } catch (error) {
      // Ignore "group already exists"; rethrow anything else
      if (!String(error).includes('BUSYGROUP')) {
        throw error;
      }
    }
    
    // Read new messages as this consumer
    const messages = await this.client.xReadGroup(
      consumerGroup,
      consumer,
      { key: streamKey, id: '>' },
      { COUNT: 10 }
    );
    
    return messages;
  }
}
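
One step the example above leaves out: messages read through a consumer group stay in the group's pending list until they are acknowledged. A sketch of the missing acknowledgement, assuming the same class:

// Acknowledge a processed message so it leaves the pending entries list (sketch)
async ackEvent(streamKey: string, consumerGroup: string, messageId: string): Promise<number> {
  return await this.client.xAck(streamKey, consumerGroup, messageId);
}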

Monitoring and Maintenance

Health Checks and Metrics

# Redis CLI commands for monitoring
redis-cli ping
redis-cli info
redis-cli info memory
redis-cli info stats
redis-cli monitor

# Check connected clients
redis-cli client list

# Check memory usage of a specific key
redis-cli memory usage user:1

# Slow query log
redis-cli slowlog get 10
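
For automated checks, the same PING can be issued from application code. A sketch (assuming access to a connected node-redis client):

// Programmatic Redis health probe (sketch)
import { RedisClientType } from 'redis';

async function redisHealthy(client: RedisClientType): Promise<boolean> {
  try {
    return (await client.ping()) === 'PONG';
  } catch {
    return false;
  }
}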

Performance Tuning

# Redis configuration tuning
maxmemory 2gb
maxmemory-policy allkeys-lru

# Persistence tuning
save 900 1
save 300 10
save 60 10000

# Network tuning
tcp-keepalive 300
timeout 0

# Performance settings
lazyfree-lazy-eviction yes
lazyfree-lazy-expire yes
lazyfree-lazy-server-del yes

Backup and Persistence

RDB Snapshots

# Configure automatic snapshots
save 900 1      # Save if at least 1 key changed in 900 seconds
save 300 10     # Save if at least 10 keys changed in 300 seconds
save 60 10000   # Save if at least 10000 keys changed in 60 seconds

# Manual backup
redis-cli bgsave

# Check last save time
redis-cli lastsave

AOF (Append Only File)

# Enable AOF
appendonly yes
appendfilename "appendonly.aof"
appendfsync everysec

# AOF rewrite
auto-aof-rewrite-percentage 100
auto-aof-rewrite-min-size 64mb

# Manual AOF rewrite
redis-cli bgrewriteaof

Security Best Practices

Authentication and Access Control

# Set strong password
requirepass your-very-secure-password

# Rename dangerous commands
rename-command CONFIG ""
rename-command EVAL ""
rename-command DEBUG ""
rename-command SHUTDOWN REDIS_SHUTDOWN

# Network security
bind 127.0.0.1
protected-mode yes
port 6379

# Disable dangerous commands for specific users (Redis 6+)
user default on nopass ~* &* +@all -@dangerous
user app on >app-password ~app:* +@read +@write -@dangerous

Troubleshooting

Common Issues

# Check Redis logs
nself logs redis

# Test connection
redis-cli -h localhost -p 6379 ping

# Check memory usage
redis-cli info memory | grep used_memory_human

# Find large keys
redis-cli --bigkeys

# Monitor commands in real-time
redis-cli monitor

# Check slow queries
redis-cli slowlog get

Performance Debugging

# Check hit ratio
redis-cli info stats | grep keyspace

# Memory fragmentation
redis-cli info memory | grep mem_fragmentation_ratio

# Evicted keys (removed under memory pressure)
redis-cli info stats | grep evicted_keys

# Benchmark Redis performance
redis-benchmark -h localhost -p 6379 -c 50 -n 10000

Best Practices

Key Naming Conventions

  • Use Prefixes: user:123:profile, session:abc123
  • Hierarchical Structure: app:cache:user:123
  • Consistent Separators: Use colons (:) for namespacing
  • Expiration: Set TTL for temporary data

Performance Optimization

  • Connection Pooling: Reuse connections, don't create new ones
  • Pipeline Commands: Batch multiple commands together (see the sketch after this list)
  • Choose Right Data Types: Use hashes for objects, sets for unique collections
  • Monitor Memory Usage: Set appropriate maxmemory and eviction policies
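
As an example of the pipelining point above, node-redis batches commands through multi()/exec(), queuing them client-side and sending them in a single round trip. A sketch:

// Batch several commands into one round trip (sketch)
const [views, name] = await client
  .multi()
  .incr('page:views')
  .get('user:1:name')
  .exec();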

Next Steps

Now that you understand Redis in nself, you're ready to put these patterns to work in your own services.

Redis provides the high-performance caching and messaging backbone for your nself applications. Use it wisely to dramatically improve application performance and scalability.