Practical Examples

A set of patterns, snippets and best practices showing how to integrate Redistill into real-world web applications and services.

Data sizes & basic operations

Redistill is optimized for typical cache payload sizes:

  • Session tokens: 32–256 bytes.
  • API JSON responses: 1–10 KB.
  • HTML fragments/pages: 10–200 KB.
# Small key-value (session)
SET "session:user:a1b2c3d4" "user_id=12345,logged_in=true,role=admin"

# Medium JSON response (API cache)
SET "api:users:12345" "{...json...}" EX 300

# Large HTML page cache
SET "page:/products/12345" "<html>...</html>" EX 3600

HTTP response caching (Python + Flask)

import json
import redis
from flask import Flask, jsonify
from functools import wraps

# Module-level Flask app and Redis client. decode_responses=True makes GET
# return str instead of bytes, so cached payloads can be json.loads'd directly.
app = Flask(__name__)
cache = redis.Redis(host="localhost", port=6379, decode_responses=True)

def cache_response(ttl=300):
    """Decorator that caches a view's JSON-serializable return value in Redis.

    The wrapped view must return a JSON-serializable object (not a Flask
    Response). Hits are served with an ``X-Cache: HIT`` header; misses are
    computed, stored for *ttl* seconds via SETEX, and marked ``MISS``.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Build the key from BOTH positional and keyword arguments.
            # Flask passes URL variables (e.g. <int:user_id>) to the view as
            # keyword arguments, so keying on *args alone would give every
            # request the same cache key — serving one user's cached data
            # to all users.
            parts = [str(a) for a in args]
            parts += [f"{k}={v}" for k, v in sorted(kwargs.items())]
            cache_key = f"api:{func.__name__}:{':'.join(parts)}"

            cached = cache.get(cache_key)
            if cached is not None:
                return jsonify(json.loads(cached)), 200, {"X-Cache": "HIT"}

            result = func(*args, **kwargs)
            cache.setex(cache_key, ttl, json.dumps(result))
            return jsonify(result), 200, {"X-Cache": "MISS"}

        return wrapper
    return decorator

@app.route("/api/users/<int:user_id>")
@cache_response(ttl=300)
def get_user(user_id):
    """Return a user's public fields as a dict; cached for five minutes."""
    record = load_user_from_db(user_id)
    return {field: getattr(record, field) for field in ("id", "name", "email")}

JSON API caching (Node.js + Express)

const express = require("express");
const Redis = require("ioredis");

const app = express();
const redis = new Redis({ host: "localhost", port: 6379 });

/**
 * Express middleware factory that caches JSON responses in Redis, keyed by
 * the full request URL (query string included). Hits short-circuit with an
 * `X-Cache: HIT` header; misses run the handler and store its payload for
 * `ttl` seconds. Any Redis error degrades gracefully to an uncached response.
 */
const cacheMiddleware = (ttl = 300) => async (req, res, next) => {
  const cacheKey = `api:${req.originalUrl}`;
  try {
    const cached = await redis.get(cacheKey);
    if (cached) {
      res.set("X-Cache", "HIT");
      return res.json(JSON.parse(cached));
    }

    // Wrap res.json so the handler's payload is written to the cache on
    // its way out to the client.
    const originalJson = res.json.bind(res);
    res.json = (data) => {
      // Fire-and-forget write — but attach .catch(): without it, a Redis
      // failure here becomes an unhandled promise rejection, which crashes
      // the process on modern Node versions.
      redis.setex(cacheKey, ttl, JSON.stringify(data)).catch(() => {});
      res.set("X-Cache", "MISS");
      return originalJson(data);
    };

    next();
  } catch {
    // Cache unavailable: fall through to the real handler, uncached.
    next();
  }
};

// Product listing, cached for 10 minutes per full URL (query string included).
app.get("/api/products", cacheMiddleware(600), async (req, res) => {
  const products = await loadProductsFromDb();
  res.json(products);
});

Using hashes for profiles & config

Hashes are ideal for structured data like user profiles or configuration objects.

# Create a user profile
HSET user:1001 name "John Doe" email "john@example.com" age "30" city "New York"

# Get a single field
HGET user:1001 name

# Get all fields
HGETALL user:1001

Python: user profiles

import redis

# Shared client; decode_responses=True makes HGETALL return str values.
cache = redis.Redis(host="localhost", port=6379, decode_responses=True)

def create_user_profile(user_id, profile):
    """Store *profile* (a flat dict of string fields) as the hash user:<id>."""
    cache.hset(f"user:{user_id}", mapping=profile)

def get_user_profile(user_id):
    """Fetch the user:<id> hash; return its field dict, or None if absent."""
    fields = cache.hgetall(f"user:{user_id}")
    if not fields:
        # HGETALL yields an empty dict for a missing key.
        return None
    return fields

# Example: persist a profile. Hash values are strings, so numeric fields
# such as "age" are stored (and read back) as str.
create_user_profile(1001, {
    "name": "John Doe",
    "email": "john@example.com",
    "age": "30",
    "city": "New York",
})

Session storage

from flask import Flask, session
from flask_session import Session
import redis

app = Flask(__name__)
# Keep session data server-side in Redis; the client cookie holds only the
# session ID, not the payload.
app.config["SESSION_TYPE"] = "redis"
app.config["SESSION_REDIS"] = redis.Redis(host="localhost", port=6379)
Session(app)

@app.route("/login", methods=["POST"])
def login():
    """Record the authenticated identity in the Redis-backed session.

    Credential checking is omitted for brevity — this snippet only shows
    session writes.
    """
    session["user_id"] = 12345
    session["username"] = "john_doe"
    session.permanent = True  # expires after configured lifetime
    return {"status": "logged_in"}

Rate limiting

import redis
from datetime import datetime, timezone

# Counters are compared as ints, so raw bytes responses are fine here.
cache = redis.Redis(host="localhost", port=6379)

def rate_limit(user_id, max_requests=100, window=3600):
    """Fixed-window rate limiter: allow up to *max_requests* per UTC hour.

    Returns True if this request is allowed, False if the caller has
    exhausted the current window.
    """
    # Bucket the key by UTC hour. datetime.utcnow() is deprecated (3.12+),
    # so use an aware datetime instead.
    key = f"ratelimit:{user_id}:{datetime.now(timezone.utc).strftime('%Y%m%d%H')}"

    # INCR first, then compare. The original GET-then-INCR pattern had a
    # check/update race: concurrent requests could all read a count below
    # the limit and slip through. INCR is atomic, so the value it returns
    # is authoritative.
    count = cache.incr(key)
    if count == 1:
        # First hit in this window: attach a TTL so the key expires on its
        # own instead of refreshing the TTL on every request.
        cache.expire(key, window)
    return count <= max_requests

Key scanning with SCAN

Use SCAN instead of KEYS * in production to avoid blocking the server.

import redis

# Plain client; SCAN returns key names as bytes without decode_responses.
cache = redis.Redis(host="localhost", port=6379)

def scan_keys(pattern="*", count=100):
    """Return all keys matching *pattern* using cursor-based SCAN.

    Unlike ``KEYS``, SCAN walks the keyspace incrementally and never blocks
    the server for the duration of a full scan. *count* is a per-round-trip
    hint to Redis, not a limit on the number of results.
    """
    # redis-py's scan_iter wraps the cursor loop (SCAN until cursor == 0)
    # and yields keys lazily; materialize to a list to keep the original
    # return type.
    return list(cache.scan_iter(match=pattern, count=count))

# Example: collect every session key without blocking the server.
session_keys = scan_keys("session:*")