docker run --name pixelcraft-redis \
-p 6379:6379 -d redis
npm install ioredis
const Redis = require('ioredis');
const redis = new Redis();
// Cache-aside pattern for gallery
// Cache-aside pattern for gallery
app.get('/api/images', authenticate,
async (req, res) => {
  // Normalize the page number once so the cache key and the DB query
  // are guaranteed to agree (NaN / missing / <1 all become page 1).
  const page = Math.max(1, Number.parseInt(req.query.page, 10) || 1);
  const pageSize = 20;
  const cacheKey = `images:${req.userId}:page:${page}`;
  // Check cache first
  const cached = await redis.get(cacheKey);
  if (cached) {
    return res.json(JSON.parse(cached));
  }
  // Cache miss — query database.
  // BUG FIX: the original keyed the cache by page but never applied
  // .skip(), so every page silently served the same first 20 images.
  const images = await Image.find({
    owner: req.userId
  })
    .sort('-createdAt')
    .skip((page - 1) * pageSize)
    .limit(pageSize);
  // Store in cache with 60-second TTL
  await redis.set(cacheKey,
    JSON.stringify(images), 'EX', 60);
  res.json(images);
});
app.post('/api/images/upload', authenticate,
async (req, res) => {
  // ... upload logic ...
  // Invalidate this user's gallery cache.
  // FIX: KEYS is O(N) over the ENTIRE keyspace and blocks the Redis
  // event loop — unsafe in production. SCAN iterates incrementally
  // with a cursor and never blocks the server.
  let cursor = '0';
  do {
    const [nextCursor, keys] = await redis.scan(
      cursor,
      'MATCH', `images:${req.userId}:*`,
      'COUNT', 100
    );
    cursor = nextCursor;
    if (keys.length > 0) {
      await redis.del(...keys);
    }
  } while (cursor !== '0'); // '0' signals the scan is complete
  res.status(201).json(image);
});
/**
 * Fixed-window rate limiter: at most 100 requests per IP per 60-second
 * window, tracked with a Redis counter.
 *
 * @param {object} req  - Express request (uses req.ip).
 * @param {object} res  - Express response (sends 429 when over limit).
 * @param {Function} next - Express next() continuation.
 */
async function rateLimit(req, res, next) {
  const key = `ratelimit:${req.ip}`;
  let current;
  try {
    current = await redis.incr(key);
    if (current === 1) {
      // First hit in this window: start the 60-second TTL.
      // NOTE(review): if the process dies between INCR and EXPIRE the key
      // never expires; an atomic Lua script would close that gap — confirm
      // whether that risk matters for this deployment.
      await redis.expire(key, 60);
    }
  } catch (err) {
    // FIX: the original had no error handling, so a Redis outage turned
    // every request into an unhandled promise rejection. Fail open —
    // a down cache should not take the whole API down with it.
    console.error('rateLimit: Redis unavailable, failing open', err);
    return next();
  }
  if (current > 100) {
    // 100 requests per minute
    return res.status(429).json({
      error: 'Too many requests. ' +
      'Try again in a minute.'
    });
  }
  next();
}
app.use('/api/', rateLimit);
| Request | Without Redis | With Redis |
|---|---|---|
| First request | 50ms (DB query) | 50ms (cache miss → DB) |
| Second request | 50ms (DB again) | 1ms (cache hit!) |
git switch -c feature/PIXELCRAFT-055-redis
git add server/
git commit -m "Add Redis caching + rate limiting (PIXELCRAFT-055)"
git push origin feature/PIXELCRAFT-055-redis
# PR → Review → Merge → Close ticket ✅
Cache hierarchies exist at every level of computing.
Each level trades capacity for speed. Cache invalidation is famously one of the "two hard things in computer science" — alongside naming things, per Phil Karlton's well-known quip.