This repository was archived by the owner on Jan 29, 2026. It is now read-only.
[Infrastructure] Persist Rate Limit Data Across Restarts #75
Labels: bug, codex, documentation, enhancement, gen/qol improvements
Description
🔒 Priority: MEDIUM - Production Readiness
Background
The current rate limiting implementation at backend/src/api/middleware/rateLimit.js uses an in-memory Map that is lost on server restart, allowing attackers to bypass limits by forcing restarts or exploiting deployment cycles.
Current Implementation - Volatile Rate Limits
// backend/src/api/middleware/rateLimit.js (line 9)
const requestCounts = new Map(); // Lost on restart
export function rateLimit(req, res, next) {
const clientId = req.clientId || req.ip;
const now = Date.now();
const windowStart = now - WINDOW_MS;
// Get or initialize count for this client
if (!requestCounts.has(clientId)) {
requestCounts.set(clientId, []);
}
const requests = requestCounts.get(clientId);
// ...
}
Security Concern
Attack Scenario:
- Attacker makes 99 requests (just under 100 limit)
- Attacker triggers server restart (crash, deploy, etc.)
- Rate limit counter resets to 0
- Attacker makes another 99 requests
- Repeat → 10x normal rate limit bypass
Recommended Solutions
Option 1: File-Based Persistence (Simple, Single Instance)
// backend/src/api/middleware/rateLimit.js
// (WINDOW_MS and logger are assumed to already be defined in this module)
import fs from 'fs/promises';
import path from 'path';
const RATE_LIMIT_FILE = path.join(process.cwd(), '.data', 'rate-limits.json');
const requestCounts = new Map();
let persistenceInterval;
// Load rate limits on startup
async function loadRateLimits() {
try {
const data = await fs.readFile(RATE_LIMIT_FILE, 'utf-8');
const stored = JSON.parse(data);
// Convert stored array back to Map and filter expired entries
const now = Date.now();
const windowStart = now - WINDOW_MS;
for (const [clientId, requests] of stored) {
const validRequests = requests.filter(timestamp => timestamp > windowStart);
if (validRequests.length > 0) {
requestCounts.set(clientId, validRequests);
}
}
logger.info({ clients: requestCounts.size }, 'Rate limits loaded from disk');
} catch (error) {
if (error.code !== 'ENOENT') {
logger.error({ error }, 'Failed to load rate limits');
}
}
}
// Persist rate limits periodically
async function persistRateLimits() {
try {
const now = Date.now();
const windowStart = now - WINDOW_MS;
// Clean up expired entries before persisting
const activeClients = [];
for (const [clientId, requests] of requestCounts.entries()) {
const validRequests = requests.filter(timestamp => timestamp > windowStart);
if (validRequests.length > 0) {
activeClients.push([clientId, validRequests]);
} else {
requestCounts.delete(clientId);
}
}
// Ensure the .data directory exists before writing
await fs.mkdir(path.dirname(RATE_LIMIT_FILE), { recursive: true });
await fs.writeFile(RATE_LIMIT_FILE, JSON.stringify(activeClients));
logger.debug({ clients: activeClients.length }, 'Rate limits persisted to disk');
} catch (error) {
logger.error({ error }, 'Failed to persist rate limits');
}
}
// Start persistence scheduler
export function startRateLimitPersistence() {
loadRateLimits(); // Load on startup
// Persist every 30 seconds
persistenceInterval = setInterval(persistRateLimits, 30000);
// Persist on graceful shutdown
process.on('SIGTERM', async () => {
await persistRateLimits();
});
process.on('SIGINT', async () => {
await persistRateLimits();
});
}
export function stopRateLimitPersistence() {
if (persistenceInterval) {
clearInterval(persistenceInterval);
persistenceInterval = null;
}
}
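One refinement worth considering for Option 1 (not shown above): write to a temporary file and rename it into place, so a crash mid-write cannot leave a truncated rate-limits.json. A minimal sketch, reusing the RATE_LIMIT_FILE constant and requestCounts map from the snippet above:

```js
// Hypothetical variant of persistRateLimits() with atomic writes.
// Assumes fs/promises, path, RATE_LIMIT_FILE and requestCounts as declared above.
async function persistRateLimitsAtomic() {
  const tmpFile = `${RATE_LIMIT_FILE}.tmp`;
  const payload = JSON.stringify([...requestCounts.entries()]);
  await fs.mkdir(path.dirname(RATE_LIMIT_FILE), { recursive: true }); // make sure .data/ exists
  await fs.writeFile(tmpFile, payload);
  // rename() is atomic on the same filesystem, so readers never observe a half-written file
  await fs.rename(tmpFile, RATE_LIMIT_FILE);
}
```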
Option 2: Redis Backend (Production, Distributed)
npm install ioredis
// backend/src/api/middleware/rateLimit.js
// (WINDOW_MS, MAX_REQUESTS_PER_WINDOW, logger and the existing in-memory logic are assumed to be defined in this module)
import Redis from 'ioredis';
const redis = new Redis({
host: process.env.REDIS_HOST || 'localhost',
port: parseInt(process.env.REDIS_PORT) || 6379,
password: process.env.REDIS_PASSWORD,
db: parseInt(process.env.REDIS_DB) || 0,
lazyConnect: true
});
let useRedis = false;
// Initialize Redis connection
export async function initRateLimitStore() {
try {
await redis.connect();
useRedis = true;
logger.info('Rate limiting using Redis backend');
} catch (error) {
logger.warn({ error }, 'Redis unavailable, falling back to in-memory rate limiting');
useRedis = false;
}
}
export async function rateLimit(req, res, next) {
const clientId = req.clientId || req.ip;
const now = Date.now();
if (useRedis) {
await rateLimitRedis(clientId, now, req, res, next);
} else {
rateLimitMemory(clientId, now, req, res, next);
}
}
async function rateLimitRedis(clientId, now, req, res, next) {
const key = `ratelimit:${clientId}`;
try {
// Use Redis sorted set with timestamps as scores
const windowStart = now - WINDOW_MS;
// Remove old entries
await redis.zremrangebyscore(key, 0, windowStart);
// Count current requests in window
const count = await redis.zcard(key);
if (count >= MAX_REQUESTS_PER_WINDOW) {
const oldestRequest = await redis.zrange(key, 0, 0, 'WITHSCORES');
const resetTime = oldestRequest.length > 0
? parseInt(oldestRequest[1]) + WINDOW_MS
: now + WINDOW_MS;
return res.status(429).json({
error: {
message: 'Too many requests',
retryAfter: Math.ceil((resetTime - now) / 1000)
}
});
}
// Add current request
await redis.zadd(key, now, `${now}-${Math.random()}`);
await redis.expire(key, Math.ceil(WINDOW_MS / 1000));
// Add headers
res.setHeader('X-RateLimit-Limit', MAX_REQUESTS_PER_WINDOW);
res.setHeader('X-RateLimit-Remaining', MAX_REQUESTS_PER_WINDOW - count - 1);
res.setHeader('X-RateLimit-Reset', Math.ceil((now + WINDOW_MS) / 1000));
next();
} catch (error) {
logger.error({ error }, 'Redis rate limit check failed, allowing request');
next(); // Fail open
}
}
function rateLimitMemory(clientId, now, req, res, next) {
// ... existing in-memory implementation ...
}
Part 3: Integrate with Server
// backend/src/server.js
import { startRateLimitPersistence } from './api/middleware/rateLimit.js';
// or
import { initRateLimitStore } from './api/middleware/rateLimit.js';
// After database initialization
if (process.env.REDIS_HOST) {
await initRateLimitStore(); // Redis option
} else {
startRateLimitPersistence(); // File-based option
}
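If the file-based option is chosen, the shutdown path should also clear the persistence timer so the process can exit cleanly. A minimal sketch, assuming server.js has (or adds) a SIGTERM hook:

```js
// Hypothetical shutdown wiring in backend/src/server.js (file-based option).
// stopRateLimitPersistence() clears the 30-second interval started in rateLimit.js;
// the SIGTERM handler inside rateLimit.js already flushes the data to disk.
import { stopRateLimitPersistence } from './api/middleware/rateLimit.js';

process.on('SIGTERM', () => {
  stopRateLimitPersistence();
  // ... existing shutdown logic (close HTTP server, database connections, etc.)
});
```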
Files to Modify
- backend/src/api/middleware/rateLimit.js (add persistence logic)
- backend/src/server.js (initialize persistence)
- backend/package.json (add ioredis if using Redis)
Environment Variables
# Redis configuration (optional)
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=your-redis-password
REDIS_DB=0
Comparison: File vs Redis
| Feature | File-Based | Redis |
|---|---|---|
| Simplicity | ⭐⭐⭐⭐⭐ | ⭐⭐⭐ |
| Performance | ⭐⭐⭐ | ⭐⭐⭐⭐⭐ |
| Multi-instance | ❌ | ✅ |
| Persistence | ✅ | ✅ (with AOF/RDB) |
| Memory efficiency | ⭐⭐⭐ | ⭐⭐⭐⭐⭐ |
| Setup complexity | Low | Medium |
| Dependencies | None | Redis server |
Recommendation
- Development/Single Instance: File-based persistence
- Production/Multiple Instances: Redis backend
Acceptance Criteria
File-Based:
- Rate limit data persisted to .data/rate-limits.json
- Data loaded on server startup
- Data persisted every 30 seconds
- Data persisted on graceful shutdown
- Expired entries cleaned up before persistence
- Tests verify persistence across restart
Redis (Optional):
- Redis client configured with connection pooling
- Sorted sets used for efficient time-window queries
- Fallback to in-memory if Redis unavailable
- Connection errors logged but don't block requests
- Tests verify multi-instance rate limiting
Testing Plan
// Assumes supertest's request(app) and the persistence helpers exported from rateLimit.js
describe('Rate Limit Persistence', () => {
it('should persist rate limits across restart', async () => {
// Make 50 requests
for (let i = 0; i < 50; i++) {
await request(app).get('/api/workflows').expect(200);
}
// Trigger persistence
await persistRateLimits();
// Simulate restart
requestCounts.clear();
await loadRateLimits();
// Should still have history
// (getRemainingRequests is an assumed test helper that reports the remaining quota for clientId)
const remaining = await getRemainingRequests(clientId);
expect(remaining).toBe(50); // 50 requests left in the 100-request window
});
});
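For the Redis acceptance criterion on multi-instance limiting, a rough sketch of what such a test could look like; the shared redis client and the 100-request limit are assumptions carried over from the snippets above:

```js
describe('Rate Limit Persistence (Redis)', () => {
  it('should share rate limit counts across instances', async () => {
    const clientId = 'multi-instance-client';
    const key = `ratelimit:${clientId}`; // same key scheme as rateLimitRedis()
    await redis.del(key);

    // Simulate two instances recording requests in the same Redis sorted set
    for (let i = 0; i < 60; i++) await redis.zadd(key, Date.now(), `instance-a-${i}`);
    for (let i = 0; i < 50; i++) await redis.zadd(key, Date.now(), `instance-b-${i}`);

    // Either instance's window check would now see the combined count
    const count = await redis.zcard(key);
    expect(count).toBeGreaterThan(100); // exceeds MAX_REQUESTS_PER_WINDOW
  });
});
```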
References
- Pull Request: [PDE-3] Refactor: adding TUI & other upgrades #66
- Review Comment: [PDE-3] Refactor: adding TUI & other upgrades #66
- Affected Files:
- backend/src/api/middleware/rateLimit.js
- backend/src/server.js
Additional Context
Consider implementing this alongside the database persistence improvements (#68) for a consistent data durability strategy.