This repository was archived by the owner on Jan 29, 2026. It is now read-only.
[Security] Implement Request Payload Size Validation #70
Open
Labels
bug (Something isn't working), codex (OpenAI's Codex), documentation (Improvements or additions to documentation), enhancement (New feature or request), gen/qol improvements
Description
⚠️ Priority: HIGH - Security & Stability
Background
While Express applies a default JSON payload limit, the application does not validate workflow/store data complexity (node counts, edge counts, nested structure depth), leaving it open to denial-of-service through oversized or overly complex payloads.
Current Implementation Gap
// backend/src/server.js (line 19)
app.use(express.json()); // Uses default limit of ~100kb

// backend/src/api/middleware/validation.js - MISSING complexity checks
export function validateWorkflowData(req, res, next) {
  const workflow = req.body;
  if (!workflow.metadata?.id) {
    return res.status(400).json({...});
  }
  // No checks for nodes.length, edges.length, or structure depth!
  next();
}
Attack Scenarios
- Node/Edge Bomb: Submit workflow with 100,000 nodes → memory exhaustion (see the sketch after this list)
- Deep Nesting: Submit deeply nested store state → stack overflow during processing
- Large String Fields: Submit 10MB description field → JSON parsing overhead
- Array Flooding: Submit workflow with 50,000 edges → processing delay
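For illustration, the first scenario takes only a few lines to reproduce. A hypothetical sketch (the /api/workflows path follows the testing plan below; the host and port are assumptions):

// Hypothetical sketch: build and send a node-bomb payload (Node 18+, built-in fetch).
const nodes = Array.from({ length: 100_000 }, (_, i) => ({
  id: `node-${i}`,
  type: 'node',
  data: { label: `Node ${i}` }
}));

await fetch('http://localhost:3000/api/workflows', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ metadata: { id: 'node-bomb' }, nodes, edges: [] })
});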
Recommended Solution
Part 1: Express JSON Size Limit
// backend/src/server.js (update line 19)
app.use(express.json({
  limit: '1mb',  // Maximum request body size
  strict: true   // Only parse arrays and objects
}));
app.use(express.urlencoded({
  extended: true,
  limit: '1mb'
}));
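When a body exceeds this limit, Express's body parser raises an "entity too large" error (err.type === 'entity.too.large', status 413). If the API should answer in the same JSON error shape used elsewhere in this issue, an error handler can translate it. A minimal sketch, registered after the routes in backend/src/server.js (the response shape is an assumption):

// Sketch: translate body-parser size errors into the API's JSON error format.
app.use((err, req, res, next) => {
  if (err.type === 'entity.too.large') {
    return res.status(413).json({
      error: { message: 'Request payload too large (max 1mb)' }
    });
  }
  next(err);
});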
Part 2: Workflow Complexity Validation
// backend/src/api/middleware/validation.js
const LIMITS = {
  MAX_NODES: 1000,
  MAX_EDGES: 5000,
  MAX_NAME_LENGTH: 200,
  MAX_DESCRIPTION_LENGTH: 5000,
  MAX_TAGS: 50,
  MAX_NESTED_DEPTH: 10
};

export function validateWorkflowData(req, res, next) {
  const workflow = req.body;
  const errors = [];

  // Node count validation
  if (workflow.nodes && workflow.nodes.length > LIMITS.MAX_NODES) {
    errors.push(`Too many nodes (max ${LIMITS.MAX_NODES})`);
  }

  // Edge count validation
  if (workflow.edges && workflow.edges.length > LIMITS.MAX_EDGES) {
    errors.push(`Too many edges (max ${LIMITS.MAX_EDGES})`);
  }

  // String field length validation
  if (workflow.metadata?.name?.length > LIMITS.MAX_NAME_LENGTH) {
    errors.push(`Name too long (max ${LIMITS.MAX_NAME_LENGTH} characters)`);
  }
  if (workflow.metadata?.description?.length > LIMITS.MAX_DESCRIPTION_LENGTH) {
    errors.push(`Description too long (max ${LIMITS.MAX_DESCRIPTION_LENGTH} characters)`);
  }

  // Tags validation
  if (workflow.metadata?.tags?.length > LIMITS.MAX_TAGS) {
    errors.push(`Too many tags (max ${LIMITS.MAX_TAGS})`);
  }

  // Node data complexity (prevent nested bombs)
  if (workflow.nodes) {
    for (const node of workflow.nodes) {
      if (getObjectDepth(node) > LIMITS.MAX_NESTED_DEPTH) {
        errors.push(`Node data too deeply nested (max depth ${LIMITS.MAX_NESTED_DEPTH})`);
        break;
      }
    }
  }

  if (errors.length > 0) {
    return res.status(400).json({
      error: {
        message: 'Workflow validation failed',
        details: errors
      }
    });
  }
  next();
}

function getObjectDepth(obj, depth = 0) {
  if (depth > 20) return depth; // Safety cutoff
  if (!obj || typeof obj !== 'object') return depth;
  const depths = Object.values(obj).map(v => getObjectDepth(v, depth + 1));
  return Math.max(depth, ...depths);
}
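For reference, the depth helper counts nested object/array levels; the following values follow directly from its definition above:

// Depth values produced by getObjectDepth:
getObjectDepth('hello');                               // 0 – primitives add no depth
getObjectDepth({ id: 'a', type: 'node' });             // 1 – flat node object
getObjectDepth({ data: { config: { retries: 3 } } });  // 3 – three nested levels
// A node is rejected only when its depth exceeds MAX_NESTED_DEPTH (10).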
Part 3: Store State Validation
// backend/src/api/middleware/validation.js
export function validateStoreData(req, res, next) {
  const store = req.body;
  const errors = [];

  // Viewport validation
  if (store.viewport) {
    if (typeof store.viewport.zoom !== 'number' ||
        store.viewport.zoom < 0.1 ||
        store.viewport.zoom > 10) {
      errors.push('Invalid viewport zoom (must be between 0.1 and 10)');
    }
  }

  // Selection validation
  if (store.selectedNodes && store.selectedNodes.length > LIMITS.MAX_NODES) {
    errors.push(`Too many selected nodes (max ${LIMITS.MAX_NODES})`);
  }

  // Nested depth check
  if (getObjectDepth(store) > LIMITS.MAX_NESTED_DEPTH) {
    errors.push(`Store state too deeply nested (max depth ${LIMITS.MAX_NESTED_DEPTH})`);
  }

  if (errors.length > 0) {
    return res.status(400).json({
      error: { message: 'Store validation failed', details: errors }
    });
  }
  next();
}
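A sketch of how the middleware might be wired into the routes listed below; the router layout and handler names (createWorkflow, updateWorkflow) are hypothetical and only for illustration:

// backend/src/api/routes/workflows.js (sketch – actual handler names may differ)
import { Router } from 'express';
import { validateWorkflowData } from '../middleware/validation.js';
import { createWorkflow, updateWorkflow } from './handlers/workflows.js'; // hypothetical path

const router = Router();
router.post('/', validateWorkflowData, createWorkflow);
router.put('/:id', validateWorkflowData, updateWorkflow);
export default router;

// backend/src/api/routes/store.js would apply validateStoreData to its PUT route the same way.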
Files to Modify
- backend/src/server.js (update express.json middleware, line 19)
- backend/src/api/middleware/validation.js (add complexity checks)
- backend/src/api/routes/workflows.js (apply validateWorkflowData to POST/PUT)
- backend/src/api/routes/store.js (apply validateStoreData to PUT)
Configuration File (Optional)
Create backend/src/config/limits.js:
export const LIMITS = {
  // Request size
  MAX_REQUEST_SIZE: '1mb',

  // Workflow limits
  MAX_NODES: parseInt(process.env.MAX_NODES, 10) || 1000,
  MAX_EDGES: parseInt(process.env.MAX_EDGES, 10) || 5000,
  MAX_NAME_LENGTH: 200,
  MAX_DESCRIPTION_LENGTH: 5000,
  MAX_TAGS: 50,

  // Structure limits
  MAX_NESTED_DEPTH: 10,
  MAX_ARRAY_LENGTH: 10000,

  // Rate limiting (already exists)
  RATE_LIMIT_WINDOW_MS: 60000,
  RATE_LIMIT_MAX_REQUESTS: 100
};
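If this file is added, the hard-coded LIMITS object in validation.js and the inline '1mb' in server.js can be replaced by imports. A sketch (relative paths assume the layout above):

// backend/src/server.js (sketch)
import { LIMITS } from './config/limits.js';
app.use(express.json({ limit: LIMITS.MAX_REQUEST_SIZE, strict: true }));

// backend/src/api/middleware/validation.js (sketch)
import { LIMITS } from '../../config/limits.js';
// ...and use LIMITS.MAX_NODES etc. instead of the local constant.

Node and edge ceilings can then be tuned per environment, e.g. by exporting MAX_NODES=2000 before starting the server.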
Acceptance Criteria
- Express JSON parser limited to 1MB request size
- Workflow validation checks node count (max 1000)
- Workflow validation checks edge count (max 5000)
- Workflow validation checks name/description length
- Workflow validation checks nested depth (max 10 levels)
- Store state validation checks structure complexity
- Validation errors return 400 with detailed error messages
- Tests added for boundary conditions (at limit, over limit)
- Documentation updated with API limits
- Limits configurable via environment variables
Testing Plan
// Test cases to implement
describe('Payload Validation', () => {
  it('should reject workflow with too many nodes', async () => {
    const workflow = {
      nodes: Array(1001).fill({ id: 'test', type: 'node' }),
      edges: []
    };
    const res = await request(app)
      .post('/api/workflows')
      .send(workflow)
      .expect(400);
    expect(res.body.error.details[0]).toContain('Too many nodes');
  });

  it('should accept workflow at node limit', async () => {
    const workflow = {
      nodes: Array(1000).fill({ id: 'test', type: 'node' }),
      edges: []
    };
    await request(app).post('/api/workflows').send(workflow).expect(201);
  });

  // Add more test cases...
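  // Sketch (assumption): a boundary test for the nesting limit, following the same pattern.
  it('should reject a node nested deeper than the limit', async () => {
    // Build node data one level deeper than MAX_NESTED_DEPTH (10).
    let data = { value: 1 };
    for (let i = 0; i < 11; i++) data = { nested: data };
    const workflow = { nodes: [{ id: 'deep', type: 'node', data }], edges: [] };
    const res = await request(app).post('/api/workflows').send(workflow).expect(400);
    expect(res.body.error.details[0]).toContain('too deeply nested');
  });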
});
References
- Pull Request: [PDE-3] Refactor: adding TUI & other upgrades #66
- Review Comment: [PDE-3] Refactor: adding TUI & other upgrades #66
- Affected Files:
  - backend/src/server.js
  - backend/src/api/middleware/validation.js
  - backend/src/api/routes/workflows.js
  - backend/src/api/routes/store.js
Additional Context
This prevents both accidental and malicious large payloads from impacting system stability. Consider monitoring actual usage patterns to adjust limits appropriately.