webupdatev1
backend/middleware/apiOptimization.js (normal file, 310 lines)
@@ -0,0 +1,310 @@
/**
 * API Response Optimization Middleware
 * Implements response batching, field filtering, and pagination
 */
const logger = require("../config/logger");

/**
 * Enable response compression for API endpoints
 */
const enableCompression = (req, res, next) => {
  // Already handled by global compression middleware
  next();
};

/**
 * Add cache headers for GET requests
 * SAFEGUARD: Checks headers not already sent before setting
 */
const addCacheHeaders = (maxAge = 300) => {
  return (req, res, next) => {
    if (req.method === "GET" && !res.headersSent) {
      try {
        res.set({
          "Cache-Control": `public, max-age=${maxAge}`,
          Vary: "Accept-Encoding",
        });
      } catch (error) {
        logger.warn("Failed to set cache headers", { error: error.message });
      }
    }
    next();
  };
};

/**
 * Field filtering middleware
 * Allows clients to request only specific fields: ?fields=id,name,price
 * SAFEGUARD: Validates field names to prevent injection attacks
 */
const fieldFilter = (req, res, next) => {
  const originalJson = res.json.bind(res);

  res.json = function (data) {
    const fields = req.query.fields;

    if (!fields || !data || res.headersSent) {
      return originalJson(data);
    }

    try {
      // SAFEGUARD: Validate field names (alphanumeric, underscore, dot only)
      if (!/^[a-zA-Z0-9_.,\s]+$/.test(fields)) {
        logger.warn("Invalid field filter attempted", { fields });
        return originalJson(data);
      }

      const fieldList = fields
        .split(",")
        .map((f) => f.trim())
        .filter(Boolean);

      // SAFEGUARD: Limit number of fields
      if (fieldList.length > 50) {
        logger.warn("Too many fields requested", { count: fieldList.length });
        return originalJson(data);
      }

      const filterObject = (obj) => {
        if (!obj || typeof obj !== "object") return obj;

        const filtered = {};
        fieldList.forEach((field) => {
          if (field in obj) {
            filtered[field] = obj[field];
          }
        });
        return filtered;
      };

      if (Array.isArray(data)) {
        data = data.map(filterObject);
      } else if (data.success !== undefined && data.data) {
        // Handle wrapped responses
        if (Array.isArray(data.data)) {
          data.data = data.data.map(filterObject);
        } else {
          data.data = filterObject(data.data);
        }
      } else {
        data = filterObject(data);
      }

      return originalJson(data);
    } catch (error) {
      logger.error("Field filter error", { error: error.message });
      return originalJson(data);
    }
  };

  next();
};

/**
 * Pagination middleware
 * Adds pagination support: ?page=1&limit=20
 */
const paginate = (defaultLimit = 20, maxLimit = 100) => {
  return (req, res, next) => {
    const page = Math.max(1, parseInt(req.query.page) || 1);
    const limit = Math.min(
      maxLimit,
      Math.max(1, parseInt(req.query.limit) || defaultLimit)
    );
    const offset = (page - 1) * limit;

    req.pagination = {
      page,
      limit,
      offset,
      maxLimit,
    };

    // Helper to add pagination info to response
    res.paginate = (data, total) => {
      const totalPages = Math.ceil(total / limit);
      return res.json({
        success: true,
        data,
        pagination: {
          page,
          limit,
          total,
          totalPages,
          hasNext: page < totalPages,
          hasPrev: page > 1,
        },
      });
    };

    next();
  };
};

/**
 * Response time tracking
 * SAFEGUARD: Checks headers not sent before setting X-Response-Time header
 */
const trackResponseTime = (req, res, next) => {
  const start = Date.now();

  res.on("finish", () => {
    const duration = Date.now() - start;

    // Log slow requests
    if (duration > 1000) {
      logger.warn("Slow API request", {
        method: req.method,
        path: req.path,
        duration: `${duration}ms`,
        status: res.statusCode,
      });
    }

    // Add response time header only if headers haven't been sent
    if (!res.headersSent) {
      try {
        res.set("X-Response-Time", `${duration}ms`);
      } catch (error) {
        logger.debug("Could not set X-Response-Time header", {
          error: error.message,
        });
      }
    }
  });

  next();
};

/**
 * ETag generation for GET requests
 * SAFEGUARD: Checks headersSent before setting headers
 */
const generateETag = (req, res, next) => {
  if (req.method !== "GET") {
    return next();
  }

  const originalJson = res.json.bind(res);

  res.json = function (data) {
    try {
      // SAFEGUARD: Don't process if headers already sent
      if (res.headersSent) {
        return originalJson(data);
      }

      // Generate simple ETag from stringified data
      const dataStr = JSON.stringify(data);
      const etag = `W/"${Buffer.from(dataStr).length.toString(16)}"`;

      // Check if client has cached version
      if (req.headers["if-none-match"] === etag) {
        res.status(304).end();
        return;
      }

      res.set("ETag", etag);
      return originalJson(data);
    } catch (error) {
      logger.error("ETag generation error", { error: error.message });
      return originalJson(data);
    }
  };

  next();
};

/**
 * JSON response size optimization
 * Removes null values and compacts responses
 */
const optimizeJSON = (req, res, next) => {
  const originalJson = res.json.bind(res);

  res.json = function (data) {
    if (data && typeof data === "object") {
      data = removeNulls(data);
    }
    return originalJson(data);
  };

  next();
};

function removeNulls(obj) {
  if (Array.isArray(obj)) {
    return obj.map(removeNulls);
  }

  if (obj !== null && typeof obj === "object") {
    return Object.entries(obj).reduce((acc, [key, value]) => {
      if (value !== null && value !== undefined) {
        acc[key] = removeNulls(value);
      }
      return acc;
    }, {});
  }

  return obj;
}

/**
 * Batch request handler
 * Allows multiple API calls in a single request
 * POST /api/batch with body: { requests: [{ method, url, body }] }
 */
const batchHandler = async (req, res) => {
  const { requests } = req.body;

  if (!Array.isArray(requests) || requests.length === 0) {
    return res.status(400).json({
      success: false,
      error: "Invalid batch request format",
    });
  }

  if (requests.length > 10) {
    return res.status(400).json({
      success: false,
      error: "Maximum 10 requests per batch",
    });
  }

  const results = await Promise.allSettled(
    requests.map(async (request) => {
      try {
        // This would require implementation of internal request handling
        // For now, return a placeholder
        return {
          status: 200,
          data: { message: "Batch processing not fully implemented" },
        };
      } catch (error) {
        return {
          status: 500,
          error: error.message,
        };
      }
    })
  );

  res.json({
    success: true,
    results: results.map((result, index) => ({
      ...requests[index],
      ...result,
    })),
  });
};

module.exports = {
  enableCompression,
  addCacheHeaders,
  fieldFilter,
  paginate,
  trackResponseTime,
  generateETag,
  optimizeJSON,
  batchHandler,
};
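A minimal wiring sketch for these exports, assuming a typical Express entry point; the app setup, route path, and listProducts helper are illustrative assumptions, not part of this commit.

// Hypothetical usage sketch (names below are assumptions, not from this commit)
const express = require("express");
const {
  addCacheHeaders,
  fieldFilter,
  paginate,
  trackResponseTime,
  generateETag,
} = require("./middleware/apiOptimization");

const app = express();
app.use(express.json());
app.use(trackResponseTime); // logs requests slower than 1000ms
app.use(fieldFilter);       // honors ?fields=id,name,price
app.use(generateETag);      // weak ETag + 304 handling on GET

// GET /api/products?page=2&limit=10&fields=id,name,price
app.get(
  "/api/products",
  addCacheHeaders(300),
  paginate(20, 100),
  async (req, res) => {
    const { limit, offset } = req.pagination;
    const { rows, total } = await listProducts({ limit, offset }); // assumed data-access helper
    res.paginate(rows, total); // wraps rows with page/limit/total metadata
  }
);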
backend/middleware/apiOptimization.js.corrupt (normal file, 339 lines)
@@ -0,0 +1,339 @@
/**
 * API Response Optimization Middleware
 * Implements response batching, field filtering, and pagination
 */
const logger = require("../config/logger");

/**
 * Enable response compression for API endpoints
 */
const enableCompression = (req, res, next) => {
  // Already handled by global compression middleware
  next();
};

/**
 * Add cache headers for GET requests
 */
const addCacheHeaders = (maxAge = 300) => {
  return (req, res, next) => {
    if (req.method === "GET" && !res.headersSent) {
      try {
        res.set({
          "Cache-Control": `public, max-age=${maxAge}`,
          Vary: "Accept-Encoding",
        });
      } catch (error) {
        logger.warn("Failed to set cache headers", { error: error.message });
      }
    }
    next();
  };
};

/**
 * Field filtering middleware
 * Allows clients to request only specific fields: ?fields=id,name,price
 * SAFEGUARD: Validates field names to prevent injection attacks
 */
const fieldFilter = (req, res, next) => {
  const originalJson = res.json.bind(res);

  res.json = function (data) {
    const fields = req.query.fields;

    if (!fields || !data || res.headersSent) {
      return originalJson(data);
    }

    try {
      // SAFEGUARD: Validate field names (alphanumeric, underscore, dot only)
      if (!/^[a-zA-Z0-9_.,\s]+$/.test(fields)) {
        logger.warn("Invalid field filter attempted", { fields });
        return originalJson(data);
      }

      const fieldList = fields.split(",").map((f) => f.trim()).filter(Boolean);

      // SAFEGUARD: Limit number of fields
      if (fieldList.length > 50) {
        logger.warn("Too many fields requested", { count: fieldList.length });
        return originalJson(data);
      }

      const filterObject = (obj) => {
        if (!obj || typeof obj !== "object") return obj;

        const filtered = {};
        fieldList.forEach((field) => {
          if (field in obj) {
            filtered[field] = obj[field];
          }
        });
        return filtered;
      };

      if (Array.isArray(data)) {
        data = data.map(filterObject);
      } else if (data.success !== undefined && data.data) {
        // Handle wrapped responses
        if (Array.isArray(data.data)) {
          data.data = data.data.map(filterObject);
        } else {
          data.data = filterObject(data.data);
        }
      } else {
        data = filterObject(data);
      }

      return originalJson(data);
    } catch (error) {
      logger.error("Field filter error", { error: error.message });
      return originalJson(data);
    }
  };

  next();
};

/**
 * Pagination middleware
 * Adds pagination support: ?page=1&limit=20
 */
const paginate = (defaultLimit = 20, maxLimit = 100) => {
  return (req, res, next) => {
    const page = Math.max(1, parseInt(req.query.page) || 1);
    const limit = Math.min(
      maxLimit,
      Math.max(1, parseInt(req.query.limit) || defaultLimit)
    );
    const offset = (page - 1) * limit;

    req.pagination = {
      page,
      limit,
      offset,
      maxLimit,
    };

    // Helper to add pagination info to response
    res.paginate = (data, total) => {
      const totalPages = Math.ceil(total / limit);
      return res.json({
        success: true,
        data,
        pagination: {
          page,
          limit,
          total,
          totalPages,
          hasNext: page < totalPages,
          hasPrev: page > 1,
        },
      });
    };

    next();
  };
};

/**
 * Response time tracking
 */
const trackResponseTime = (req, res, next) => {
  const start = Date.now();

  res.on("finish", () => {
    const duration = Date.now() - start;

    // Log slow requests
    if (duration > 1000) {
      logger.warn("Slow API request", {
        method: req.method,
        path: req.path,
        duration: `${duration}ms`,
        status: res.statusCode,
      });
    }

    // Add response time header only if headers haven't been sent
    if (!res.headersSent) {
      res.set("X-Response-Time", `${duration}ms`);
    }
  });

  next();
};

/**
 * ETag generation for GET requests
 * SAFEGUARD: Checks headersSent before setting headers
 */
const generateETag = (req, res, next) => {
  if (req.method !== "GET") {
    return next();
  }

  const originalJson = res.json.bind(res);

  res.json = function (data) {
    try {
      // SAFEGUARD: Don't process if headers already sent
      if (res.headersSent) {
        return originalJson(data);
      }

      // Generate simple ETag from stringified data
      const dataStr = JSON.stringify(data);
      const etag = `W/"${Buffer.from(dataStr).length.toString(16)}"`;

      // Check if client has cached version
      if (req.headers["if-none-match"] === etag) {
        res.status(304).end();
        return;
      }

      res.set("ETag", etag);
      return originalJson(data);
    } catch (error) {
      logger.error("ETag generation error", { error: error.message });
      return originalJson(data);
    }
  };

  next();
};

/**
 * JSON response size optimization
 * Removes null values and compacts responses
 */
const optimizeJSON = (req, res, next) => {
  const originalJson = res.json.bind(res);

  res.json = function (data) {
    if (data && typeof data === "object") {
      data = removeNulls(data);
    }
    return originalJson(data);
  };

  next();
};

function removeNulls(obj) {
  if (Array.isArray(obj)) {
    return obj.map(removeNulls);
  }

  if (obj !== null && typeof obj === "object") {
    return Object.entries(obj).reduce((acc, [key, value]) => {
      if (value !== null && value !== undefined) {
        acc[key] = removeNulls(value);
      }
      return acc;
    }, {});
  }

  return obj;
}

/**
 * Batch request handler
 * Allows multiple API calls in a single request
 * POST /api/batch with body: { requests: [{ method, url, body }] }
 * SAFEGUARD: Enhanced validation and error handling
 */
const batchHandler = async (req, res) => {
  try {
    const { requests } = req.body;

    // SAFEGUARD: Validate requests array
    if (!Array.isArray(requests) || requests.length === 0) {
      return res.status(400).json({
        success: false,
        error: "Invalid batch request format",
      });
    }

    // SAFEGUARD: Limit batch size
    if (requests.length > 10) {
      return res.status(400).json({
        success: false,
        error: "Maximum 10 requests per batch",
      });
    }

    // SAFEGUARD: Validate each request structure
    const isValid = requests.every(req =>
      req && typeof req === 'object' &&
      req.method && req.url &&
      ['GET', 'POST', 'PUT', 'DELETE'].includes(req.method.toUpperCase())
    );

    if (!isValid) {
      return res.status(400).json({
        success: false,
        error: "Invalid request format in batch",
      });
    }

    const results = await Promise.allSettled(
      requests.map(async (request) => {
        try {
          // This would require implementation of internal request handling
          // For now, return a placeholder
          return {
            status: 200,
            data: { message: "Batch processing not fully implemented" },
          };
        } catch (error) {
          return {
            status: 500,
            error: error.message,
          };
        }
      })
    );

    // SAFEGUARD: Check if response already sent
    if (res.headersSent) {
      logger.warn("Response already sent in batch handler");
      return;
    }

    res.json({
      success: true,
      results: results.map((result, index) => ({
        ...requests[index],
        ...result,
      })),
    });
  } catch (error) {
    logger.error("Batch handler error", { error: error.message, stack: error.stack });
    if (!res.headersSent) {
      res.status(500).json({
        success: false,
        error: "Batch processing failed",
      });
    }

    res.json({
      success: true,
      results: results.map((result, index) => ({
        ...requests[index],
        ...result,
      })),
    });
};

module.exports = {
  enableCompression,
  addCacheHeaders,
  fieldFilter,
  paginate,
  trackResponseTime,
  generateETag,
  optimizeJSON,
  batchHandler,
};
backend/middleware/bruteForceProtection.js (normal file, 152 lines)
@@ -0,0 +1,152 @@
/**
 * Brute force protection middleware
 * Tracks failed login attempts and temporarily blocks IPs with too many failures
 */

const logger = require("../config/logger");

// Store failed attempts in memory (use Redis in production)
const failedAttempts = new Map();
const blockedIPs = new Map();

// Configuration
const MAX_FAILED_ATTEMPTS = 5;
const BLOCK_DURATION = 15 * 60 * 1000; // 15 minutes
const ATTEMPT_WINDOW = 15 * 60 * 1000; // 15 minutes
const CLEANUP_INTERVAL = 60 * 1000; // 1 minute

/**
 * Clean up old entries periodically
 */
const cleanup = () => {
  const now = Date.now();

  // Clean up failed attempts
  for (const [ip, data] of failedAttempts.entries()) {
    if (now - data.firstAttempt > ATTEMPT_WINDOW) {
      failedAttempts.delete(ip);
    }
  }

  // Clean up blocked IPs
  for (const [ip, blockTime] of blockedIPs.entries()) {
    if (now - blockTime > BLOCK_DURATION) {
      blockedIPs.delete(ip);
      logger.info("IP unblocked after cooldown", { ip });
    }
  }
};

// Start cleanup interval
setInterval(cleanup, CLEANUP_INTERVAL);

/**
 * Record a failed login attempt
 * @param {string} ip - IP address
 */
const recordFailedAttempt = (ip) => {
  const now = Date.now();

  if (!failedAttempts.has(ip)) {
    failedAttempts.set(ip, {
      count: 1,
      firstAttempt: now,
    });
  } else {
    const data = failedAttempts.get(ip);

    // Reset if outside window
    if (now - data.firstAttempt > ATTEMPT_WINDOW) {
      data.count = 1;
      data.firstAttempt = now;
    } else {
      data.count++;
    }

    // Block if too many attempts
    if (data.count >= MAX_FAILED_ATTEMPTS) {
      blockedIPs.set(ip, now);
      logger.warn("IP blocked due to failed login attempts", {
        ip,
        attempts: data.count,
      });
    }
  }
};

/**
 * Reset failed attempts for an IP (on successful login)
 * @param {string} ip - IP address
 */
const resetFailedAttempts = (ip) => {
  failedAttempts.delete(ip);
};

/**
 * Check if an IP is currently blocked
 * @param {string} ip - IP address
 * @returns {boolean}
 */
const isBlocked = (ip) => {
  if (!blockedIPs.has(ip)) {
    return false;
  }

  const blockTime = blockedIPs.get(ip);
  const now = Date.now();

  // Check if block has expired
  if (now - blockTime > BLOCK_DURATION) {
    blockedIPs.delete(ip);
    return false;
  }

  return true;
};

/**
 * Get remaining block time in seconds
 * @param {string} ip - IP address
 * @returns {number} Seconds remaining
 */
const getRemainingBlockTime = (ip) => {
  if (!blockedIPs.has(ip)) {
    return 0;
  }

  const blockTime = blockedIPs.get(ip);
  const elapsed = Date.now() - blockTime;
  const remaining = Math.max(0, BLOCK_DURATION - elapsed);

  return Math.ceil(remaining / 1000);
};

/**
 * Middleware to check if IP is blocked
 */
const checkBlocked = (req, res, next) => {
  const ip = req.ip || req.connection.remoteAddress;

  if (isBlocked(ip)) {
    const remainingSeconds = getRemainingBlockTime(ip);
    logger.warn("Blocked IP attempted access", { ip, path: req.path });

    return res.status(429).json({
      success: false,
      message: `Too many failed attempts. Please try again in ${Math.ceil(
        remainingSeconds / 60
      )} minutes.`,
      retryAfter: remainingSeconds,
    });
  }

  next();
};

module.exports = {
  recordFailedAttempt,
  resetFailedAttempts,
  isBlocked,
  checkBlocked,
  getRemainingBlockTime,
};
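A sketch of how these helpers are meant to compose in a login route; the route path and the verifyCredentials helper are assumptions for illustration only.

// Hypothetical login route (route and credential check are assumed, not from this commit)
const express = require("express");
const {
  checkBlocked,
  recordFailedAttempt,
  resetFailedAttempts,
} = require("./middleware/bruteForceProtection");

const router = express.Router();

router.post("/api/auth/login", checkBlocked, async (req, res) => {
  const ip = req.ip;
  const user = await verifyCredentials(req.body.email, req.body.password); // assumed helper

  if (!user) {
    recordFailedAttempt(ip); // counts toward the 5-attempt / 15-minute window
    return res.status(401).json({ success: false, message: "Invalid credentials" });
  }

  resetFailedAttempts(ip); // a successful login clears the counter
  res.json({ success: true });
});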
@@ -5,28 +5,63 @@
const logger = require("../config/logger");

class CacheManager {
  constructor(defaultTTL = 300000) {
    // 5 minutes default
  constructor(defaultTTL = 300000, maxSize = 2000) {
    // 5 minutes default, max 2000 entries (optimized for performance)
    this.cache = new Map();
    this.defaultTTL = defaultTTL;
    this.maxSize = maxSize;
    this.stats = { hits: 0, misses: 0, evictions: 0 };
    // Use Map for O(1) LRU tracking instead of array indexOf/splice
    this.lruHead = null; // Most recently used
    this.lruTail = null; // Least recently used
    this.lruNodes = new Map(); // key -> {prev, next, key}
  }

  set(key, value, ttl = this.defaultTTL) {
    const expiresAt = Date.now() + ttl;

    // If key exists, remove from LRU list first
    if (this.cache.has(key)) {
      this._removeLRUNode(key);
    } else if (this.cache.size >= this.maxSize) {
      // Evict least recently used
      if (this.lruTail) {
        const evictKey = this.lruTail.key;
        this.cache.delete(evictKey);
        this._removeLRUNode(evictKey);
        this.stats.evictions++;
        logger.debug(`Cache LRU eviction: ${evictKey}`);
      }
    }

    this.cache.set(key, { value, expiresAt });
    this._addLRUNode(key); // Add to head (most recent)
    logger.debug(`Cache set: ${key} (TTL: ${ttl}ms)`);
  }

  get(key) {
    const cached = this.cache.get(key);
    if (!cached) return null;

    if (Date.now() > cached.expiresAt) {
    if (!cached) {
      this.stats.misses++;
      logger.debug(`Cache miss: ${key}`);
      return null;
    }

    const now = Date.now();
    if (now > cached.expiresAt) {
      this.cache.delete(key);
      this._removeLRUNode(key);
      this.stats.misses++;
      logger.debug(`Cache expired: ${key}`);
      return null;
    }

    // Move to head (most recently used) - O(1)
    this._removeLRUNode(key);
    this._addLRUNode(key);

    this.stats.hits++;
    logger.debug(`Cache hit: ${key}`);
    return cached.value;
  }

@@ -53,6 +88,9 @@ class CacheManager {
  clear() {
    const size = this.cache.size;
    this.cache.clear();
    this.lruNodes.clear();
    this.lruHead = null;
    this.lruTail = null;
    logger.info(`Cache cleared (${size} keys)`);
  }

@@ -60,6 +98,63 @@ class CacheManager {
    return this.cache.size;
  }

  // Get cache statistics
  getStats() {
    const hitRate =
      this.stats.hits + this.stats.misses > 0
        ? (
            (this.stats.hits / (this.stats.hits + this.stats.misses)) *
            100
          ).toFixed(2)
        : 0;
    return {
      ...this.stats,
      hitRate: `${hitRate}%`,
      size: this.cache.size,
      maxSize: this.maxSize,
    };
  }

  // Reset statistics
  resetStats() {
    this.stats = { hits: 0, misses: 0, evictions: 0 };
  }

  // O(1) LRU operations using doubly-linked list pattern
  _addLRUNode(key) {
    const node = { key, prev: null, next: this.lruHead };

    if (this.lruHead) {
      this.lruHead.prev = node;
    }
    this.lruHead = node;

    if (!this.lruTail) {
      this.lruTail = node;
    }

    this.lruNodes.set(key, node);
  }

  _removeLRUNode(key) {
    const node = this.lruNodes.get(key);
    if (!node) return;

    if (node.prev) {
      node.prev.next = node.next;
    } else {
      this.lruHead = node.next;
    }

    if (node.next) {
      node.next.prev = node.prev;
    } else {
      this.lruTail = node.prev;
    }

    this.lruNodes.delete(key);
  }

  // Clean up expired entries
  cleanup() {
    const now = Date.now();
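A brief cache-aside sketch for the updated CacheManager. It assumes the class is what the module exports and that the file lives under backend (the path, and the loadProductFromDb helper, are assumptions for illustration).

// Hypothetical usage (module path, export shape, and DB helper are assumed)
const CacheManager = require("../utils/cacheManager");
const cache = new CacheManager(300000, 2000); // 5-minute TTL, 2000-entry LRU cap

async function getProduct(id) {
  const key = `product:${id}`;
  const hit = cache.get(key); // O(1) lookup; also moves the key to the MRU position
  if (hit) return hit;

  const product = await loadProductFromDb(id); // assumed data-access helper
  cache.set(key, product); // may evict the LRU tail once maxSize is reached
  return product;
}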
@@ -1,30 +1,46 @@
/**
 * Response Compression Middleware
 * Compresses API responses to reduce payload size
 * High-performance compression with Brotli support
 */
const compression = require("compression");
const zlib = require("zlib");

const compressionMiddleware = compression({
  // Only compress responses larger than 1kb
  threshold: 1024,
  // Compression level (0-9, higher = better compression but slower)
  // Only compress responses larger than 512 bytes (lower threshold)
  threshold: 512,
  // Level 6 for gzip (balance between speed and ratio)
  level: 6,
  // Memory level
  memLevel: 8,
  // Use Brotli when available (better compression than gzip)
  brotli: {
    enabled: true,
    zlib: {
      [zlib.constants.BROTLI_PARAM_QUALITY]: 4, // 0-11, 4 is fast with good compression
      [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT,
    },
  },
  // Filter function - don't compress already compressed formats
  filter: (req, res) => {
    if (req.headers["x-no-compression"]) {
      return false;
    }
    // Check content-type

    const contentType = res.getHeader("Content-Type");
    if (!contentType) return compression.filter(req, res);

    // Don't compress images, videos, or already compressed formats
    if (
      contentType.includes("image/") ||
      contentType.includes("video/") ||
      contentType.includes("application/zip") ||
      contentType.includes("application/pdf")
    ) {
    const skipTypes = [
      "image/",
      "video/",
      "application/zip",
      "application/pdf",
      "application/octet-stream",
      "application/wasm",
      "font/",
    ];

    if (skipTypes.some((type) => contentType.includes(type))) {
      return false;
    }
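A mounting sketch for this middleware; the app wiring and the module's export shape are assumptions, since the diff does not show the bottom of the file.

// Hypothetical wiring (assumes the file exports compressionMiddleware)
const compressionMiddleware = require("./middleware/compression"); // path assumed
app.use(compressionMiddleware); // register before routes so responses over 512 bytes get compressed
// A client can opt out of compression per request by sending an `x-no-compression` header,
// which the filter above honors.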
@@ -62,6 +62,15 @@ const errorHandler = (err, req, res, next) => {
    error.statusCode = errorMapping.statusCode;
  }

  // SAFEGUARD: Don't send response if headers already sent
  if (res.headersSent) {
    logger.warn("Headers already sent in error handler", {
      path: req.path,
      error: error.message,
    });
    return next(err);
  }

  res.status(error.statusCode).json({
    success: false,
    message: error.message || "Server error",
@@ -89,6 +98,12 @@ const notFoundHandler = (req, res) => {
    });
  }

  // SAFEGUARD: Check if response already sent
  if (res.headersSent) {
    logger.warn("Headers already sent in 404 handler", { path: req.path });
    return;
  }

  res.status(404).json({
    success: false,
    message: "Route not found",
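These safeguards only matter if the handlers sit at the end of the middleware chain. A minimal ordering sketch, assuming errorHandler and notFoundHandler are exported from this middleware module and that productRoutes is an illustrative route module:

// Hypothetical app ordering (route module name is an assumption)
app.use("/api/products", productRoutes);
app.use(notFoundHandler); // 404 for anything unmatched; skipped when headers were already sent
app.use(errorHandler);    // final error middleware; delegates via next(err) when headers were already sent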
backend/middleware/imageOptimization.js (normal file, 129 lines)
@@ -0,0 +1,129 @@
/**
 * Image Optimization Middleware
 * High-performance image serving with streaming and caching
 */
const path = require("path");
const fs = require("fs");
const fsPromises = require("fs").promises;
const logger = require("../config/logger");

// Cache for image metadata (not content)
const metadataCache = new Map();
const METADATA_CACHE_TTL = 600000; // 10 minutes
const METADATA_CACHE_MAX = 1000;

// Image mime types
const MIME_TYPES = {
  ".jpg": "image/jpeg",
  ".jpeg": "image/jpeg",
  ".png": "image/png",
  ".gif": "image/gif",
  ".webp": "image/webp",
  ".svg": "image/svg+xml",
  ".ico": "image/x-icon",
  ".avif": "image/avif",
};

/**
 * Get or cache image metadata
 */
async function getImageMetadata(filePath) {
  const cached = metadataCache.get(filePath);
  if (cached && Date.now() - cached.timestamp < METADATA_CACHE_TTL) {
    return cached.data;
  }

  try {
    const stats = await fsPromises.stat(filePath);
    const metadata = {
      exists: true,
      size: stats.size,
      mtime: stats.mtime.getTime(),
      etag: `"${stats.size}-${stats.mtime.getTime()}"`,
      lastModified: stats.mtime.toUTCString(),
    };

    // LRU eviction
    if (metadataCache.size >= METADATA_CACHE_MAX) {
      const firstKey = metadataCache.keys().next().value;
      metadataCache.delete(firstKey);
    }

    metadataCache.set(filePath, { data: metadata, timestamp: Date.now() });
    return metadata;
  } catch {
    const notFound = { exists: false };
    metadataCache.set(filePath, { data: notFound, timestamp: Date.now() });
    return notFound;
  }
}

/**
 * Serve optimized images with streaming and aggressive caching
 */
const imageOptimization = (uploadsDir) => {
  return async (req, res, next) => {
    // Only handle image requests
    const ext = path.extname(req.path).toLowerCase();
    if (!MIME_TYPES[ext]) {
      return next();
    }

    const imagePath = path.join(uploadsDir, req.path.replace("/uploads/", ""));

    // Get cached metadata
    const metadata = await getImageMetadata(imagePath);
    if (!metadata.exists) {
      return next();
    }

    try {
      // Check if client has cached version (304 Not Modified)
      const ifNoneMatch = req.get("if-none-match");
      const ifModifiedSince = req.get("if-modified-since");

      if (
        ifNoneMatch === metadata.etag ||
        ifModifiedSince === metadata.lastModified
      ) {
        return res.status(304).end();
      }

      // Set aggressive caching headers
      res.set({
        "Content-Type": MIME_TYPES[ext],
        "Content-Length": metadata.size,
        "Cache-Control": "public, max-age=31536000, immutable", // 1 year
        ETag: metadata.etag,
        "Last-Modified": metadata.lastModified,
        Vary: "Accept-Encoding",
        "X-Content-Type-Options": "nosniff",
      });

      // Use streaming for efficient memory usage
      const readStream = fs.createReadStream(imagePath, {
        highWaterMark: 64 * 1024, // 64KB chunks
      });

      readStream.on("error", (error) => {
        logger.error("Image stream error:", {
          path: imagePath,
          error: error.message,
        });
        if (!res.headersSent) {
          res.status(500).end();
        }
      });

      readStream.pipe(res);
    } catch (error) {
      logger.error("Image serve error:", {
        path: imagePath,
        error: error.message,
      });
      next();
    }
  };
};

module.exports = { imageOptimization };
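A mounting sketch under the assumption that uploads live in a local uploads directory and that a static fallback follows; the directory path and fallback are illustrative, not part of this commit.

// Hypothetical wiring (uploads path and static fallback are assumptions)
const path = require("path");
const express = require("express");
const { imageOptimization } = require("./middleware/imageOptimization");

const uploadsDir = path.join(__dirname, "uploads");
app.use(imageOptimization(uploadsDir)); // handles /uploads/<image> with streaming and 1-year cache headers
app.use("/uploads", express.static(uploadsDir)); // fallback for anything the optimizer passes through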
backend/middleware/processHandlers.js (normal file, 71 lines)
@@ -0,0 +1,71 @@
/**
 * Global Process Error Handlers
 * Safeguards to prevent crashes from unhandled errors
 */
const logger = require("../config/logger");

/**
 * Handle uncaught exceptions
 */
process.on("uncaughtException", (error) => {
  logger.error("💥 Uncaught Exception", {
    error: error.message,
    stack: error.stack,
  });

  // Give time to log before exiting
  setTimeout(() => {
    process.exit(1);
  }, 1000);
});

/**
 * Handle unhandled promise rejections
 */
process.on("unhandledRejection", (reason, promise) => {
  logger.error("💥 Unhandled Promise Rejection", {
    reason: reason instanceof Error ? reason.message : reason,
    stack: reason instanceof Error ? reason.stack : undefined,
    promise,
  });

  // Don't exit - log and continue
  // In production, you might want to exit: process.exit(1);
});

/**
 * Handle process warnings
 */
process.on("warning", (warning) => {
  logger.warn("⚠️ Process Warning", {
    name: warning.name,
    message: warning.message,
    stack: warning.stack,
  });
});

/**
 * Handle SIGTERM gracefully
 */
process.on("SIGTERM", () => {
  logger.info("👋 SIGTERM received, shutting down gracefully");

  // Give server time to close connections
  setTimeout(() => {
    process.exit(0);
  }, 10000);
});

/**
 * Handle SIGINT gracefully (Ctrl+C)
 */
process.on("SIGINT", () => {
  logger.info("👋 SIGINT received, shutting down gracefully");
  process.exit(0);
});

logger.info("✅ Global process error handlers registered");

module.exports = {
  // Exports for testing if needed
};
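Because the handlers register as a side effect of loading the module, it should be required once, as early as possible in the server entry point. A bootstrap sketch (the entry-point layout and app module are assumptions):

// Hypothetical server entry point (file names are assumptions)
require("./middleware/processHandlers"); // registers the handlers before anything else can throw

const app = require("./app"); // assumed Express app module
app.listen(process.env.PORT || 3000);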
@@ -31,9 +31,7 @@ const validators = {
      .withMessage("Valid email is required")
      .normalizeEmail()
      .trim(),
    body("password")
      .isLength({ min: 8 })
      .withMessage("Password must be at least 8 characters"),
    body("password").notEmpty().withMessage("Password is required").trim(),
  ],

  // User validators
@@ -51,10 +49,10 @@ const validators = {
    )
      .trim(),
    body("password")
      .isLength({ min: 8 })
      .matches(/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)/)
      .isLength({ min: 12 })
      .matches(/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[@$!%*?&#])/)
      .withMessage(
        "Password must be at least 8 characters with uppercase, lowercase, and number"
        "Password must be at least 12 characters with uppercase, lowercase, number, and special character"
      ),
    body("role_id").notEmpty().withMessage("Role is required").trim(),
  ],
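A usage sketch for the tightened user-creation rules with express-validator; the route path and the validator key name (createUser) are assumptions, since the diff only shows the chain inside the validators object.

// Hypothetical route using the updated rules (key name and route are assumed)
const { validationResult } = require("express-validator");

router.post("/api/users", validators.createUser, (req, res, next) => {
  const errors = validationResult(req);
  if (!errors.isEmpty()) {
    return res.status(400).json({ success: false, errors: errors.array() });
  }
  next(); // password now must be at least 12 chars with upper, lower, digit, and special character
});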