// Database layer: PostgreSQL connection pool, cached query helper,
// transactions, batch execution, and a health check for skyartshop.
// External and local dependencies.
const { Pool } = require("pg");
const crypto = require("crypto");
const logger = require("./logger");

// Load environment variables before the pool config reads process.env.
require("dotenv").config();
// Connection pool for the skyartshop database. Every setting has a
// development default except the password, which must come from the
// environment.
const pool = new Pool({
  host: process.env.DB_HOST || "localhost",
  // Env vars are always strings; parse explicitly so pg receives a
  // number (falls back to 5432 when unset or unparsable).
  port: Number.parseInt(process.env.DB_PORT, 10) || 5432,
  database: process.env.DB_NAME || "skyartshop",
  user: process.env.DB_USER || "skyartapp",
  password: process.env.DB_PASSWORD,
  max: 30, // up to 30 connections for higher concurrency
  min: 10, // keep 10 connections warm for instant response
  idleTimeoutMillis: 60000, // recycle connections idle for 60s
  connectionTimeoutMillis: 3000, // fail fast when no connection in 3s
  application_name: "skyartshop-api", // visible in pg_stat_activity
  keepAlive: true, // TCP keepalive
  keepAliveInitialDelayMillis: 10000,
  statement_timeout: 30000, // 30s server-side query timeout
});
// Pool lifecycle logging.
pool.on("connect", () => {
  logger.info("✓ PostgreSQL connected");
});
pool.on("error", (err) => {
  logger.error("PostgreSQL error:", err);
});
// --- In-memory cache for SELECT results (module-level state) ---
const queryCache = new Map(); // md5 key -> { data: pg Result, timestamp: ms }
const queryCacheOrder = []; // key list used to pick an eviction victim
const QUERY_CACHE_TTL = 15000; // ms before a cached result goes stale
const QUERY_CACHE_MAX_SIZE = 500; // hard cap on cached entries
const SLOW_QUERY_THRESHOLD = 50; // ms; slower queries get a warn log
// Derive a compact cache key from the SQL text plus its parameter
// values. MD5 is acceptable here: the digest is a cache-lookup token,
// not a security boundary.
const getCacheKey = (text, params) => {
  const digest = crypto.createHash("md5").update(text);
  if (params) digest.update(JSON.stringify(params));
  return digest.digest("hex");
};
// Run a SQL statement through the pool.
//
// SELECT statements are served from an in-memory LRU cache for up to
// QUERY_CACHE_TTL ms; all other statements always hit the database.
// Queries slower than SLOW_QUERY_THRESHOLD ms are logged, as are all
// failures.
//
// @param {string} text     - SQL text (may contain $1.. placeholders)
// @param {Array}  [params] - positional parameter values
// @returns {Promise<*>} the pg result object (possibly cached)
// @throws whatever pool.query rejects with (after logging)
const query = async (text, params) => {
  const start = Date.now();
  const isSelect = text.trim().toUpperCase().startsWith("SELECT");

  // Serve fresh cached results for SELECTs.
  if (isSelect) {
    const cacheKey = getCacheKey(text, params);
    const cached = queryCache.get(cacheKey);
    if (cached) {
      if (Date.now() - cached.timestamp < QUERY_CACHE_TTL) {
        // Refresh the key's LRU position so hot queries are evicted
        // last (previously hits never moved, making this FIFO).
        const hitIdx = queryCacheOrder.indexOf(cacheKey);
        if (hitIdx !== -1) queryCacheOrder.splice(hitIdx, 1);
        queryCacheOrder.push(cacheKey);
        logger.debug("Query cache hit", { duration: Date.now() - start });
        return cached.data;
      }
      // Expired: drop the stale entry now so cache and order list
      // stay in sync instead of accumulating dead keys.
      queryCache.delete(cacheKey);
      const staleIdx = queryCacheOrder.indexOf(cacheKey);
      if (staleIdx !== -1) queryCacheOrder.splice(staleIdx, 1);
    }
  }

  try {
    const res = await pool.query(text, params);
    const duration = Date.now() - start;

    // Cache SELECT results with LRU eviction.
    if (isSelect) {
      const cacheKey = getCacheKey(text, params);

      // Remove any prior occurrence of this key before re-pushing.
      // The previous version pushed unconditionally, so the order list
      // accumulated duplicates, grew without bound, and eviction could
      // delete a freshly cached entry.
      const priorIdx = queryCacheOrder.indexOf(cacheKey);
      if (priorIdx !== -1) queryCacheOrder.splice(priorIdx, 1);

      // Evict the least-recently-used entry only when adding a new key
      // to a full cache.
      if (queryCache.size >= QUERY_CACHE_MAX_SIZE && !queryCache.has(cacheKey)) {
        const oldestKey = queryCacheOrder.shift();
        if (oldestKey) queryCache.delete(oldestKey);
      }

      queryCache.set(cacheKey, { data: res, timestamp: Date.now() });
      queryCacheOrder.push(cacheKey);
    }

    // Surface slow queries for tuning.
    if (duration > SLOW_QUERY_THRESHOLD) {
      logger.warn("Slow query", {
        duration,
        text: text.substring(0, 100),
        rows: res.rowCount,
        params: params?.length || 0,
      });
    }

    return res;
  } catch (error) {
    logger.error("Query error", {
      text: text.substring(0, 100),
      error: error.message,
      duration: Date.now() - start,
      code: error.code,
    });
    throw error;
  }
};
// Run `callback(client)` inside a single BEGIN/COMMIT transaction on a
// dedicated client. Rolls back on any error; the client is always
// released back to the pool.
//
// @param {(client: *) => Promise<*>} callback - receives the pg client
// @returns {Promise<*>} whatever the callback resolves to
// @throws the callback's (or COMMIT's) original error
const transaction = async (callback) => {
  const client = await pool.connect();
  try {
    await client.query("BEGIN");
    const result = await callback(client);
    await client.query("COMMIT");
    return result;
  } catch (error) {
    // Best-effort rollback: if ROLLBACK itself fails (e.g. the
    // connection died), log it but rethrow the ORIGINAL error so the
    // caller sees the real cause, not the rollback failure.
    try {
      await client.query("ROLLBACK");
    } catch (rollbackError) {
      logger.error("ROLLBACK failed:", rollbackError);
    }
    logger.error("Transaction rolled back:", error);
    throw error;
  } finally {
    client.release();
  }
};
// Execute several queries concurrently through the cached query()
// helper. Each entry is { text, params }. Resolves to an array of
// results in input order; rejects (after logging) as soon as any one
// query fails.
const batchQuery = async (queries) => {
  try {
    const pending = queries.map((q) => query(q.text, q.params));
    return await Promise.all(pending);
  } catch (error) {
    logger.error("Batch query error:", error);
    throw error;
  }
};
// Invalidate the SELECT cache.
//
// With no argument everything is cleared. With `pattern`, only keys
// containing the substring are removed.
// NOTE(review): cache keys are MD5 hex digests (see getCacheKey), so a
// SQL fragment such as a table name will in practice never match any
// key — pattern invalidation only works when callers pass (part of) a
// hashed key. Confirm intended usage with callers.
//
// @param {string} [pattern] - substring to match against cache keys
const clearQueryCache = (pattern) => {
  if (pattern) {
    // Collect matches first, then drop them from BOTH the cache and
    // the LRU order list. The previous version left stale keys in
    // queryCacheOrder, which corrupted later evictions.
    const removed = new Set();
    for (const key of queryCache.keys()) {
      if (key.includes(pattern)) {
        queryCache.delete(key);
        removed.add(key);
      }
    }
    if (removed.size > 0) {
      const surviving = queryCacheOrder.filter((key) => !removed.has(key));
      queryCacheOrder.length = 0;
      queryCacheOrder.push(...surviving);
    }
  } else {
    // Clear everything.
    queryCache.clear();
    queryCacheOrder.length = 0;
  }
  logger.info("Query cache cleared", { pattern: pattern || "all" });
};
// Liveness probe: round-trips a query and reports pool + cache stats.
// Deliberately uses pool.query (NOT the caching query() wrapper) so a
// cached result can never report "healthy" while the database is
// actually unreachable for up to QUERY_CACHE_TTL ms.
// Never throws; failures come back as { healthy: false, error }.
//
// @returns {Promise<object>} health report with pool/cache metrics
const healthCheck = async () => {
  try {
    const result = await pool.query(
      "SELECT NOW() as time, current_database() as database"
    );
    return {
      healthy: true,
      database: result.rows[0].database,
      timestamp: result.rows[0].time,
      pool: {
        total: pool.totalCount,
        idle: pool.idleCount,
        waiting: pool.waitingCount,
      },
      cache: {
        size: queryCache.size,
        maxSize: QUERY_CACHE_MAX_SIZE,
      },
    };
  } catch (error) {
    logger.error("Database health check failed:", error);
    return {
      healthy: false,
      error: error.message,
    };
  }
};
// Public API: raw pool plus cached query / transaction helpers.
module.exports = {
  pool,
  query,
  transaction,
  batchQuery,
  clearQueryCache,
  healthCheck,
};