webupdatev1
@@ -1,4 +1,5 @@
 const { Pool } = require("pg");
+const crypto = require("crypto");
 const logger = require("./logger");
 require("dotenv").config();
 
@@ -8,23 +9,86 @@ const pool = new Pool({
   database: process.env.DB_NAME || "skyartshop",
   user: process.env.DB_USER || "skyartapp",
   password: process.env.DB_PASSWORD,
-  max: 20,
-  idleTimeoutMillis: 30000,
-  connectionTimeoutMillis: 2000,
+  max: 30, // Increased to 30 for higher concurrency
+  min: 10, // Keep 10 connections warm for instant response
+  idleTimeoutMillis: 60000,
+  connectionTimeoutMillis: 3000,
+  application_name: "skyartshop-api",
+  keepAlive: true, // TCP keepalive
+  keepAliveInitialDelayMillis: 10000,
+  statement_timeout: 30000, // 30s query timeout
 });
 
 pool.on("connect", () => logger.info("✓ PostgreSQL connected"));
 pool.on("error", (err) => logger.error("PostgreSQL error:", err));
 
+// Query cache for SELECT statements with crypto-based keys
+const queryCache = new Map();
+const queryCacheOrder = []; // LRU tracking
+const QUERY_CACHE_TTL = 15000; // 15 seconds (increased)
+const QUERY_CACHE_MAX_SIZE = 500; // 500 cached queries (increased)
+const SLOW_QUERY_THRESHOLD = 50; // 50ms threshold (stricter)
+
+// Generate fast cache key using crypto hash
+const getCacheKey = (text, params) => {
+  const hash = crypto.createHash("md5");
+  hash.update(text);
+  if (params) hash.update(JSON.stringify(params));
+  return hash.digest("hex");
+};
+
 const query = async (text, params) => {
   const start = Date.now();
+  const isSelect = text.trim().toUpperCase().startsWith("SELECT");
+
+  // Check cache for SELECT queries
+  if (isSelect) {
+    const cacheKey = getCacheKey(text, params);
+    const cached = queryCache.get(cacheKey);
+
+    if (cached && Date.now() - cached.timestamp < QUERY_CACHE_TTL) {
+      logger.debug("Query cache hit", { duration: Date.now() - start });
+      return cached.data;
+    }
+  }
+
   try {
     const res = await pool.query(text, params);
     const duration = Date.now() - start;
     logger.debug("Executed query", { duration, rows: res.rowCount });
+
+    // Cache SELECT queries with LRU eviction
+    if (isSelect) {
+      const cacheKey = getCacheKey(text, params);
+
+      // LRU eviction
+      if (queryCache.size >= QUERY_CACHE_MAX_SIZE) {
+        const oldestKey = queryCacheOrder.shift();
+        if (oldestKey) queryCache.delete(oldestKey);
+      }
+
+      queryCache.set(cacheKey, { data: res, timestamp: Date.now() });
+      queryCacheOrder.push(cacheKey);
+    }
+
+    // Log slow queries
+    if (duration > SLOW_QUERY_THRESHOLD) {
+      logger.warn("Slow query", {
+        duration,
+        text: text.substring(0, 100),
+        rows: res.rowCount,
+        params: params?.length || 0,
+      });
+    }
+
     return res;
   } catch (error) {
-    logger.error("Query error:", { text, error: error.message });
+    const duration = Date.now() - start;
+    logger.error("Query error", {
+      text: text.substring(0, 100),
+      error: error.message,
+      duration,
+      code: error.code,
+    });
     throw error;
   }
 };
@@ -46,7 +110,37 @@ const transaction = async (callback) => {
   }
 };
 
-// Health check
+// Batch query execution for parallel operations
+const batchQuery = async (queries) => {
+  try {
+    const results = await Promise.all(
+      queries.map(({ text, params }) => query(text, params))
+    );
+    return results;
+  } catch (error) {
+    logger.error("Batch query error:", error);
+    throw error;
+  }
+};
+
+// Clear query cache (useful for cache invalidation)
+const clearQueryCache = (pattern) => {
+  if (pattern) {
+    // Clear specific pattern
+    for (const key of queryCache.keys()) {
+      if (key.includes(pattern)) {
+        queryCache.delete(key);
+      }
+    }
+  } else {
+    // Clear all
+    queryCache.clear();
+    queryCacheOrder.length = 0;
+  }
+  logger.info("Query cache cleared", { pattern: pattern || "all" });
+};
+
+// Health check with pool metrics
 const healthCheck = async () => {
   try {
     const result = await query(
@@ -56,6 +150,15 @@ const healthCheck = async () => {
       healthy: true,
       database: result.rows[0].database,
       timestamp: result.rows[0].time,
+      pool: {
+        total: pool.totalCount,
+        idle: pool.idleCount,
+        waiting: pool.waitingCount,
+      },
+      cache: {
+        size: queryCache.size,
+        maxSize: QUERY_CACHE_MAX_SIZE,
+      },
     };
   } catch (error) {
     logger.error("Database health check failed:", error);
@@ -66,4 +169,11 @@ const healthCheck = async () => {
   }
 };
 
-module.exports = { pool, query, transaction, healthCheck };
+module.exports = {
+  pool,
+  query,
+  transaction,
+  batchQuery,
+  clearQueryCache,
+  healthCheck,
+};
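
A note on the new clearQueryCache: getCacheKey reduces each query to an MD5 hex digest, so the pattern branch compares a substring such as a table name against hex strings and will never match anything. A minimal sketch of one way to make pattern invalidation work, assuming each entry also stores the raw SQL (setCached and clearQueryCacheByText are illustrative names, not part of this commit):

// Hypothetical variant of the cache write: keep the raw SQL on the
// entry so pattern invalidation can match query text, not a digest.
const setCached = (cacheKey, text, res) => {
  queryCache.set(cacheKey, { text, data: res, timestamp: Date.now() });
  queryCacheOrder.push(cacheKey);
};

const clearQueryCacheByText = (pattern) => {
  for (const [key, entry] of queryCache.entries()) {
    if (entry.text.includes(pattern)) queryCache.delete(key);
  }
};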
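The eviction labeled LRU is closer to FIFO: a cache hit never moves a key to the back of queryCacheOrder, and expired or deleted keys linger in the array. A sketch of true LRU using a Map's insertion order, under the same QUERY_CACHE_MAX_SIZE limit (lruGet and lruSet are illustrative helpers):

// A Map iterates in insertion order, so deleting and re-inserting
// an entry on every hit keeps the least-recently-used key first.
const lruGet = (key) => {
  const entry = queryCache.get(key);
  if (!entry) return undefined;
  queryCache.delete(key);
  queryCache.set(key, entry); // now the most recently used
  return entry;
};

const lruSet = (key, entry) => {
  if (queryCache.size >= QUERY_CACHE_MAX_SIZE) {
    // The first key in iteration order is the least recently used.
    queryCache.delete(queryCache.keys().next().value);
  }
  queryCache.set(key, entry);
};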
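And a sketch of how the expanded exports might be consumed, assuming the module lives at ./db (the table names below are placeholders, not from this commit):

const { query, batchQuery, clearQueryCache } = require("./db");

const loadShopData = async () => {
  // Run independent SELECTs in parallel through the shared pool.
  const [products, categories] = await batchQuery([
    { text: "SELECT * FROM products WHERE featured = $1", params: [true] },
    { text: "SELECT * FROM categories ORDER BY name" },
  ]);
  return { products: products.rows, categories: categories.rows };
};

const renameProduct = async (id, name) => {
  // After a write, drop cached SELECTs so readers see fresh rows.
  await query("UPDATE products SET name = $1 WHERE id = $2", [name, id]);
  clearQueryCache();
};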