#!/usr/bin/env node
const { pool, query } = require("./config/database");

async function analyzeQueryPatterns() {
  console.log("🔍 Analyzing Query Patterns...\n");

  try {
    // 1. Check query plans for the most frequently executed queries
    console.log("1️⃣ Checking Query Performance:");

    // Test products query (most common)
    const productsExplain = await query(`
      EXPLAIN ANALYZE
      SELECT p.id, p.name, p.slug, p.price, p.category, p.createdat
      FROM products p
      WHERE p.isactive = true
      ORDER BY p.createdat DESC
      LIMIT 20
    `);

    console.log("  Products listing:");
    productsExplain.rows.forEach((row) => {
      if (
        row["QUERY PLAN"].includes("Index") ||
        row["QUERY PLAN"].includes("Seq Scan")
      ) {
        console.log(`    ${row["QUERY PLAN"]}`);
      }
    });

    // Test portfolio query
    const portfolioExplain = await query(`
      EXPLAIN ANALYZE
      SELECT id, title, category, displayorder, createdat
      FROM portfolioprojects
      WHERE isactive = true
      ORDER BY displayorder ASC, createdat DESC
    `);

    console.log("\n  Portfolio listing:");
    portfolioExplain.rows.slice(0, 3).forEach((row) => {
      console.log(`    ${row["QUERY PLAN"]}`);
    });

    // Test product with images (JOIN query)
    const productWithImagesExplain = await query(`
      EXPLAIN ANALYZE
      SELECT p.*, pi.image_url, pi.color_variant
      FROM products p
      LEFT JOIN product_images pi ON pi.product_id = p.id
      WHERE p.isactive = true
      LIMIT 10
    `);

    console.log("\n  Products with images (JOIN):");
    productWithImagesExplain.rows.slice(0, 5).forEach((row) => {
      console.log(`    ${row["QUERY PLAN"]}`);
    });

    // 2. Check table statistics (row counts, dead rows, bloat)
    console.log("\n2️⃣ Checking Table Statistics:");
    const stats = await query(`
      SELECT
        schemaname,
        relname as tablename,
        n_live_tup as row_count,
        n_dead_tup as dead_rows,
        CASE
          WHEN n_live_tup > 0
          THEN round(100.0 * n_dead_tup / n_live_tup, 2)
          ELSE 0
        END as bloat_pct,
        last_vacuum,
        last_analyze
      FROM pg_stat_user_tables
      WHERE schemaname = 'public'
        AND relname IN ('products', 'product_images', 'portfolioprojects', 'blogposts', 'pages')
      ORDER BY n_live_tup DESC
    `);

    console.log("  Table health:");
    stats.rows.forEach((row) => {
      console.log(
        `    ${row.tablename.padEnd(20)} ${String(row.row_count).padStart(
          6
        )} rows, ${String(row.dead_rows).padStart(4)} dead (${String(
          row.bloat_pct
        ).padStart(5)}% bloat)`
      );
    });

    // 3. Check index usage
    console.log("\n3️⃣ Index Usage Statistics:");
    const indexUsage = await query(`
      SELECT
        schemaname,
        relname as tablename,
        indexrelname as indexname,
        idx_scan as scans,
        idx_tup_read as rows_read,
        idx_tup_fetch as rows_fetched
      FROM pg_stat_user_indexes
      WHERE schemaname = 'public'
        AND relname IN ('products', 'product_images', 'portfolioprojects', 'blogposts', 'pages')
        AND idx_scan > 0
      ORDER BY idx_scan DESC
      LIMIT 15
    `);

    console.log("  Most used indexes:");
    indexUsage.rows.forEach((row) => {
      console.log(
        `    ${row.indexname.padEnd(40)} ${String(row.scans).padStart(
          6
        )} scans`
      );
    });

    // 4. Check for unused indexes
    const unusedIndexes = await query(`
      SELECT
        schemaname,
        relname as tablename,
        indexrelname as indexname
      FROM pg_stat_user_indexes
      WHERE schemaname = 'public'
        AND relname IN ('products', 'product_images', 'portfolioprojects', 'blogposts', 'pages')
        AND idx_scan = 0
        AND indexrelname NOT LIKE '%_pkey'
      ORDER BY relname, indexrelname
    `);

    if (unusedIndexes.rows.length > 0) {
      console.log("\n4️⃣ Unused Indexes (consider removing):");
      unusedIndexes.rows.forEach((row) => {
        console.log(`    ${row.tablename}.${row.indexname}`);
      });
    } else {
      console.log("\n4️⃣ ✅ All indexes are being used");
    }

    // 5. Check cache hit ratio
    console.log("\n5️⃣ Cache Hit Ratio:");
    const cacheHit = await query(`
      SELECT
        sum(heap_blks_read) as heap_read,
        sum(heap_blks_hit) as heap_hit,
        CASE
          WHEN sum(heap_blks_hit) + sum(heap_blks_read) > 0
          THEN round(100.0 * sum(heap_blks_hit) / (sum(heap_blks_hit) + sum(heap_blks_read)), 2)
          ELSE 0
        END as cache_hit_ratio
      FROM pg_statio_user_tables
      WHERE schemaname = 'public'
    `);

    const ratio = cacheHit.rows[0].cache_hit_ratio;
    const status = ratio > 99 ? "✅" : ratio > 95 ? "⚠️" : "❌";
    console.log(`  ${status} ${ratio}% (target: >99%)`);

    console.log("\n✅ Analysis complete!");
  } catch (error) {
    console.error("❌ Error:", error.message);
  } finally {
    await pool.end();
  }
}

analyzeQueryPatterns();