Node.js performance optimization is crucial for building scalable applications that can handle high traffic loads. This guide covers techniques for keeping the event loop responsive, managing memory, optimizing database access, clustering, caching, and monitoring.
Event Loop Optimization
Understanding and optimizing the Node.js event loop is fundamental to performance:
// ❌ Blocking the event loop: naive recursion recomputes the same values exponentially
function fibonacci(n) {
  if (n < 2) return n;
  return fibonacci(n - 1) + fibonacci(n - 2);
}

// ✅ Memoization makes repeated calls cheap (note: the first computation is still
// synchronous; truly CPU-heavy work belongs in a worker thread, see below)
const fibCache = new Map();
function fibonacciOptimized(n) {
  if (fibCache.has(n)) return fibCache.get(n);
  if (n < 2) return n;
  const result = fibonacciOptimized(n - 1) + fibonacciOptimized(n - 2);
  fibCache.set(n, result);
  return result;
}
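Memoization only helps with repeated inputs; a single large computation still ties up the main thread. A minimal sketch of offloading the work to a worker thread (the file name fib-worker.js is hypothetical):

// fib-worker.js (hypothetical file name): runs the CPU-heavy recursion off the main thread
const { parentPort, workerData } = require('worker_threads');

function fib(n) {
  return n < 2 ? n : fib(n - 1) + fib(n - 2);
}

parentPort.postMessage(fib(workerData));

// In the main process: the event loop stays free while the worker computes
const { Worker } = require('worker_threads');

function fibonacciAsync(n) {
  return new Promise((resolve, reject) => {
    const worker = new Worker('./fib-worker.js', { workerData: n });
    worker.once('message', resolve);
    worker.once('error', reject);
  });
}

Spawning a worker per call has its own cost; for frequent jobs, a small worker pool amortizes it.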
Memory Management
Garbage Collection Optimization
// Log a memory snapshot when the process exits
process.on('exit', () => {
  const memUsage = process.memoryUsage();
  console.log('Memory Usage:', {
    rss: Math.round(memUsage.rss / 1024 / 1024) + ' MB',
    heapTotal: Math.round(memUsage.heapTotal / 1024 / 1024) + ' MB',
    heapUsed: Math.round(memUsage.heapUsed / 1024 / 1024) + ' MB'
  });
});
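For ongoing visibility rather than a single snapshot at exit, a minimal sketch that samples the heap on an interval (the 60-second interval is an arbitrary choice):

// Sample heap usage every 60 seconds (interval chosen arbitrarily for illustration)
const v8 = require('v8');

setInterval(() => {
  const { heapUsed, heapTotal } = process.memoryUsage();
  const { heap_size_limit } = v8.getHeapStatistics();
  console.log(
    `heap: ${Math.round(heapUsed / 1024 / 1024)} / ${Math.round(heapTotal / 1024 / 1024)} MB ` +
    `(limit ${Math.round(heap_size_limit / 1024 / 1024)} MB)`
  );
}, 60_000).unref(); // unref() so the timer does not keep the process alive on its own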
// Efficient object creation
class ObjectPool {
  constructor(createFn, resetFn, initialSize = 10) {
    this.createFn = createFn;
    this.resetFn = resetFn;
    this.pool = [];
    for (let i = 0; i < initialSize; i++) {
      this.pool.push(this.createFn());
    }
  }

  acquire() {
    return this.pool.length > 0 ? this.pool.pop() : this.createFn();
  }

  release(obj) {
    this.resetFn(obj);
    this.pool.push(obj);
  }
}
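For example, the pool can hand out reusable scratch buffers instead of allocating a fresh one per request (the pooled object's shape here is purely illustrative):

// Hypothetical usage: pool reusable scratch buffers instead of allocating per request
const bufferPool = new ObjectPool(
  () => ({ buf: Buffer.allocUnsafe(64 * 1024), length: 0 }), // createFn
  (obj) => { obj.length = 0; },                              // resetFn
  50
);

const scratch = bufferPool.acquire();
// ... use scratch.buf ...
bufferPool.release(scratch); // return it for reuse instead of leaving it to the GC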
Database Optimization
Connection Pooling
const { Pool } = require('pg');

const pool = new Pool({
  host: process.env.DB_HOST,
  database: process.env.DB_NAME,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  port: process.env.DB_PORT,
  max: 20,                       // Maximum connections
  idleTimeoutMillis: 30000,      // Close idle connections after 30s
  connectionTimeoutMillis: 2000, // Return an error after 2s if no connection is available
});
// Efficient query execution (assumes a connected Redis client, like the one
// created in the caching section below)
async function getUserWithCache(userId) {
  const cacheKey = `user:${userId}`;

  // Check cache first
  const cached = await redis.get(cacheKey);
  if (cached) return JSON.parse(cached);

  // Query database
  const result = await pool.query('SELECT * FROM users WHERE id = $1', [userId]);
  const user = result.rows[0];

  // Cache result for 5 minutes (skip caching misses)
  if (user) {
    await redis.setEx(cacheKey, 300, JSON.stringify(user));
  }
  return user;
}
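When several statements must succeed or fail together, check a dedicated client out of the pool and always release it. A sketch under the assumption of an accounts table (table and column names are illustrative):

// Checking a client out of the pool for a multi-statement transaction
// (table/column names are illustrative)
async function transferCredits(fromId, toId, amount) {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    await client.query('UPDATE accounts SET credits = credits - $1 WHERE id = $2', [amount, fromId]);
    await client.query('UPDATE accounts SET credits = credits + $1 WHERE id = $2', [amount, toId]);
    await client.query('COMMIT');
  } catch (err) {
    await client.query('ROLLBACK');
    throw err;
  } finally {
    client.release(); // always return the client to the pool
  }
}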
Clustering and Load Balancing
const cluster = require('cluster');
const numCPUs = require('os').cpus().length;

if (cluster.isPrimary) { // cluster.isMaster on Node < 16
  console.log(`Primary ${process.pid} is running`);

  // Fork one worker per CPU core
  for (let i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  cluster.on('exit', (worker, code, signal) => {
    console.log(`Worker ${worker.process.pid} died`);
    cluster.fork(); // Restart dead worker
  });
} else {
  // Worker process
  require('./app.js');
  console.log(`Worker ${process.pid} started`);
}
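Restarting dead workers keeps capacity up, but deploys also need workers to drain in-flight requests before exiting. A minimal sketch of graceful shutdown, assuming `server` is the HTTP server created in app.js:

// In the worker: stop accepting new connections on SIGTERM, then exit
// (assumes `server` is the HTTP server created in app.js)
process.on('SIGTERM', () => {
  server.close(() => process.exit(0));               // finish in-flight requests first
  setTimeout(() => process.exit(1), 10_000).unref(); // hard deadline of 10s (arbitrary)
});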
Caching Strategies
Redis Implementation
const redis = require('redis');
const client = redis.createClient();
client.connect().catch(console.error); // node-redis v4+: commands return promises once connected

// Multi-layer caching
class CacheManager {
  constructor() {
    this.memoryCache = new Map();
    this.maxMemorySize = 1000;
  }

  async get(key) {
    // Check memory cache first
    if (this.memoryCache.has(key)) {
      return this.memoryCache.get(key);
    }

    // Check Redis cache
    const redisValue = await client.get(key);
    if (redisValue) {
      const parsed = JSON.parse(redisValue);
      this.setMemoryCache(key, parsed);
      return parsed;
    }
    return null;
  }

  async set(key, value, ttl = 300) {
    this.setMemoryCache(key, value);
    await client.setEx(key, ttl, JSON.stringify(value));
  }

  setMemoryCache(key, value) {
    // Simple FIFO eviction: drop the oldest-inserted entry when the cache is full
    if (this.memoryCache.size >= this.maxMemorySize) {
      const firstKey = this.memoryCache.keys().next().value;
      this.memoryCache.delete(firstKey);
    }
    this.memoryCache.set(key, value);
  }
}
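Writes must invalidate both layers, or the in-process Map will serve stale data. A sketch of a delete path (note it only clears this process's memory cache; clearing other instances would need a pub/sub broadcast, omitted here):

// Invalidate a key in both layers (only clears this process's memory cache;
// other instances would need a pub/sub broadcast, omitted here)
class CacheManagerWithInvalidation extends CacheManager {
  async del(key) {
    this.memoryCache.delete(key);
    await client.del(key);
  }
}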
HTTP/2 and Compression
const http2 = require('http2');
const fs = require('fs');

// HTTP/2 server with server push
const server = http2.createSecureServer({
  key: fs.readFileSync('private-key.pem'),
  cert: fs.readFileSync('certificate.pem')
});

server.on('stream', (stream, headers) => {
  if (headers[':path'] === '/') {
    // Push critical resources alongside the HTML response
    stream.pushStream({ ':path': '/styles.css' }, (err, pushStream) => {
      if (!err) {
        // respondWithFile streams the file without blocking the event loop
        pushStream.respondWithFile('public/styles.css', { 'content-type': 'text/css' });
      }
    });

    stream.respond({ ':status': 200, 'content-type': 'text/html' });
    stream.end('<h1>Hello over HTTP/2</h1>');
  }
});

server.listen(8443); // port is illustrative
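The core http2 module does not take Express-style middleware such as compression, but responses can be gzip-compressed with the built-in zlib module. A minimal sketch (file paths are illustrative, and real code should first check the request's accept-encoding header):

// Gzip a response body with the built-in zlib module
// (fs comes from the server example above; paths are illustrative)
const zlib = require('zlib');

function respondCompressed(stream, filePath, contentType) {
  stream.respond({
    ':status': 200,
    'content-type': contentType,
    'content-encoding': 'gzip'
  });
  fs.createReadStream(filePath)
    .pipe(zlib.createGzip())
    .pipe(stream); // stream the compressed bytes instead of buffering the whole file
}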
Monitoring and Profiling
// Performance monitoring
const { PerformanceObserver, performance } = require('perf_hooks');

const performanceObserver = new PerformanceObserver((list) => {
  list.getEntries().forEach((entry) => {
    console.log(`${entry.name}: ${entry.duration}ms`);
  });
});
// 'navigation' entries exist only in browsers; in Node, observe 'measure' (and 'mark', 'gc', ...) entries
performanceObserver.observe({ entryTypes: ['measure'] });
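The observer only reports entries that something creates. A short sketch using performance.mark() and performance.measure() around an operation (the entry names are arbitrary):

// Create the entries the observer reports on (performance is imported from perf_hooks above)
performance.mark('db-query-start');
// ... run the operation being measured ...
performance.mark('db-query-end');
performance.measure('db-query', 'db-query-start', 'db-query-end');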
// Custom metrics
function measureAsync(name, fn) {
  return async (...args) => {
    const start = Date.now();
    try {
      const result = await fn(...args);
      const duration = Date.now() - start;
      console.log(`${name} completed in ${duration}ms`);
      return result;
    } catch (error) {
      const duration = Date.now() - start;
      console.log(`${name} failed after ${duration}ms`);
      throw error;
    }
  };
}
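Wrapping an existing async function is then a one-liner, shown here with getUserWithCache from the database section:

// Wrap the cached-user lookup from the database section with timing
const getUserTimed = measureAsync('getUserWithCache', getUserWithCache);

// const user = await getUserTimed(42); // logs "getUserWithCache completed in <n>ms"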
Regular performance monitoring, profiling, and optimization are essential for maintaining high-performance Node.js applications at scale.