Node.js Performance Optimization Techniques
Node.js is renowned for its performance in I/O-intensive applications, but like any technology, it requires careful optimization to reach its full potential. In this comprehensive guide, we’ll explore proven techniques to boost your Node.js application’s performance.
Understanding Node.js Performance Characteristics
Node.js operates on a single-threaded event loop model, which makes it excellent for I/O-intensive tasks but can create bottlenecks for CPU-intensive operations. Understanding this fundamental architecture is key to optimization.
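A quick way to check whether something is starving the event loop is the built-in perf_hooks.monitorEventLoopDelay histogram. The sketch below is a minimal illustration; the 20 ms sampling resolution and 200 ms alert threshold are arbitrary values chosen for the example, not recommendations.

const { monitorEventLoopDelay } = require('perf_hooks');

const histogram = monitorEventLoopDelay({ resolution: 20 }); // sample every 20 ms
histogram.enable();

setInterval(() => {
  const maxDelayMs = histogram.max / 1e6; // histogram values are reported in nanoseconds
  if (maxDelayMs > 200) {
    console.warn(`Event loop blocked for up to ${maxDelayMs.toFixed(1)} ms`);
  }
  histogram.reset();
}, 5000);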
The Event Loop
The event loop is the heart of Node.js performance:
// Blocking operation - Avoid this!
function blockingOperation() {
  let sum = 0;
  for (let i = 0; i < 1000000000; i++) {
    sum += i;
  }
  return sum;
}

// Non-blocking alternative: split the work into chunks so the event
// loop can service other callbacks between iterations
function nonBlockingOperation(callback) {
  const total = 1000000000;
  const chunkSize = 1000000;
  let sum = 0, i = 0;
  function processChunk() {
    const end = Math.min(i + chunkSize, total);
    for (; i < end; i++) sum += i;
    if (i < total) {
      setImmediate(processChunk); // yield back to the event loop
    } else {
      callback(sum);
    }
  }
  setImmediate(processChunk);
}
Memory Management Optimization
1. Avoid Memory Leaks
Common memory leak patterns and their solutions:
// Memory leak - Global variables
// Bad
let cache = {};
function addToCache(key, value) {
  cache[key] = value; // Cache grows indefinitely
}

// Good - Use an LRU cache with a size limit
const LRU = require('lru-cache');
const lruCache = new LRU({ max: 500 });
function addToCache(key, value) {
  lruCache.set(key, value);
}
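Unbounded caches are not the only culprit. Another pattern worth watching for (added here as a generic illustration, not from the original example) is attaching listeners to a long-lived EventEmitter on every request and never removing them:

const EventEmitter = require('events');
const bus = new EventEmitter();

// Leaky - every call registers another listener that is never removed
function subscribeBad(handler) {
  bus.on('update', handler);
}

// Better - return an unsubscribe function (or use once() for one-shot listeners)
function subscribeGood(handler) {
  bus.on('update', handler);
  return () => bus.off('update', handler);
}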
2. Optimize Object Creation
// Using map - convenient, but it invokes a callback per element
function processUsers(users) {
  return users.map(user => {
    return {
      id: user.id,
      name: user.name,
      email: user.email
    };
  });
}

// Slightly faster in hot paths - a plain loop with direct index
// assignment and a consistent object shape for every element
function processUsersOptimized(users) {
  const result = [];
  for (let i = 0; i < users.length; i++) {
    const user = users[i];
    result[i] = {
      id: user.id,
      name: user.name,
      email: user.email
    };
  }
  return result;
}
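For hot paths that allocate the same kind of short-lived object over and over, a simple object pool can reduce garbage-collection pressure further. The sketch below is a generic illustration rather than part of the original example; measure before adopting it, because pooling only pays off at genuinely hot allocation sites.

class ObjectPool {
  constructor(factory, size = 100) {
    this.factory = factory;
    this.pool = Array.from({ length: size }, factory); // pre-allocate reusable objects
  }
  acquire() {
    return this.pool.pop() || this.factory(); // fall back to a fresh object when empty
  }
  release(obj) {
    this.pool.push(obj); // caller is responsible for resetting the object's fields
  }
}

// Hypothetical usage with the user-shaped objects from above
const userPool = new ObjectPool(() => ({ id: 0, name: '', email: '' }));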
3. Use Streaming for Large Data
const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream');

// Bad - Loads entire file into memory
function compressFileBad(inputFile, outputFile) {
  const data = fs.readFileSync(inputFile);
  const compressed = zlib.gzipSync(data);
  fs.writeFileSync(outputFile, compressed);
}

// Good - Streaming approach; stream.pipeline forwards errors from
// every stream in the chain, unlike bare .pipe()
function compressFileGood(inputFile, outputFile) {
  return new Promise((resolve, reject) => {
    pipeline(
      fs.createReadStream(inputFile),
      zlib.createGzip(),
      fs.createWriteStream(outputFile),
      (err) => (err ? reject(err) : resolve())
    );
  });
}
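Usage is the same as before; the file paths below are placeholders for illustration:

compressFileGood('./logs/app.log', './logs/app.log.gz')
  .then(() => console.log('Compression finished'))
  .catch((err) => console.error('Compression failed:', err));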
CPU Optimization Techniques
1. Use Worker Threads for CPU-Intensive Tasks
// main.js
const { Worker, isMainThread, parentPort, workerData } = require('worker_threads');

if (isMainThread) {
  // Main thread
  function heavyComputation(data) {
    return new Promise((resolve, reject) => {
      const worker = new Worker(__filename, {
        workerData: data
      });
      worker.on('message', resolve);
      worker.on('error', reject);
      worker.on('exit', (code) => {
        if (code !== 0) {
          reject(new Error(`Worker stopped with exit code ${code}`));
        }
      });
    });
  }

  // Usage
  heavyComputation({ numbers: [1, 2, 3, 4, 5] })
    .then(result => console.log('Result:', result));
} else {
  // Worker thread
  function fibonacci(n) {
    if (n < 2) return n;
    return fibonacci(n - 1) + fibonacci(n - 2);
  }

  const { numbers } = workerData;
  const results = numbers.map(num => fibonacci(num));
  parentPort.postMessage(results);
}
2. Optimize Algorithms and Data Structures
// Inefficient lookup - O(n)
function findUserBad(users, id) {
  return users.find(user => user.id === id);
}

// Efficient lookup with Map - O(1)
class UserManager {
  constructor(users) {
    this.userMap = new Map();
    users.forEach(user => this.userMap.set(user.id, user));
  }

  findUser(id) {
    return this.userMap.get(id);
  }
}
Database Optimization
1. Connection Pooling
const mysql = require('mysql2/promise');

// Bad - Creating new connections
async function queryBad(sql, params) {
  const connection = await mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: 'password',
    database: 'mydb'
  });
  const [rows] = await connection.execute(sql, params);
  await connection.end();
  return rows;
}

// Good - Using connection pool
const pool = mysql.createPool({
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'mydb',
  waitForConnections: true,
  connectionLimit: 10,
  queueLimit: 0
});

async function queryGood(sql, params) {
  const [rows] = await pool.execute(sql, params);
  return rows;
}
2. Query Optimization
// Inefficient - N+1 query problem
async function getUsersWithPostsBad() {
  const users = await db.query('SELECT * FROM users');
  for (const user of users) {
    user.posts = await db.query('SELECT * FROM posts WHERE user_id = ?', [user.id]);
  }
  return users;
}

// Efficient - Single query with JOIN
async function getUsersWithPostsGood() {
  const query = `
    SELECT
      u.id, u.name, u.email,
      p.id as post_id, p.title, p.content
    FROM users u
    LEFT JOIN posts p ON u.id = p.user_id
  `;
  const rows = await db.query(query);

  // Group results
  const usersMap = new Map();
  rows.forEach(row => {
    if (!usersMap.has(row.id)) {
      usersMap.set(row.id, {
        id: row.id,
        name: row.name,
        email: row.email,
        posts: []
      });
    }
    if (row.post_id) {
      usersMap.get(row.id).posts.push({
        id: row.post_id,
        title: row.title,
        content: row.content
      });
    }
  });
  return Array.from(usersMap.values());
}
Caching Strategies
1. In-Memory Caching
const NodeCache = require('node-cache');
const cache = new NodeCache({ stdTTL: 600 }); // 10 minutes TTL

async function getCachedData(key, fetchFunction) {
  let data = cache.get(key);
  if (data == null) {
    data = await fetchFunction();
    cache.set(key, data);
  }
  return data;
}

// Usage
async function getUser(id) {
  return getCachedData(`user:${id}`, () =>
    db.query('SELECT * FROM users WHERE id = ?', [id])
  );
}
2. Redis Caching
const redis = require('redis');
const client = redis.createClient();
client.connect().catch(console.error); // node-redis v4+ requires an explicit connect()

class CacheManager {
  constructor() {
    this.client = client;
  }

  async get(key) {
    try {
      const value = await this.client.get(key);
      return value ? JSON.parse(value) : null;
    } catch (error) {
      console.error('Cache get error:', error);
      return null;
    }
  }

  async set(key, value, ttl = 3600) {
    try {
      await this.client.setEx(key, ttl, JSON.stringify(value)); // setEx in node-redis v4 (was setex in v3)
    } catch (error) {
      console.error('Cache set error:', error);
    }
  }

  async del(key) {
    try {
      await this.client.del(key);
    } catch (error) {
      console.error('Cache delete error:', error);
    }
  }
}
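A typical way to use this class is the same cache-aside pattern shown for node-cache: check Redis first and fall back to the database on a miss. The db.query call below is the same hypothetical helper used in the earlier query examples.

const cacheManager = new CacheManager();

async function getUserCached(id) {
  const key = `user:${id}`;
  let user = await cacheManager.get(key);
  if (user === null) {
    user = await db.query('SELECT * FROM users WHERE id = ?', [id]);
    await cacheManager.set(key, user, 300); // cache for 5 minutes
  }
  return user;
}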
HTTP Optimization
1. Response Compression
const express = require('express');
const compression = require('compression');
const app = express();

// Enable gzip compression
app.use(compression({
  filter: (req, res) => {
    if (req.headers['x-no-compression']) {
      return false;
    }
    return compression.filter(req, res);
  },
  level: 6,
  threshold: 100 * 1000 // Only compress responses > 100KB
}));
2. HTTP/2 and Keep-Alive
const http2 = require('http2');
const fs = require('fs');

// HTTP/2 server
const server = http2.createSecureServer({
  key: fs.readFileSync('private-key.pem'),
  cert: fs.readFileSync('certificate.pem')
});

server.on('stream', (stream, headers) => {
  stream.respond({
    'content-type': 'application/json',
    ':status': 200
  });
  stream.end(JSON.stringify({ message: 'Hello HTTP/2!' }));
});

// Keep-Alive for HTTP/1.1 - connections are reused by default;
// tune the server's timeouts rather than setting headers per response
const express = require('express');
const app = express();
const httpServer = app.listen(3000);
httpServer.keepAliveTimeout = 5000; // close idle keep-alive sockets after 5 seconds
httpServer.headersTimeout = 6000;   // must be greater than keepAliveTimeout
Monitoring and Profiling
1. Performance Monitoring
const EventEmitter = require('events');

class PerformanceMonitor extends EventEmitter {
  constructor() {
    super();
    this.metrics = {
      requestCount: 0,
      responseTime: [],
      memoryUsage: []
    };
    this.startMemoryMonitoring();
  }

  trackRequest(req, res, next) {
    const start = process.hrtime.bigint();
    this.metrics.requestCount++;
    res.on('finish', () => {
      const duration = Number(process.hrtime.bigint() - start) / 1e6; // Convert to ms
      this.metrics.responseTime.push(duration);
      if (this.metrics.responseTime.length > 1000) {
        this.metrics.responseTime.shift(); // Keep only last 1000 measurements
      }
      this.emit('request', { duration, statusCode: res.statusCode });
    });
    next();
  }

  startMemoryMonitoring() {
    setInterval(() => {
      const usage = process.memoryUsage();
      this.metrics.memoryUsage.push({
        timestamp: Date.now(),
        rss: usage.rss,
        heapUsed: usage.heapUsed,
        heapTotal: usage.heapTotal
      });
      if (this.metrics.memoryUsage.length > 100) {
        this.metrics.memoryUsage.shift();
      }
    }, 10000); // Every 10 seconds
  }

  getMetrics() {
    const avgResponseTime = this.metrics.responseTime.length > 0
      ? this.metrics.responseTime.reduce((a, b) => a + b) / this.metrics.responseTime.length
      : 0;
    return {
      requestCount: this.metrics.requestCount,
      avgResponseTime,
      currentMemory: process.memoryUsage(),
      memoryHistory: this.metrics.memoryUsage
    };
  }
}

// Usage
const monitor = new PerformanceMonitor();
app.use(monitor.trackRequest.bind(monitor));

app.get('/metrics', (req, res) => {
  res.json(monitor.getMetrics());
});
2. CPU Profiling
// cpu-profiler.js
const fs = require('fs');
const { performance, PerformanceObserver } = require('perf_hooks');

class CPUProfiler {
  constructor() {
    this.profiles = [];
    this.observer = new PerformanceObserver((list) => {
      list.getEntries().forEach((entry) => {
        this.profiles.push({
          name: entry.name,
          duration: entry.duration,
          timestamp: entry.startTime
        });
      });
    });
    this.observer.observe({ entryTypes: ['measure'] });
  }

  startProfiling(name) {
    performance.mark(`${name}-start`);
  }

  endProfiling(name) {
    performance.mark(`${name}-end`);
    performance.measure(name, `${name}-start`, `${name}-end`);
  }

  saveProfile(filename) {
    fs.writeFileSync(filename, JSON.stringify(this.profiles, null, 2));
  }
}

// Usage
const profiler = new CPUProfiler();

function slowFunction() {
  profiler.startProfiling('slowFunction');
  // Simulate slow operation
  let sum = 0;
  for (let i = 0; i < 1000000; i++) {
    sum += Math.random();
  }
  profiler.endProfiling('slowFunction');
  return sum;
}
Production Optimization
1. PM2 Cluster Mode
// ecosystem.config.js
module.exports = {
  apps: [{
    name: 'my-app',
    script: './app.js',
    instances: 'max', // Use all CPU cores
    exec_mode: 'cluster',
    env: {
      NODE_ENV: 'production'
    },
    env_production: {
      NODE_ENV: 'production',
      PORT: 3000
    }
  }]
};
2. Graceful Shutdown
const express = require('express');
const app = express();

let server;

// Graceful shutdown
process.on('SIGTERM', gracefulShutdown);
process.on('SIGINT', gracefulShutdown);

function gracefulShutdown(signal) {
  console.log(`Received ${signal}. Starting graceful shutdown...`);
  server.close((err) => {
    if (err) {
      console.error('Error during server close:', err);
      process.exit(1);
    }
    console.log('Server closed. Exiting...');
    process.exit(0);
  });

  // Force exit after 30 seconds
  setTimeout(() => {
    console.error('Could not close connections in time, forcefully shutting down');
    process.exit(1);
  }, 30000);
}

server = app.listen(3000, () => {
  console.log('Server running on port 3000');
});
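If the process also owns a database pool or a Redis client (as in the earlier sections), closing them in the same shutdown path avoids dropping in-flight work. A minimal sketch, assuming the pool and client variables from the previous examples are in scope; it would be called from the server.close callback before process.exit(0):

async function closeResources() {
  try {
    await pool.end();    // drain the MySQL connection pool
    await client.quit(); // close the Redis connection cleanly
  } catch (err) {
    console.error('Error while closing resources:', err);
  }
}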
Conclusion
Optimizing Node.js applications requires a holistic approach covering memory management, CPU usage, database interactions, caching, and monitoring. Key takeaways:
- Understand the event loop and avoid blocking operations
- Manage memory carefully to prevent leaks and reduce GC pressure
- Use worker threads for CPU-intensive tasks
- Optimize database queries and use connection pooling
- Implement effective caching strategies
- Monitor performance continuously
- Use clustering in production for better utilization
By applying these techniques systematically, you can significantly improve your Node.js application’s performance, scalability, and reliability. Remember that optimization is an iterative process—measure, optimize, and measure again to ensure your changes have the desired effect.