Backend

Node.js Performance Optimization Strategies

Comprehensive guide to optimizing Node.js applications for better performance and scalability

January 25, 2024 · By Vikash Kumar · 7 min read
Tags: nodejs, performance, optimization, scalability

Node.js Performance Optimization Strategies

Node.js applications can achieve excellent performance when properly optimized. This guide covers essential strategies for improving the performance and scalability of your Node.js applications.

Understanding Node.js Performance

Event Loop Optimization

The event loop is the heart of Node.js. Understanding how to work with it effectively is crucial for performance.

// Avoid blocking the event loop
// Bad: Synchronous file operations
const fs = require('fs');
const blockingData = fs.readFileSync('large-file.txt', 'utf8'); // Blocks event loop

// Good: Asynchronous operations (note: `await` must be inside an async function in CommonJS)
const fsPromises = require('fs').promises;
const data = await fsPromises.readFile('large-file.txt', 'utf8'); // Non-blocking

// Better: Streaming for large files — constant memory, never loads the whole file
const stream = fs.createReadStream('large-file.txt');
stream.on('data', (chunk) => {
  // Process chunk
});

CPU-Intensive Tasks

For CPU-intensive operations, use worker threads or child processes to avoid blocking the main thread.

// Using Worker Threads to keep the main event loop responsive
const { Worker, isMainThread, parentPort, workerData } = require('worker_threads');

if (isMainThread) {
  // Main thread: spawn this same file as a worker and resolve with its result.
  function runWorker(data) {
    return new Promise((resolve, reject) => {
      const worker = new Worker(__filename, { workerData: data });
      worker.on('message', resolve);
      worker.on('error', reject);
      worker.on('exit', (code) => {
        // A non-zero exit code without a prior 'message' means the worker failed.
        if (code !== 0) {
          reject(new Error(`Worker stopped with exit code ${code}`));
        }
      });
    });
  }

  // Usage — top-level await is not available in CommonJS, so use an async IIFE
  (async () => {
    const result = await runWorker({ numbers: [1, 2, 3, 4, 5] });
    console.log(result); // 55
  })().catch(console.error);
} else {
  // Worker thread: do the CPU-heavy work off the main thread.
  function heavyComputation(numbers) {
    return numbers.reduce((sum, num) => sum + Math.pow(num, 2), 0);
  }

  parentPort.postMessage(heavyComputation(workerData.numbers));
}

Memory Management

Memory Leaks Prevention

Common sources of memory leaks and how to avoid them:

// 1. Event Listeners
class DataProcessor {
  constructor() {
    this.eventEmitter = new EventEmitter();

    // Bad: passing `this.processData` directly registers an unbound method
    // (it loses `this`) and leaves no reference to remove later.
    // Good: keep a bound reference so the listener can be detached.
    this.boundProcessData = this.processData.bind(this);
    this.eventEmitter.on('data', this.boundProcessData);
  }

  destroy() {
    // Good: Clean up listeners so the emitter (and `this`) can be collected
    this.eventEmitter.removeListener('data', this.boundProcessData);
    this.eventEmitter.removeAllListeners();
    this.eventEmitter = null;
  }
}

// 2. Timers
class PeriodicTask {
  constructor() {
    // Keep the handle: without it the interval can never be cancelled.
    this.intervalId = setInterval(() => {
      this.doWork();
    }, 1000);
  }

  destroy() {
    // Always clear timers — a live interval keeps this object reachable forever
    clearInterval(this.intervalId);
  }
}

// 3. Closures holding references
function createHandler() {
  // One million strings stay alive for as long as the returned handler
  // is referenced, even though the handler never reads them.
  const largeData = new Array(1000000).fill('data');

  return function handler(req, res) {
    // Make sure you actually need the captured data before closing over it
    res.json({ status: 'ok' });
  };
}

Memory Monitoring

// Monitor memory usage
function logMemoryUsage() {
  // Render a byte count as megabytes, rounded to two decimal places.
  const toMB = (bytes) => `${Math.round((bytes / 1024 / 1024) * 100) / 100} MB`;
  const { rss, heapTotal, heapUsed, external } = process.memoryUsage();
  console.log({
    rss: toMB(rss),
    heapTotal: toMB(heapTotal),
    heapUsed: toMB(heapUsed),
    external: toMB(external),
  });
}

// Log memory usage periodically
// NOTE: this interval keeps the event loop alive; keep the returned handle
// and clearInterval() it during shutdown.
setInterval(logMemoryUsage, 5000);

Database Optimization

Connection Pooling

// PostgreSQL with connection pooling
const { Pool } = require('pg');

// NOTE(review): credentials are hardcoded for illustration only — load them
// from environment variables (PGUSER, PGPASSWORD, ...) in real deployments.
const pool = new Pool({
  user: 'username',
  host: 'localhost',
  database: 'mydb',
  password: 'password',
  port: 5432,
  max: 20, // Maximum number of connections
  idleTimeoutMillis: 30000, // Close clients idle for more than 30s
  connectionTimeoutMillis: 2000, // Fail fast if no connection is available within 2s
});

// Efficient query execution
async function getUser(id) {
  // Check a client out of the pool explicitly so release is guaranteed below.
  const client = await pool.connect();
  try {
    const { rows } = await client.query('SELECT * FROM users WHERE id = $1', [id]);
    return rows[0];
  } finally {
    // Return the connection to the pool even if the query throws
    client.release();
  }
}

Query Optimization

// Use prepared statements for repeated queries.
// NOTE: issuing `PREPARE ... AS` through pool.query() only prepares the
// statement on ONE pooled connection, and top-level `await` is invalid in
// CommonJS. node-postgres prepares and caches the plan per connection
// automatically when the query config carries a `name`:
const getUserQuery = {
  name: 'get_user',
  text: 'SELECT * FROM users WHERE id = $1',
};

async function getUserPrepared(id) {
  const result = await pool.query({ ...getUserQuery, values: [id] });
  return result.rows[0];
}

// Batch operations
async function createUsers(users) {
  // All inserts share one transaction: either every user is created or none.
  const client = await pool.connect();
  try {
    await client.query('BEGIN');

    for (const { name, email } of users) {
      await client.query(
        'INSERT INTO users (name, email) VALUES ($1, $2)',
        [name, email]
      );
    }

    await client.query('COMMIT');
  } catch (error) {
    // Undo any partial work, then surface the failure to the caller
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}

Caching Strategies

In-Memory Caching

// Simple LRU cache built on Map's insertion-order guarantee: the first key
// in iteration order is always the least recently used entry.
class LRUCache {
  constructor(maxSize = 100) {
    this.maxSize = maxSize;
    this.cache = new Map();
  }

  // Return the cached value, or null on a miss.
  get(key) {
    if (!this.cache.has(key)) {
      return null;
    }
    // Re-insert to mark this entry as most recently used
    const value = this.cache.get(key);
    this.cache.delete(key);
    this.cache.set(key, value);
    return value;
  }

  // Insert or update a value, evicting the oldest entry at capacity.
  set(key, value) {
    if (this.cache.has(key)) {
      // Drop the stale position so re-insertion moves the key to the end
      this.cache.delete(key);
    } else if (this.cache.size >= this.maxSize) {
      // Evict the least recently used entry (first key in iteration order)
      const oldestKey = this.cache.keys().next().value;
      this.cache.delete(oldestKey);
    }
    this.cache.set(key, value);
  }
}

// Usage
const cache = new LRUCache(1000);

async function getExpensiveData(key) {
  // Check cache first. Compare against null explicitly: LRUCache.get()
  // returns null on a miss, and a truthiness check (`if (data)`) would
  // wrongly treat cached falsy values (0, '', false) as misses.
  const cached = cache.get(key);
  if (cached !== null) {
    return cached;
  }

  // Fetch from database
  const data = await database.query('SELECT * FROM expensive_view WHERE key = ?', [key]);

  // Cache the result
  cache.set(key, data);
  return data;
}

Redis Caching

const redis = require('redis');
const client = redis.createClient();
// node-redis v4+: the client must be connected before any command is sent
client.connect().catch(console.error);

// Cache with expiration (EX = TTL in seconds).
// v4 renamed the command to camelCase `setEx`; the v3 lowercase `setex`
// was callback-based and did not return a promise, so `await` had no effect.
async function cacheWithTTL(key, data, ttlSeconds = 3600) {
  await client.setEx(key, ttlSeconds, JSON.stringify(data));
}

// Get from cache with fallback to the original data source.
async function getCachedData(key, fallbackFn) {
  try {
    const hit = await client.get(key);
    if (hit) {
      return JSON.parse(hit);
    }
  } catch (error) {
    // A broken cache should degrade gracefully, not fail the request
    console.error('Cache error:', error);
  }

  // Cache miss or cache failure: go to the source of truth
  const fresh = await fallbackFn();

  // Warm the cache for the next caller (fire and forget)
  cacheWithTTL(key, fresh).catch(console.error);

  return fresh;
}

HTTP Performance

Response Compression

const express = require('express');
const compression = require('compression');

const app = express();

// Skip compression when the client explicitly opts out via header.
function shouldCompress(req, res) {
  if (req.headers['x-no-compression']) {
    return false;
  }
  // Otherwise defer to the module's default content-type filter
  return compression.filter(req, res);
}

// Enable gzip compression for responses larger than 1 KB
app.use(compression({
  level: 6, // zlib level: 1 = fastest, 9 = smallest output
  threshold: 1024,
  filter: shouldCompress,
}));

HTTP/2 and Keep-Alive

const http2 = require('http2');
const fs = require('fs');

// HTTP/2 server (browsers require TLS for HTTP/2)
// Renamed from `server`: the snippet declared `const server` twice, which
// is a SyntaxError when both examples live in one file.
const http2Server = http2.createSecureServer({
  key: fs.readFileSync('private-key.pem'),
  cert: fs.readFileSync('certificate.pem')
});

http2Server.on('stream', (stream, headers) => {
  stream.respond({
    'content-type': 'application/json',
    ':status': 200
  });

  stream.end(JSON.stringify({ message: 'Hello HTTP/2!' }));
});

// For HTTP/1.1, enable keep-alive so TCP connections are reused
const http = require('http');
const httpServer = http.createServer((req, res) => {
  res.setHeader('Connection', 'keep-alive');
  res.setHeader('Keep-Alive', 'timeout=5, max=1000');
  // Handle request
});

Monitoring and Profiling

Performance Monitoring

// Custom performance monitoring built on process.hrtime.bigint() (nanoseconds)
class PerformanceMonitor {
  constructor() {
    this.metrics = new Map();
  }

  // Record the start timestamp under `name`.
  startTimer(name) {
    this.metrics.set(name, process.hrtime.bigint());
  }

  // Log and return elapsed milliseconds since startTimer(name);
  // returns undefined when no matching timer was started.
  endTimer(name) {
    const startedAt = this.metrics.get(name);
    if (!startedAt) {
      return;
    }
    const elapsedMs = Number(process.hrtime.bigint() - startedAt) / 1e6;
    console.log(`${name}: ${elapsedMs.toFixed(2)}ms`);
    this.metrics.delete(name);
    return elapsedMs;
  }

  // Express middleware that logs per-request wall time when the response finishes.
  middleware() {
    return (req, res, next) => {
      const requestStart = process.hrtime.bigint();

      res.on('finish', () => {
        const elapsedMs = Number(process.hrtime.bigint() - requestStart) / 1e6;
        console.log(`${req.method} ${req.path}: ${elapsedMs.toFixed(2)}ms`);
      });

      next();
    };
  }
}

// Time every request by installing the monitor's Express middleware.
const monitor = new PerformanceMonitor();
app.use(monitor.middleware());

CPU Profiling

// CPU profiling with the built-in V8 inspector protocol
const { Session } = require('inspector');
const fs = require('fs');

// Open an inspector session and start the sampling profiler.
// Returns the session so the caller can stop it later.
function startProfiling() {
  const session = new Session();
  session.connect();

  // Protocol calls are sequenced through callbacks: enable first, then start
  session.post('Profiler.enable', () =>
    session.post('Profiler.start', () => console.log('Profiling started'))
  );

  return session;
}

// Stop profiling and save the result in Chrome DevTools' .cpuprofile format.
function stopProfiling(session) {
  session.post('Profiler.stop', (err, result) => {
    // Don't destructure `{ profile }` in the parameter list: when `err` is
    // set the second argument may be undefined, and the destructure would
    // throw before the error could be checked.
    if (err) {
      console.error('Profiler.stop failed:', err);
    } else {
      fs.writeFileSync('profile.cpuprofile', JSON.stringify(result.profile));
      console.log('Profile saved to profile.cpuprofile');
    }
    session.disconnect();
  });
}

Deployment Optimization

Clustering

const cluster = require('cluster');
const numCPUs = require('os').cpus().length;

// `isPrimary` replaces the deprecated `isMaster` (Node 16+);
// fall back so the snippet still runs on older runtimes.
if (cluster.isPrimary ?? cluster.isMaster) {
  console.log(`Master ${process.pid} is running`);

  // Fork one worker per CPU core
  for (let i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  // Restart dead workers to keep the pool at full size.
  // NOTE(review): add a backoff here in production — an app that crashes
  // on boot would otherwise restart in a tight loop.
  cluster.on('exit', (worker, code, signal) => {
    console.log(`Worker ${worker.process.pid} died`);
    cluster.fork(); // Restart worker
  });
} else {
  // Worker process: each worker runs the full app; the port is shared
  require('./app.js');
  console.log(`Worker ${process.pid} started`);
}

Process Management with PM2

// ecosystem.config.js — PM2 process manager configuration
module.exports = {
  apps: [{
    name: 'my-app',
    script: './app.js',
    instances: 'max', // Use all CPU cores
    exec_mode: 'cluster', // PM2 cluster mode: load-balances across instances
    env: {
      NODE_ENV: 'production',
      PORT: 3000
    },
    max_memory_restart: '1G', // Restart the process if it exceeds 1 GB of memory
    node_args: '--max-old-space-size=1024' // Cap the V8 old-space heap at ~1 GB
  }]
};

Conclusion

Node.js performance optimization requires attention to multiple areas: event loop management, memory usage, database interactions, caching, and monitoring. By implementing these strategies systematically, you can build highly performant and scalable Node.js applications.

Key takeaways:

  • Keep the event loop free with asynchronous operations
  • Use worker threads for CPU-intensive tasks
  • Implement proper caching strategies
  • Monitor memory usage and prevent leaks
  • Optimize database queries and use connection pooling
  • Use clustering for production deployments

Share this article

Click any platform to share • Preview shows how your content will appear