
Node.js Performance Optimization: Complete Guide


🚀 Introduction to Node.js Performance

Node.js is built for high-performance, event-driven applications. However, without proper optimization, your applications can suffer from memory leaks, blocking operations, and poor scalability.

✨ Key Performance Areas

  • Memory Management: Prevent leaks and optimize usage
  • CPU Optimization: Efficient algorithms and async operations
  • I/O Operations: Non-blocking and streaming
  • Caching Strategies: Reduce redundant operations
  • Database Queries: Optimize data access patterns

💾 Memory Optimization

1. Memory Leak Prevention

// Bad: setInterval holds a reference to the instance, so it (and its large array) is never garbage collected
class BadExample {
  constructor() {
    this.data = new Array(1000000).fill('data');
    setInterval(() => {
      this.processData();
    }, 1000);
  }
  
  processData() {
    // Processing without cleanup
  }
}

// Good: Proper cleanup
class GoodExample {
  constructor() {
    this.data = new Array(1000000).fill('data');
    this.interval = setInterval(() => {
      this.processData();
    }, 1000);
  }
  
  processData() {
    // Processing logic
  }
  
  destroy() {
    if (this.interval) {
      clearInterval(this.interval);
      this.interval = null;
    }
    this.data = null;
  }
}
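
Whoever creates the instance is then responsible for calling destroy() once it is no longer needed, for example:

const example = new GoodExample();

// ...later, when the instance is done
example.destroy();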

2. Object Pool Pattern

class ObjectPool {
  constructor(createFn, resetFn, initialSize = 10) {
    this.createFn = createFn;
    this.resetFn = resetFn;
    this.pool = [];
    this.available = [];
    
    // Pre-populate pool
    for (let i = 0; i < initialSize; i++) {
      this.pool.push(this.createFn());
      this.available.push(i);
    }
  }
  
  acquire() {
    if (this.available.length === 0) {
      // Expand pool if needed
      const index = this.pool.length;
      this.pool.push(this.createFn());
      this.available.push(index);
    }
    
    const index = this.available.pop();
    return {
      object: this.pool[index],
      release: () => this.release(index)
    };
  }
  
  release(index) {
    this.resetFn(this.pool[index]);
    this.available.push(index);
  }
}

// Usage
const pool = new ObjectPool(
  () => ({ data: null, processed: false }),
  (obj) => { obj.data = null; obj.processed = false; }
);

const { object, release } = pool.acquire();
// Use object
release();

⚡ CPU Optimization

1. Worker Threads for CPU-Intensive Tasks

const { Worker, isMainThread, parentPort, workerData } = require('worker_threads');

if (isMainThread) {
  // Main thread
  function processLargeDataset(data) {
    return new Promise((resolve, reject) => {
      const worker = new Worker(__filename, {
        workerData: data
      });
      
      worker.on('message', resolve);
      worker.on('error', reject);
      worker.on('exit', (code) => {
        if (code !== 0) reject(new Error(`Worker stopped with exit code ${code}`));
      });
    });
  }
  
  // Usage
  const largeData = Array.from({ length: 1000000 }, (_, i) => i);
  processLargeDataset(largeData).then(result => {
    console.log('Processing complete:', result);
  });
} else {
  // Worker thread
  const data = workerData;
  const result = data.map(x => x * x).reduce((a, b) => a + b, 0);
  parentPort.postMessage(result);
}

2. Efficient Algorithms

// Bad: O(n²) complexity
function findDuplicates(arr) {
  const duplicates = [];
  for (let i = 0; i < arr.length; i++) {
    for (let j = i + 1; j < arr.length; j++) {
      if (arr[i] === arr[j] && !duplicates.includes(arr[i])) {
        duplicates.push(arr[i]);
      }
    }
  }
  return duplicates;
}

// Good: O(n) complexity
function findDuplicatesOptimized(arr) {
  const seen = new Set();
  const duplicates = new Set();
  
  for (const item of arr) {
    if (seen.has(item)) {
      duplicates.add(item);
    } else {
      seen.add(item);
    }
  }
  
  return Array.from(duplicates);
}

🔄 Async Operations

1. Promise Pool for Concurrency Control

class PromisePool {
  constructor(concurrency = 5) {
    this.concurrency = concurrency;
    this.running = 0;
    this.queue = [];
  }
  
  async add(promiseFactory) {
    return new Promise((resolve, reject) => {
      this.queue.push({
        promiseFactory,
        resolve,
        reject
      });
      this.process();
    });
  }
  
  async process() {
    if (this.running >= this.concurrency || this.queue.length === 0) {
      return;
    }
    
    this.running++;
    const { promiseFactory, resolve, reject } = this.queue.shift();
    
    try {
      const result = await promiseFactory();
      resolve(result);
    } catch (error) {
      reject(error);
    } finally {
      this.running--;
      this.process();
    }
  }
}

// Usage
const pool = new PromisePool(3);

async function fetchUserData(userId) {
  return pool.add(async () => {
    const response = await fetch(`/api/users/${userId}`);
    return response.json();
  });
}
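
With this in place you can start a large batch of requests at once and the pool keeps at most three in flight (the user IDs below are illustrative):

const userIds = Array.from({ length: 100 }, (_, i) => i + 1);

Promise.all(userIds.map(fetchUserData)).then(users => {
  console.log(`Fetched ${users.length} users`);
});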

2. Streaming for Large Data

const { Transform, pipeline } = require('stream');
const fs = require('fs');
const JSONStream = require('JSONStream'); // npm package: streaming JSON parse/stringify

// Transform stream for processing large files
class DataProcessor extends Transform {
  constructor(options = {}) {
    super({ ...options, objectMode: true });
    this.processedCount = 0;
  }
  
  _transform(chunk, encoding, callback) {
    try {
      // Process each chunk
      const processed = this.processChunk(chunk);
      this.processedCount++;
      
      if (this.processedCount % 1000 === 0) {
        console.log(`Processed ${this.processedCount} records`);
      }
      
      callback(null, processed);
    } catch (error) {
      callback(error);
    }
  }
  
  processChunk(chunk) {
    // Your processing logic here
    return {
      ...chunk,
      processed: true,
      timestamp: Date.now()
    };
  }
}

// Usage
pipeline(
  fs.createReadStream('large-file.json'),
  JSONStream.parse('*'),        // emit each top-level array element as an object
  new DataProcessor(),
  JSONStream.stringify(),       // serialize processed objects back into a JSON array
  fs.createWriteStream('processed-file.json'),
  (err) => {
    if (err) {
      console.error('Pipeline failed:', err);
    } else {
      console.log('Pipeline succeeded');
    }
  }
);

🗄️ Database Optimization

1. Connection Pooling

const { Pool } = require('pg');

// Configure connection pool
const pool = new Pool({
  user: 'dbuser',
  host: 'localhost',
  database: 'mydb',
  password: 'secretpassword',
  port: 5432,
  max: 20, // Maximum number of clients in the pool
  idleTimeoutMillis: 30000, // Close idle clients after 30 seconds
  connectionTimeoutMillis: 2000, // Return an error after 2 seconds if connection could not be established
});

// Efficient parameterized query: fetch many users in a single round trip
async function getUsersByIds(userIds) {
  const query = 'SELECT * FROM users WHERE id = ANY($1)';
  const values = [userIds];
  
  try {
    const result = await pool.query(query, values);
    return result.rows;
  } catch (error) {
    console.error('Database query error:', error);
    throw error;
  }
}

// Batch operations
async function insertUsers(users) {
  const client = await pool.connect();
  
  try {
    await client.query('BEGIN');
    
    for (const user of users) {
      await client.query(
        'INSERT INTO users (name, email) VALUES ($1, $2)',
        [user.name, user.email]
      );
    }
    
    await client.query('COMMIT');
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}
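
The transaction above still issues one INSERT per user. When the rows are independent, a single multi-row INSERT reduces round trips; a minimal sketch against the same users table might look like this:

async function insertUsersBatch(users) {
  // Build one parameterized statement: VALUES ($1, $2), ($3, $4), ...
  const values = [];
  const placeholders = users.map((user, i) => {
    values.push(user.name, user.email);
    return `($${i * 2 + 1}, $${i * 2 + 2})`;
  });
  
  await pool.query(
    `INSERT INTO users (name, email) VALUES ${placeholders.join(', ')}`,
    values
  );
}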

2. Query Optimization

// Bad: N+1 query problem
async function getUsersWithPosts() {
  const { rows: users } = await db.query('SELECT * FROM users');
  
  for (const user of users) {
    const { rows: posts } = await db.query('SELECT * FROM posts WHERE user_id = $1', [user.id]);
    user.posts = posts;
  }
  
  return users;
}

// Good: Single query with JOIN
async function getUsersWithPostsOptimized() {
  const query = `
    SELECT 
      u.id, u.name, u.email,
      p.id as post_id, p.title, p.content, p.created_at
    FROM users u
    LEFT JOIN posts p ON u.id = p.user_id
    ORDER BY u.id, p.created_at DESC
  `;
  
  const result = await db.query(query);
  
  // Group posts by user
  const usersMap = new Map();
  
  for (const row of result.rows) {
    if (!usersMap.has(row.id)) {
      usersMap.set(row.id, {
        id: row.id,
        name: row.name,
        email: row.email,
        posts: []
      });
    }
    
    if (row.post_id) {
      usersMap.get(row.id).posts.push({
        id: row.post_id,
        title: row.title,
        content: row.content,
        created_at: row.created_at
      });
    }
  }
  
  return Array.from(usersMap.values());
}

🚀 Caching Strategies

1. In-Memory Caching

class MemoryCache {
  constructor(ttl = 60000) { // 1 minute default TTL
    this.cache = new Map();
    this.ttl = ttl;
  }
  
  set(key, value, customTtl) {
    const expiresAt = Date.now() + (customTtl || this.ttl);
    this.cache.set(key, { value, expiresAt });
  }
  
  get(key) {
    const item = this.cache.get(key);
    
    if (!item) {
      return null;
    }
    
    if (Date.now() > item.expiresAt) {
      this.cache.delete(key);
      return null;
    }
    
    return item.value;
  }
  
  delete(key) {
    this.cache.delete(key);
  }
  
  clear() {
    this.cache.clear();
  }
}

// Usage with caching middleware
const cache = new MemoryCache(300000); // 5 minutes

function withCache(keyGenerator, ttl) {
  return function(target, propertyName, descriptor) {
    const method = descriptor.value;
    
    descriptor.value = async function(...args) {
      const key = keyGenerator(...args);
      const cached = cache.get(key);
      
      if (cached) {
        return cached;
      }
      
      const result = await method.apply(this, args);
      cache.set(key, result, ttl);
      
      return result;
    };
    
    return descriptor;
  };
}

// Usage (decorator syntax requires a transpiler such as Babel or TypeScript with legacy decorators enabled)
class UserService {
  @withCache(
    (id) => `user:${id}`,
    300000 // 5 minutes
  )
  async getUserById(id) {
    // Expensive database operation
    return await db.query('SELECT * FROM users WHERE id = $1', [id]);
  }
}

2. Redis Caching

const redis = require('redis');

// node-redis v4+ exposes a promise-based API and needs an explicit connect
const client = redis.createClient();
client.connect().catch(console.error);

class RedisCache {
  constructor(ttl = 3600) { // 1 hour default TTL
    this.ttl = ttl;
  }
  
  async set(key, value, customTtl) {
    const serialized = JSON.stringify(value);
    const ttl = customTtl || this.ttl;
    
    await client.setEx(key, ttl, serialized);
  }
  
  async get(key) {
    const serialized = await client.get(key);
    
    if (!serialized) {
      return null;
    }
    
    try {
      return JSON.parse(serialized);
    } catch (error) {
      console.error('Cache deserialization error:', error);
      return null;
    }
  }
  
  async delete(key) {
    await client.del(key);
  }
  
  async clear(pattern = '*') {
    const keys = await client.keys(pattern);
    if (keys.length > 0) {
      await client.del(keys);
    }
  }
}
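
A typical cache-aside usage wraps an expensive lookup; here is a minimal sketch reusing the pg pool from the database section (the products table and query are placeholders):

const productCache = new RedisCache(600); // 10 minutes

async function getProductById(id) {
  const cacheKey = `product:${id}`;
  
  const cached = await productCache.get(cacheKey);
  if (cached) {
    return cached;
  }
  
  const result = await pool.query('SELECT * FROM products WHERE id = $1', [id]);
  const product = result.rows[0] || null;
  
  if (product) {
    await productCache.set(cacheKey, product);
  }
  
  return product;
}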

📊 Monitoring and Profiling

1. Performance Monitoring

const { performance, PerformanceObserver } = require('perf_hooks');

// Custom performance observer
const obs = new PerformanceObserver((list) => {
  const entries = list.getEntries();
  entries.forEach((entry) => {
    console.log(`${entry.name}: ${entry.duration}ms`);
  });
});

obs.observe({ entryTypes: ['measure'] });

// Performance measurement decorator
function measurePerformance(name) {
  return function(target, propertyName, descriptor) {
    const method = descriptor.value;
    
    descriptor.value = async function(...args) {
      performance.mark(`${name}-start`);
      const result = await method.apply(this, args);
      performance.mark(`${name}-end`);
      
      // The PerformanceObserver above logs the measured duration
      performance.measure(name, `${name}-start`, `${name}-end`);
      
      return result;
    };
    
    return descriptor;
  };
}

// Usage (same transpiler requirement as the caching decorator above)
class ApiService {
  @measurePerformance('fetchUserData')
  async fetchUserData(userId) {
    // API call implementation
  }
}

2. Memory Monitoring

function logMemoryUsage() {
  const used = process.memoryUsage();
  console.log({
    rss: `${Math.round(used.rss / 1024 / 1024)} MB`,
    heapTotal: `${Math.round(used.heapTotal / 1024 / 1024)} MB`,
    heapUsed: `${Math.round(used.heapUsed / 1024 / 1024)} MB`,
    external: `${Math.round(used.external / 1024 / 1024)} MB`
  });
}

// Monitor memory every 30 seconds
setInterval(logMemoryUsage, 30000);

// Force garbage collection (only available when Node is started with --expose-gc; for testing only)
if (global.gc) {
  setInterval(() => {
    global.gc();
    console.log('Garbage collection triggered');
  }, 60000);
}

🎯 Best Practices Summary

  1. Use streaming for large data - Process data in chunks to avoid memory issues
  2. Implement proper error handling - Prevent crashes and ensure graceful degradation
  3. Use connection pooling - Efficiently manage database connections
  4. Cache frequently accessed data - Reduce redundant operations
  5. Monitor performance metrics - Identify bottlenecks and optimize accordingly
  6. Use worker threads for CPU-intensive tasks - Keep the main thread responsive
  7. Optimize database queries - Use proper indexing and avoid N+1 queries
  8. Implement rate limiting - Protect your application from abuse (see the sketch after this list)
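
Rate limiting is the only item above without an example earlier in this guide. A minimal in-memory sliding-window limiter for Express-style middleware might look like the sketch below (the window, limit, and route are illustrative; a shared store such as Redis is needed once you run multiple processes):

function rateLimit({ windowMs = 60000, max = 100 } = {}) {
  const hits = new Map(); // key -> timestamps of recent requests
  
  return function (req, res, next) {
    const key = req.ip;
    const now = Date.now();
    const recent = (hits.get(key) || []).filter(ts => now - ts < windowMs);
    
    if (recent.length >= max) {
      res.statusCode = 429;
      return res.end('Too Many Requests');
    }
    
    recent.push(now);
    hits.set(key, recent);
    next();
  };
}

// Usage with Express (illustrative)
// app.use('/api', rateLimit({ windowMs: 60000, max: 100 }));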

🎉 Conclusion

Node.js performance optimization is an ongoing process that requires understanding your application’s specific needs and bottlenecks. By implementing these techniques and continuously monitoring your application’s performance, you can build highly scalable and efficient Node.js applications.

Remember to:

  • Profile before optimizing
  • Measure the impact of changes
  • Consider the trade-offs of each optimization
  • Keep your code readable and maintainable

Happy optimizing! 🚀