Q20: 如何优化 Node.js 应用的性能?

阅读时长约 5 分钟

Node.js 面试题详细答案 - Q20

Q20: 如何优化 Node.js 应用的性能?

性能优化概述

Node.js 性能优化涉及多个方面,包括代码优化、内存管理、I/O 优化、缓存策略等。

代码优化

1. 避免阻塞操作
// 避免阻塞 - 使用异步操作
const fs = require('fs').promises

// Bad example - blocking (shown for contrast only).
// Fix: `fs` above was assigned require('fs').promises, which has no
// readFileSync — go through the core fs module directly so the
// counter-example actually runs.
function readFileSync() {
  const data = require('fs').readFileSync('large-file.txt')
  return data
}

// Good example - non-blocking: yields to the event loop while the
// file is read.
async function readFileAsync() {
  const contents = await fs.readFile('large-file.txt')
  return contents
}

// 使用流处理大文件
const fs = require('fs')
const { Transform } = require('stream')

// Process a large file through streams so it is never buffered whole
// in memory.
function processLargeFile() {
  const source = fs.createReadStream('large-file.txt')
  const sink = fs.createWriteStream('output.txt')

  // Uppercase each chunk as it flows through the pipeline.
  const upperCase = new Transform({
    transform(chunk, encoding, done) {
      done(null, chunk.toString().toUpperCase())
    },
  })

  source.pipe(upperCase).pipe(sink)
}
2. 优化循环和算法
// Loop styles compared — all three compute the same sum.
// Fix: the original comments labeled the indexed for-loop the "wrong
// way" and reduce the "right way". That ranking is backwards for raw
// speed (a plain for-loop is typically the fastest of the three in V8);
// these are equivalent alternatives, so pick the most readable one.
function optimizedLoop() {
  const arr = new Array(1000000).fill(0).map((_, i) => i)

  // Classic indexed loop — verbose but fast.
  let sum = 0
  for (let i = 0; i < arr.length; i++) {
    sum += arr[i]
  }

  // Functional style — reduce.
  const sum2 = arr.reduce((acc, val) => acc + val, 0)

  // for...of — a readable middle ground.
  let sum3 = 0
  for (const val of arr) {
    sum3 += val
  }

  return { sum, sum2, sum3 }
}

// Algorithmic-complexity counter-example: quadratic duplicate finder.
// Compares every pair, so a value occurring k times is reported
// k*(k-1)/2 times.
function findDuplicates(arr) {
  // O(n²) complexity
  const result = []
  for (let left = 0; left < arr.length; left++) {
    for (let right = left + 1; right < arr.length; right++) {
      if (arr[left] === arr[right]) {
        result.push(arr[left])
      }
    }
  }
  return result
}

// Linear duplicate finder: one pass with two Sets — `visited` records
// everything seen, `repeated` collects each duplicate exactly once.
function findDuplicatesOptimized(arr) {
  // O(n) complexity
  const visited = new Set()
  const repeated = new Set()

  for (const value of arr) {
    if (visited.has(value)) {
      repeated.add(value)
    } else {
      visited.add(value)
    }
  }

  return [...repeated]
}

内存管理

1. 避免内存泄漏
// Minimal event emitter with explicit listener removal, so long-lived
// emitters do not accumulate forgotten handlers (a classic leak).
class EventEmitter {
  constructor() {
    this.listeners = new Map()
  }

  // Register a listener for an event.
  on(event, listener) {
    if (!this.listeners.has(event)) {
      this.listeners.set(event, [])
    }
    this.listeners.get(event).push(listener)
  }

  // Remove a previously registered listener.
  off(event, listener) {
    if (this.listeners.has(event)) {
      const listeners = this.listeners.get(event)
      const index = listeners.indexOf(listener)
      if (index > -1) {
        listeners.splice(index, 1)
      }
    }
  }

  // Invoke all listeners registered for an event.
  // Fix: iterate over a snapshot of the listener array — the original
  // forEach'd the live array, so a listener calling off() during emit
  // shifted indices and silently skipped the next listener.
  emit(event, ...args) {
    const listeners = this.listeners.get(event)
    if (listeners) {
      for (const listener of [...listeners]) {
        listener(...args)
      }
    }
  }
}

// WeakMap-keyed cache: entries vanish automatically once the key object
// becomes unreachable, so the cache itself cannot cause a leak.
const cache = new WeakMap()

function getCachedData(obj) {
  if (cache.has(obj)) {
    return cache.get(obj)
  }

  // Cache miss: compute once and remember the result for this object.
  const computed = expensiveOperation(obj)
  cache.set(obj, computed)
  return computed
}
2. 垃圾回收优化
// Log a snapshot of the current process memory usage, in megabytes.
function monitorMemory() {
  const toMB = (bytes) => Math.round(bytes / 1024 / 1024)
  const usage = process.memoryUsage()
  console.log('内存使用情况:')
  console.log(`RSS: ${toMB(usage.rss)} MB`)
  console.log(`Heap Total: ${toMB(usage.heapTotal)} MB`)
  console.log(`Heap Used: ${toMB(usage.heapUsed)} MB`)
  console.log(`External: ${toMB(usage.external)} MB`)
}

// 定期清理缓存
// Size- and TTL-bounded cache: the oldest insertion is dropped when
// capacity is reached, and entries past their time-to-live read as absent.
class CacheManager {
  constructor(maxSize = 1000, ttl = 3600000) {
    this.cache = new Map()
    this.maxSize = maxSize
    this.ttl = ttl
  }

  // Store a value, evicting the oldest entry when at capacity.
  set(key, value) {
    if (this.cache.size >= this.maxSize) {
      // Map preserves insertion order, so the first key is the oldest.
      const oldestKey = this.cache.keys().next().value
      this.cache.delete(oldestKey)
    }
    this.cache.set(key, { value, timestamp: Date.now() })
  }

  // Return the cached value, or null when missing or expired.
  // Expired entries are removed eagerly on read.
  get(key) {
    const entry = this.cache.get(key)
    if (!entry) return null

    const expired = Date.now() - entry.timestamp > this.ttl
    if (expired) {
      this.cache.delete(key)
      return null
    }
    return entry.value
  }

  // Sweep out every expired entry in one pass.
  cleanup() {
    const now = Date.now()
    for (const [key, entry] of this.cache.entries()) {
      if (now - entry.timestamp > this.ttl) {
        this.cache.delete(key)
      }
    }
  }
}

I/O 优化

1. 并发处理
// Fire all requests at once and resolve with every response, in input
// order (rejects on the first failure, like Promise.all).
async function processConcurrentRequests(urls) {
  return Promise.all(urls.map((url) => fetch(url)))
}

// Fetch all URLs while keeping at most `limit` requests in flight.
// Fix: the original processed fixed-size batches and waited for the whole
// batch before starting the next, so one slow response stalled every idle
// slot (head-of-line blocking). A shared-cursor worker pool keeps each
// slot busy until the list is exhausted. Results remain in input order;
// like before, the first rejection rejects the whole call.
async function processWithConcurrencyLimit(urls, limit = 5) {
  const results = new Array(urls.length)
  let cursor = 0

  // Each worker repeatedly claims the next unclaimed index. The claim
  // (read + increment) happens synchronously, so workers never collide.
  const worker = async () => {
    while (cursor < urls.length) {
      const index = cursor++
      results[index] = await fetch(urls[index])
    }
  }

  const workerCount = Math.max(1, Math.min(limit, urls.length))
  await Promise.all(Array.from({ length: workerCount }, () => worker()))
  return results
}

// Run every request and separate fulfilled responses from rejections,
// so one failing URL does not discard the successful results.
async function processWithErrorHandling(urls) {
  const settled = await Promise.allSettled(urls.map((url) => fetch(url)))

  const successful = []
  const failed = []
  for (const outcome of settled) {
    if (outcome.status === 'fulfilled') {
      successful.push(outcome.value)
    } else {
      failed.push(outcome.reason)
    }
  }

  return { successful, failed }
}
2. 流式处理
// 流式处理大文件
const fs = require('fs')
const { Transform } = require('stream')

// Stream-copy a file while uppercasing it, without buffering it in memory.
// Fix: `.on('error')` after the final `.pipe()` only listens on the WRITE
// stream — errors from the read or transform stage (e.g. a missing input
// file) were unhandled and crashed the process. Every stage now reports
// through the same handler. (In new code prefer stream.pipeline(), which
// wires this up automatically.)
function processLargeFileStream(inputFile, outputFile) {
  const readStream = fs.createReadStream(inputFile)
  const writeStream = fs.createWriteStream(outputFile)

  const processStream = new Transform({
    transform(chunk, encoding, callback) {
      // Transform each chunk as it passes through.
      const processed = chunk.toString().toUpperCase()
      callback(null, processed)
    },
  })

  const onError = (error) => {
    console.error('处理错误:', error)
  }
  readStream.on('error', onError)
  processStream.on('error', onError)

  readStream
    .pipe(processStream)
    .pipe(writeStream)
    .on('finish', () => {
      console.log('文件处理完成')
    })
    .on('error', onError)
}

// 流式 JSON 处理
const { Transform } = require('stream')
const JSONStream = require('JSONStream')

// Stream-process a large JSON file object-by-object instead of parsing it
// in one shot. Relies on the third-party `JSONStream` package required
// above; `fs` comes from an earlier snippet in this article.
// NOTE(review): no 'error' handlers are attached anywhere in this pipe
// chain, so a malformed or missing file crashes the process — prefer
// stream.pipeline() in real code.
function processLargeJSONFile(inputFile, outputFile) {
  const readStream = fs.createReadStream(inputFile)
  const writeStream = fs.createWriteStream(outputFile)

  const processStream = new Transform({
    objectMode: true,
    transform(chunk, encoding, callback) {
      // `chunk` is a parsed JSON value here: objectMode is on and
      // JSONStream.parse('*') emits one value per top-level element.
      const processed = {
        ...chunk,
        processed: true,
        timestamp: new Date().toISOString(),
      }
      callback(null, processed)
    },
  })

  readStream
    .pipe(JSONStream.parse('*')) // emit each element individually
    .pipe(processStream)
    .pipe(JSONStream.stringify()) // reassemble into a JSON array
    .pipe(writeStream)
}

缓存策略

1. 内存缓存
// Least-recently-used cache built on Map's insertion ordering: every read
// re-inserts its key, so the first key in the Map is always the stalest
// and is the one evicted when capacity is exceeded.
class LRUCache {
  constructor(capacity) {
    this.capacity = capacity
    this.cache = new Map()
  }

  // Return the value for key (marking it most-recently-used), or null.
  get(key) {
    if (!this.cache.has(key)) {
      return null
    }
    const value = this.cache.get(key)
    // Re-insert to move the key to the "newest" end of the Map.
    this.cache.delete(key)
    this.cache.set(key, value)
    return value
  }

  // Insert or refresh a key, evicting the least-recently-used entry
  // when the cache is already full.
  set(key, value) {
    if (this.cache.has(key)) {
      this.cache.delete(key)
    } else if (this.cache.size >= this.capacity) {
      const stalest = this.cache.keys().next().value
      this.cache.delete(stalest)
    }
    this.cache.set(key, value)
  }
}

// 使用缓存
const cache = new LRUCache(100)

// Read-through cache helper: serve from the cache when possible, otherwise
// load from the database and remember the result.
// Fix: the original tested `if (!data)`, which re-fetched whenever the
// cached value was falsy (0, '', false). Only null/undefined — an actual
// cache miss (LRUCache.get returns null) — should trigger a reload.
async function getCachedData(key) {
  let data = cache.get(key)
  if (data == null) {
    data = await fetchDataFromDatabase(key)
    cache.set(key, data)
  }
  return data
}
2. Redis 缓存
const redis = require('redis')
const client = redis.createClient()

// Caching decorator (legacy/TypeScript-style decorator signature): wraps a
// method so results are served from Redis, keyed by method name + args.
// NOTE(review): plain Node.js does not execute decorator syntax — using
// this requires a transpiler (Babel / TypeScript with legacy decorators).
function cache(ttl = 3600) {
  return function (target, propertyName, descriptor) {
    const method = descriptor.value

    descriptor.value = async function (...args) {
      // Cache key: method name plus the serialized argument list.
      const key = `${propertyName}:${JSON.stringify(args)}`

      try {
        const cached = await client.get(key)
        if (cached) {
          return JSON.parse(cached)
        }

        const result = await method.apply(this, args)
        // NOTE(review): `setex` is the node-redis v3 spelling; v4 renamed
        // it to `setEx` and needs an explicit connect() — confirm against
        // the redis client version in use.
        await client.setex(key, ttl, JSON.stringify(result))
        return result
      } catch (error) {
        // On any Redis failure, fall back to calling the real method.
        console.error('缓存错误:', error)
        return method.apply(this, args)
      }
    }
  }
}

// Example usage of the caching decorator.
// NOTE(review): `@cache(...)` is not valid syntax in plain Node.js — this
// class only compiles under a transpiler with decorator support enabled.
class UserService {
  @cache(1800) // cache for 30 minutes
  async getUserById(id) {
    // Load the user via the external `database` handle (defined elsewhere).
    return await database.getUser(id)
  }
}

数据库优化

1. 连接池优化
const mysql = require('mysql2/promise')

// Shared connection pool — reusing connections avoids per-request setup.
// Fix: the original passed `acquireTimeout`, `timeout` and `reconnect`,
// which are options of the legacy `mysql` driver; mysql2 does not support
// them and logs "Ignoring invalid configuration option" warnings. mysql2's
// supported equivalent for connection setup time is `connectTimeout`.
const pool = mysql.createPool({
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'test',
  waitForConnections: true,
  connectionLimit: 20,
  queueLimit: 0, // 0 = unlimited queued connection requests
  connectTimeout: 60000, // ms allowed for establishing a connection
  // Result-shaping options
  multipleStatements: false, // keep off: limits SQL-injection blast radius
  dateStrings: true,
  supportBigNumbers: true,
  bigNumberStrings: true,
})

// Insert many rows in one statement instead of one round-trip per row.
// Fixes: bulk `VALUES ?` array expansion is only supported by query() —
// execute() uses true prepared statements and cannot expand a nested
// array, so the original threw at runtime. Also short-circuits an empty
// batch instead of sending invalid SQL.
async function batchInsert(records) {
  if (records.length === 0) return 0

  const values = records.map((record) => [
    record.name,
    record.email,
    record.age,
  ])

  const [result] = await pool.query(
    'INSERT INTO users (name, email, age) VALUES ?',
    [values]
  )

  return result.affectedRows
}
2. 查询优化
// Create the indexes that back the most common user lookups.
async function createIndexes() {
  const statements = [
    'CREATE INDEX idx_user_email ON users(email)',
    'CREATE INDEX idx_user_age ON users(age)',
    'CREATE INDEX idx_user_created_at ON users(created_at)',
  ]
  // Run one at a time, matching the original await-per-statement order.
  for (const sql of statements) {
    await pool.execute(sql)
  }
}

// Offset-based pagination over users.
// Fix: the original claimed "cursor pagination" but fed the computed
// OFFSET into `WHERE id > ?` — that only returns correct pages when ids
// form a gapless 1..N sequence, and silently skips/repeats rows otherwise.
// This is honest LIMIT/OFFSET pagination; true cursor pagination would
// accept the last seen id instead of a page number.
async function getUsersPaginated(page = 1, limit = 10) {
  const offset = (page - 1) * limit

  // NOTE(review): some mysql2 versions are picky about the type of
  // LIMIT/OFFSET placeholders — verify numeric params work with the
  // driver version in use.
  const [rows] = await pool.execute(
    'SELECT * FROM users ORDER BY id LIMIT ? OFFSET ?',
    [limit, offset]
  )

  return rows
}

// Simple in-memory cache for query results.
const queryCache = new Map()

// Run a query through the cache; identical (sql, params) pairs within the
// 5-minute window share a single database round-trip.
// Fix: the expiry timer is now unref()ed so a populated cache cannot keep
// the Node.js process alive after all real work has finished.
async function cachedQuery(sql, params = []) {
  const key = `${sql}:${JSON.stringify(params)}`

  if (queryCache.has(key)) {
    return queryCache.get(key)
  }

  const [rows] = await pool.execute(sql, params)
  queryCache.set(key, rows)

  // Expire the entry after 5 minutes. unref() stops the timer from
  // blocking process exit (optional chaining makes it a no-op outside
  // Node.js, where timers have no unref method).
  const timer = setTimeout(() => {
    queryCache.delete(key)
  }, 300000)
  timer.unref?.()

  return rows
}

监控和调试

1. 性能监控
// Express-style middleware: on every finished response, log the method,
// URL, status, wall-clock duration and memory footprint, and warn about
// requests slower than one second.
function performanceMiddleware(req, res, next) {
  const startedAt = Date.now()

  res.on('finish', () => {
    const duration = Date.now() - startedAt
    const memoryUsage = process.memoryUsage()
    const toMB = (bytes) => Math.round(bytes / 1024 / 1024)

    console.log({
      method: req.method,
      url: req.url,
      status: res.statusCode,
      duration: `${duration}ms`,
      memory: {
        rss: `${toMB(memoryUsage.rss)}MB`,
        heapUsed: `${toMB(memoryUsage.heapUsed)}MB`,
      },
    })

    // Flag slow requests for follow-up.
    if (duration > 1000) {
      console.warn(`慢请求: ${req.method} ${req.url} - ${duration}ms`)
    }
  })

  next()
}

// Sample CPU usage over a one-second window and log it as a percentage.
// Fixes: process.cpuUsage() deltas are in MICROseconds, so the original
// printed CPU-seconds labeled as "%" (off by a factor of 100); the elapsed
// wall time is now measured rather than assumed to be exactly 1000ms; and
// the timer is returned so callers can cancel the sample.
function monitorCPU() {
  const startUsage = process.cpuUsage()
  const startedAt = process.hrtime.bigint()

  return setTimeout(() => {
    const delta = process.cpuUsage(startUsage)
    const elapsedUs = Number(process.hrtime.bigint() - startedAt) / 1000

    // (CPU time / wall time) * 100; can exceed 100% on multi-core work.
    const cpuPercent = ((delta.user + delta.system) / elapsedUs) * 100

    console.log(`CPU 使用率: ${cpuPercent.toFixed(2)}%`)
  }, 1000)
}
2. 内存泄漏检测
// Warn every 30 seconds when heap usage has grown more than 100 MB beyond
// the baseline captured at call time.
// Fix: the original neither returned nor unref()ed its interval, so the
// monitor could never be stopped and kept the process alive forever. The
// handle is now unref()ed (monitoring alone won't block exit) and
// returned so callers can clearInterval() it.
function detectMemoryLeaks() {
  const initialMemory = process.memoryUsage()
  const toMB = (bytes) => Math.round(bytes / 1024 / 1024)

  const timer = setInterval(() => {
    const currentMemory = process.memoryUsage()
    const memoryIncrease = currentMemory.heapUsed - initialMemory.heapUsed

    // 100 MB of sustained growth is treated as a probable leak.
    if (memoryIncrease > 100 * 1024 * 1024) {
      console.warn('可能的内存泄漏:', {
        initial: toMB(initialMemory.heapUsed),
        current: toMB(currentMemory.heapUsed),
        increase: toMB(memoryIncrease),
      })
    }
  }, 30000) // check every 30 seconds

  timer.unref?.()
  return timer
}

// Garbage-collection monitor: only active when Node.js was started with
// --expose-gc (which defines global.gc); otherwise this block is a no-op.
// NOTE(review): this interval is never unref'd or cleared, so with
// --expose-gc it keeps the process alive — return/unref it in real code.
if (global.gc) {
  setInterval(() => {
    // Heap usage before and after a forced full collection shows how
    // much memory was actually reclaimable.
    const beforeGC = process.memoryUsage()
    global.gc()
    const afterGC = process.memoryUsage()

    console.log('垃圾回收:', {
      before: Math.round(beforeGC.heapUsed / 1024 / 1024),
      after: Math.round(afterGC.heapUsed / 1024 / 1024),
      freed: Math.round((beforeGC.heapUsed - afterGC.heapUsed) / 1024 / 1024),
    })
  }, 60000) // run once per minute
}

总结

  • 代码优化:避免阻塞操作,优化循环和算法
  • 内存管理:避免内存泄漏,优化垃圾回收
  • I/O 优化:并发处理,流式处理
  • 缓存策略:内存缓存,Redis 缓存
  • 数据库优化:连接池优化,查询优化
  • 监控调试:性能监控,内存泄漏检测
  • 最佳实践:使用工具监控,定期优化
  • 持续改进:根据监控数据持续优化