Node.js can handle tens of thousands of concurrent connections on a single server. This guide covers clustering, streams, profiling, and caching strategies.
Understanding the event loop
Node.js runs your JavaScript on a single thread. Blocking the event loop blocks every request.
// NEVER do this - blocks the event loop
app.get('/compute', (req, res) => {
// Synchronous CPU-heavy computation blocks ALL requests
let result = 0;
for (let i = 0; i < 1e9; i++) result += i; // 1 billion iterations!
res.json({ result });
});
// DO THIS instead - offload to a worker thread
const { Worker, isMainThread, parentPort, workerData } = require('worker_threads');
app.get('/compute', (req, res) => {
const worker = new Worker('./computeWorker.js', {
workerData: { input: req.query.n }
});
worker.on('message', result => res.json({ result }));
worker.on('error', err => res.status(500).json({ error: err.message }));
});
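The route above delegates the work to ./computeWorker.js, which is not shown in the listing. A minimal sketch of that worker file, assuming the same summation as the blocking example (the input handling is illustrative):
// computeWorker.js - runs on a separate thread, so the event loop stays free
const { parentPort, workerData } = require('worker_threads');

const iterations = Number(workerData.input) || 1e9; // fall back to the original workload
let result = 0;
for (let i = 0; i < iterations; i++) result += i;

parentPort.postMessage(result); // received by worker.on('message') in the main thread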
Clustering for multi-core performance
Node.js runs on a single CPU core by default. The cluster module forks child processes that share the server port.
// Node.js Cluster Module - Use All CPU Cores
const cluster = require('cluster');
const os = require('os');
const express = require('express');
const NUM_WORKERS = os.cpus().length;
if (cluster.isPrimary) {
console.log(`Primary ${process.pid} is running`);
console.log(`Starting ${NUM_WORKERS} workers...`);
// Fork workers
for (let i = 0; i < NUM_WORKERS; i++) {
cluster.fork();
}
cluster.on('exit', (worker, code, signal) => {
console.log(`Worker ${worker.process.pid} died (${signal || code}). Restarting...`);
cluster.fork(); // Auto-restart crashed workers
});
cluster.on('online', (worker) => {
console.log(`Worker ${worker.process.pid} is online`);
});
} else {
// Worker process - runs the actual server
const app = express();
app.get('/api/users', async (req, res) => {
const users = await db.getUsers();
res.json(users);
});
app.listen(3000, () => {
console.log(`Worker ${process.pid} listening on port 3000`);
});
}
// Alternative: PM2 cluster mode (recommended for production)
// pm2 start server.js -i max # auto-detect CPU count
// pm2 start server.js -i 4   # explicit count
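If you go the PM2 route, the same settings can live in an ecosystem file. This is a minimal sketch; the app name, paths, and memory limit are illustrative:
// ecosystem.config.js - started with `pm2 start ecosystem.config.js`
module.exports = {
  apps: [{
    name: 'api',
    script: './server.js',
    instances: 'max',            // one process per CPU core
    exec_mode: 'cluster',        // PM2 load-balances connections across processes
    max_memory_restart: '512M'   // restart a worker that grows past 512 MB
  }]
};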
Streams for memory efficiency
Streams let you process data chunk by chunk without loading everything into memory.
// Node.js Streams - Memory-Efficient Processing
const fs = require('fs');
const { Transform, pipeline } = require('stream');
const { promisify } = require('util');
const pipelineAsync = promisify(pipeline);
// 1. Stream a large file as HTTP response (no memory buffering)
app.get('/download/large-file', (req, res) => {
const filePath = './large-file.csv';
const stat = fs.statSync(filePath);
res.setHeader('Content-Type', 'text/csv');
res.setHeader('Content-Length', stat.size);
res.setHeader('Content-Disposition', 'attachment; filename=data.csv');
// Pipe file directly to response - never fully in memory
fs.createReadStream(filePath).pipe(res);
});
// 2. Transform stream for CSV processing
class CsvParser extends Transform {
constructor() {
super({ objectMode: true });
this.buffer = '';
this.headers = null;
}
_transform(chunk, encoding, callback) {
this.buffer += chunk.toString();
const lines = this.buffer.split('\n');
this.buffer = lines.pop(); // Keep incomplete line in buffer
for (const line of lines) {
if (!this.headers) {
this.headers = line.split(',');
continue;
}
const values = line.split(',');
const record = {};
this.headers.forEach((h, i) => record[h.trim()] = values[i]?.trim());
this.push(record);
}
callback();
}
}
// 3. Pipeline for reliable error handling
async function processLargeCsvFile(inputPath, outputPath) {
await pipelineAsync(
fs.createReadStream(inputPath),
new CsvParser(),
new Transform({
objectMode: true,
transform(record, enc, cb) {
// Transform each record
record.processed = true;
cb(null, JSON.stringify(record) + '\n');
}
}),
fs.createWriteStream(outputPath)
);
console.log('Processing complete');
}
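A call site might look like the sketch below (the paths are illustrative). Because pipeline propagates errors from every stage, a single catch covers the read stream, both transforms, and the write stream:
processLargeCsvFile('./data/users.csv', './data/users.ndjson')
  .catch(err => console.error('CSV processing failed:', err));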
Caching strategies
Caching is the highest-impact performance optimization.
// Caching Strategies for Node.js
// 1. In-Memory LRU Cache
const { LRUCache } = require('lru-cache');
const cache = new LRUCache({
max: 500, // Maximum 500 items
ttl: 5 * 60 * 1000, // 5 minutes TTL
allowStale: true, // Return stale value while refreshing
updateAgeOnGet: true,
});
async function getUser(id) {
const cacheKey = `user:${id}`;
const cached = cache.get(cacheKey);
if (cached) return cached;
const user = await db.findUser(id);
cache.set(cacheKey, user);
return user;
}
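// A hypothetical invalidation sketch - db.updateUser is an assumed helper;
// dropping the entry keeps the next getUser(id) call from serving stale data
async function updateUser(id, changes) {
  const user = await db.updateUser(id, changes);
  cache.delete(`user:${id}`); // LRUCache#delete removes the entry immediately
  return user;
}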
// 2. Redis Cache with Stale-While-Revalidate
const Redis = require('ioredis');
const redis = new Redis();
async function getCachedData(key, fetchFn, ttl = 300) {
const [cached, ttlRemaining] = await redis.pipeline()
.get(key)
.ttl(key)
.exec();
if (cached[1]) { // exec() returns [err, value] pairs, so index 1 holds the result
const data = JSON.parse(cached[1]);
// Background refresh when < 60 seconds remaining
if (ttlRemaining[1] < 60) {
fetchFn().then(fresh =>
redis.setex(key, ttl, JSON.stringify(fresh))
);
}
return data;
}
const data = await fetchFn();
await redis.setex(key, ttl, JSON.stringify(data));
return data;
}
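// Hypothetical usage of getCachedData - getProducts() stands in for a DB query
app.get('/api/products/cached', async (req, res) => {
  const products = await getCachedData('products:all', getProducts, 300);
  res.json(products);
});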
// 3. HTTP Response Caching with ETags
app.get('/api/products', async (req, res) => {
const products = await getProducts();
const etag = require('crypto')
.createHash('md5')
.update(JSON.stringify(products))
.digest('hex');
if (req.headers['if-none-match'] === etag) {
return res.status(304).end();
}
res.setHeader('ETag', etag);
res.setHeader('Cache-Control', 'public, max-age=60, stale-while-revalidate=300');
res.json(products);
});
Frequently asked questions
How many cluster workers should I create?
Create one worker per CPU core, i.e. os.cpus().length workers.
When should I use streams instead of loading data into memory?
Use streams for files larger than about 10 MB, for piping data between sources, and for incremental processing.
What is the --inspect flag and how do I use it?
The --inspect flag starts Node.js with the V8 inspector protocol enabled. Open chrome://inspect in Chrome to attach DevTools.
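For example, using the standard CLI flags (server.js stands in for your entry point; the inspector listens on port 9229 by default):
node --inspect server.js          # attach Chrome DevTools while the app runs
node --inspect-brk server.js      # pause before the first line of user code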
Why is my Node.js app using so much memory?
Common causes: memory leaks, caches without eviction, and large datasets held entirely in memory.
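As a quick first check, process.memoryUsage() reports the resident set size and heap statistics; a heapUsed value that climbs steadily under constant load usually points to a leak. A minimal sketch (the interval is illustrative):
// Log heap usage every 30 seconds
setInterval(() => {
  const { rss, heapUsed, heapTotal } = process.memoryUsage();
  console.log(`rss=${(rss / 1e6).toFixed(0)}MB heap=${(heapUsed / 1e6).toFixed(0)}/${(heapTotal / 1e6).toFixed(0)}MB`);
}, 30000);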