Koa performance optimization is key to building high-performance applications. Through proper configuration and optimization strategies, you can significantly improve application response speed and concurrent processing capabilities.
1. Middleware optimization:
Reduce middleware count:
// Bad practice: registering many small middleware one by one —
// every request pays the dispatch cost of each layer.
app.use(middleware1);
app.use(middleware2);
app.use(middleware3);
app.use(middleware4);
app.use(middleware5);

// Good practice: collapse related middleware into a single layer
// with koa-compose, halving the number of dispatch hops.
app.use(compose([middleware1, middleware2]));
app.use(compose([middleware3, middleware4]));
Optimize middleware order:
// Register middleware in the order a request should flow through them:
// cheap, always-on concerns first, route dispatch last.
app.use(loggerMiddleware);     // Request logging
app.use(corsMiddleware);       // CORS headers
app.use(bodyParserMiddleware); // Request body parsing
app.use(authMiddleware);       // Authentication
app.use(routerMiddleware);     // Route dispatch
Avoid time-consuming operations in middleware:
// Bad practice: an unconditional database query on EVERY request,
// even for routes that never look at the user.
app.use(async (ctx, next) => {
  const user = await User.findById(ctx.session.userId);
  ctx.state.user = user;
  await next();
});

// Good practice: expose a lazy, memoized loader so the query only
// runs for handlers that actually need the user — and at most once
// per request.
app.use(async (ctx, next) => {
  let cached;
  ctx.state.loadUser = async () => {
    if (cached === undefined) {
      cached = await User.findById(ctx.session.userId);
    }
    return cached;
  };
  await next();
});
2. Async processing optimization:
Use Promise.all for parallel processing:
// Bad practice: three independent queries awaited one after another —
// total latency is the SUM of the three round trips.
app.use(async (ctx) => {
  const { id } = ctx.params;
  const user = await User.findById(id);
  const posts = await Post.findByUserId(id);
  const comments = await Comment.findByUserId(id);
  ctx.body = { user, posts, comments };
});

// Good practice: start all three queries at once and await them
// together — total latency is the MAX of the three round trips.
app.use(async (ctx) => {
  const { id } = ctx.params;
  const [user, posts, comments] = await Promise.all([
    User.findById(id),
    Post.findByUserId(id),
    Comment.findByUserId(id),
  ]);
  ctx.body = { user, posts, comments };
});
Use caching to reduce repeated queries:
// Simple in-memory cache. Entries carry a timestamp and expire after
// CACHE_TTL_MS so the Map does not grow without bound and stale users
// are eventually re-fetched.
const cache = new Map();
const CACHE_TTL_MS = 60 * 1000; // 1 minute — tune per data freshness needs

app.use(async (ctx) => {
  const cacheKey = `user:${ctx.params.id}`;

  // Serve a fresh cache hit without touching the database.
  const hit = cache.get(cacheKey);
  if (hit !== undefined && Date.now() - hit.storedAt < CACHE_TTL_MS) {
    ctx.body = hit.value;
    return;
  }

  // Miss (or expired): query the database and refresh the entry.
  const user = await User.findById(ctx.params.id);
  cache.set(cacheKey, { value: user, storedAt: Date.now() });
  ctx.body = user;
});
3. Database connection pool optimization:
const { Pool } = require('pg');

// Shared PostgreSQL connection pool — reusing connections avoids the
// per-request cost of a TCP + auth handshake.
const pool = new Pool({
  host: 'localhost',
  port: 5432,
  database: 'mydb',
  user: 'user',
  password: 'password',
  max: 20,                        // Upper bound on open connections
  min: 5,                         // Keep this many warm
  idleTimeoutMillis: 30000,       // Close idle connections after 30s
  connectionTimeoutMillis: 2000,  // Fail fast if the pool is exhausted
});

// Check a client out for the duration of the request and ALWAYS
// return it to the pool, even when a downstream handler throws.
app.use(async (ctx, next) => {
  const client = await pool.connect();
  ctx.state.db = client;
  try {
    await next();
  } finally {
    client.release();
  }
});
4. Response compression:
Use koa-compress middleware to compress responses.
npm install koa-compress
const compress = require('koa-compress');

// Compress text responses larger than 2 KB; tiny payloads are not
// worth the CPU cost of compression.
const ZLIB_OPTS = { flush: require('zlib').constants.Z_SYNC_FLUSH };

app.use(compress({
  filter(contentType) {
    return /text/i.test(contentType); // Only compress text-like bodies
  },
  threshold: 2048, // Minimum body size (bytes) before compressing
  gzip: ZLIB_OPTS,
  deflate: ZLIB_OPTS,
  br: false, // Disable brotli
}));
5. Static resource optimization:
const serve = require('koa-static');
const fs = require('fs');                 // needed by precompressStaticFiles (was missing)
const path = require('path');             // needed by precompressStaticFiles (was missing)
const { createGzip } = require('zlib');
const { createReadStream, createWriteStream } = require('fs');

// Serve ./public with aggressive client-side caching; koa-static will
// prefer a pre-built .gz/.br sibling file when the client accepts it.
app.use(serve('./public', {
  maxage: 365 * 24 * 60 * 60 * 1000, // 1 year cache
  gzip: true,
  brotli: true,
}));

// Pre-compress JS/CSS assets once at deploy time so no request pays
// for on-the-fly compression. Writes <file>.gz next to each asset.
function precompressStaticFiles() {
  const files = fs.readdirSync('./public');
  files.forEach((file) => {
    if (file.endsWith('.js') || file.endsWith('.css')) {
      const filePath = path.join('./public', file);
      const gzipPath = filePath + '.gz';
      const readStream = createReadStream(filePath);
      const writeStream = createWriteStream(gzipPath);
      const gzip = createGzip();
      readStream.pipe(gzip).pipe(writeStream);
    }
  });
}
6. HTTP/2 support:
const http2 = require('http2');
const fs = require('fs');

// HTTP/2 requires TLS in practice, so load the key/cert pair and hand
// Koa's request handler (app.callback()) to a secure HTTP/2 server.
const tlsOptions = {
  key: fs.readFileSync('server.key'),
  cert: fs.readFileSync('server.crt'),
};

const server = http2.createSecureServer(tlsOptions, app.callback());
server.listen(3000);
7. Cluster mode:
Use Node.js cluster module to fully utilize multi-core CPU.
const cluster = require('cluster');
const numCPUs = require('os').cpus().length;

// cluster.isMaster was deprecated in Node 16 in favour of isPrimary;
// fall back so this runs on older runtimes too.
const isPrimary = cluster.isPrimary ?? cluster.isMaster;

if (isPrimary) {
  console.log(`Master ${process.pid} is running`);

  // One worker per CPU core.
  for (let i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  // Replace any worker that dies so capacity stays constant.
  cluster.on('exit', (worker, code, signal) => {
    console.log(`Worker ${worker.process.pid} died`);
    cluster.fork();
  });
} else {
  app.listen(3000);
  console.log(`Worker ${process.pid} started`);
}
8. Monitoring and performance analysis:
const prometheus = require('prom-client');

// Histogram of request latency, labelled by method, route and status.
// NOTE(review): using the raw ctx.path as the route label can explode
// label cardinality on parameterized URLs — confirm against route count.
const httpRequestDuration = new prometheus.Histogram({
  name: 'http_request_duration_seconds',
  help: 'Duration of HTTP requests in seconds',
  labelNames: ['method', 'route', 'code'],
});

// Time every request and record the observation after downstream
// middleware has produced the final status code.
app.use(async (ctx, next) => {
  const startedAt = Date.now();
  await next();
  const elapsedSeconds = (Date.now() - startedAt) / 1000;
  const labels = { method: ctx.method, route: ctx.path, code: ctx.status };
  httpRequestDuration.observe(labels, elapsedSeconds);
});

// Expose the Prometheus scrape endpoint.
app.use(async (ctx) => {
  if (ctx.path === '/metrics') {
    ctx.set('Content-Type', prometheus.register.contentType);
    ctx.body = await prometheus.register.metrics();
  }
});
9. Code splitting and lazy loading:
// Lazy-load route modules: the admin/api bundles are only imported
// (and parsed) the first time a matching request arrives. Dynamic
// import() caches the module, so subsequent hits pay no extra cost.
app.use(async (ctx, next) => {
  if (ctx.path.startsWith('/admin')) {
    const adminRoutes = await import('./routes/admin');
    // Await the router middleware — it returns a promise; without the
    // await, Koa may finalize the response before the handler runs.
    await adminRoutes.default.routes()(ctx, next);
  } else if (ctx.path.startsWith('/api')) {
    const apiRoutes = await import('./routes/api');
    await apiRoutes.default.routes()(ctx, next);
  } else {
    await next();
  }
});
10. Performance optimization best practices:
- Middleware optimization:
- Reduce middleware count
- Merge similar middleware
- Optimize middleware execution order
- Avoid time-consuming operations in middleware
- Async processing:
- Use Promise.all for parallel processing
- Implement caching mechanisms
- Use connection pools for database connections
- Avoid blocking the main thread
- Resource optimization:
- Compress response content
- Enable static resource caching
- Use CDN acceleration
- Pre-compress static resources
- Architecture optimization:
- Use cluster mode
- Implement load balancing
- Use HTTP/2
- Implement microservices architecture
- Monitoring and tuning:
- Implement performance monitoring
- Use performance analysis tools
- Regular performance testing
- Continuous optimization and improvement