乐闻世界logo
搜索文章和话题

How to optimize Promise performance?

February 22, 14:07

Promise performance optimization is key to improving application response speed and user experience. By properly using Promises and related techniques, you can significantly improve the efficiency of asynchronous operations.

Avoid Unnecessary Promise Wrapping

Problem Example

javascript
// Not recommended: unnecessary Promise wrapping function fetchData() { return new Promise((resolve) => { resolve(fetch('/api/data')); }); } // Recommended: return Promise directly function fetchData() { return fetch('/api/data'); }

Optimization Reasons

Unnecessary Promise wrapping adds extra overhead, including:

  • Creating new Promise objects
  • Additional microtask scheduling
  • Increased memory usage

Execute Independent Operations in Parallel

Sequential Execution (Slow)

javascript
// Not recommended: each await blocks the next request, so total latency
// is the sum of all three round trips.
async function fetchAllData() {
  const user = await fetchUser();
  const posts = await fetchPosts();
  const comments = await fetchComments();
  return { user, posts, comments };
}

Parallel Execution (Fast)

javascript
// Recommended: start all three requests at once and await them together;
// total latency is that of the slowest single round trip.
async function fetchAllData() {
  const tasks = [fetchUser(), fetchPosts(), fetchComments()];
  const [user, posts, comments] = await Promise.all(tasks);
  return { user, posts, comments };
}

Performance Comparison

Assuming each request takes 100ms:

  • Sequential execution: 300ms
  • Parallel execution: 100ms (3x improvement)

Avoid Excessively Long Promise Chains

Problem Example

javascript
// Not recommended: an excessively long .then() chain — every link
// schedules another microtask and the data flow is hard to follow.
function processLargeData(data) {
  return Promise.resolve(data)
    .then((d) => processData1(d))
    .then((d) => processData2(d))
    .then((d) => processData3(d))
    .then((d) => processData4(d))
    .then((d) => processData5(d))
    .then((d) => processData6(d))
    .then((d) => processData7(d))
    .then((d) => processData8(d));
}

Optimization Solution

javascript
// Recommended: use async/await async function processLargeData(data) { data = await processData1(data); data = await processData2(data); data = await processData3(data); data = await processData4(data); data = await processData5(data); data = await processData6(data); data = await processData7(data); data = await processData8(data); return data; } // Or: use function composition function processLargeData(data) { return [processData1, processData2, processData3, processData4, processData5, processData6, processData7, processData8] .reduce((promise, processor) => promise.then(processor), Promise.resolve(data) ); }

Use Caching Appropriately

Basic Cache Implementation

javascript
// Simple in-memory cache: parsed JSON payloads keyed by URL.
const cache = new Map();

// Returns the cached payload for `url` when present; otherwise fetches,
// parses, stores, and returns it. Failed requests are not cached.
function fetchWithCache(url) {
  if (cache.has(url)) {
    return Promise.resolve(cache.get(url));
  }
  return fetch(url)
    .then((res) => res.json())
    .then((payload) => {
      cache.set(url, payload);
      return payload;
    });
}

Cache with Expiration Time

javascript
// In-memory cache with a per-entry TTL (time to live, in milliseconds).
class PromiseCache {
  /**
   * @param {number} ttl - how long an entry stays valid, in milliseconds.
   */
  constructor(ttl = 60000) {
    this.cache = new Map();
    this.ttl = ttl;
  }

  // Returns the cached value for `key`, or null on a miss or an expired
  // entry (expired entries are evicted eagerly here).
  get(key) {
    const item = this.cache.get(key);
    if (!item) return null;
    if (Date.now() > item.expiry) {
      this.cache.delete(key);
      return null;
    }
    return item.value;
  }

  // Stores `value` with an absolute expiry timestamp.
  set(key, value) {
    this.cache.set(key, { value, expiry: Date.now() + this.ttl });
  }

  // Returns the cached value, or runs `fetcher` and caches its result.
  async fetch(key, fetcher) {
    const cached = this.get(key);
    // Fix: compare against null instead of relying on truthiness —
    // previously any falsy cached value (0, '', false) was refetched on
    // every call. (A cached null remains indistinguishable from a miss.)
    if (cached !== null) return cached;
    const value = await fetcher();
    this.set(key, value);
    return value;
  }
}

// Usage example
const cache = new PromiseCache(60000);

async function fetchUser(id) {
  return cache.fetch(`user:${id}`, () =>
    fetch(`/api/users/${id}`).then(r => r.json())
  );
}

Request Deduplication

Basic Deduplication Implementation

javascript
// Tracks in-flight requests so concurrent calls for the same URL share
// a single network round trip.
const pendingRequests = new Map();

// Returns the pending Promise for `url` if one exists; otherwise starts
// a new request and registers it until it settles.
function fetchDeduplicated(url) {
  const inFlight = pendingRequests.get(url);
  if (inFlight) {
    return inFlight;
  }
  const request = fetch(url)
    .then((res) => res.json())
    .finally(() => {
      pendingRequests.delete(url);
    });
  pendingRequests.set(url, request);
  return request;
}

Complete Deduplication Implementation

javascript
// Deduplicates concurrent HTTP requests: while a request for a given
// URL + options combination is in flight, further calls share its Promise.
class RequestDeduplicator {
  constructor() {
    this.pendingRequests = new Map();
  }

  // Fetches `url`, reusing an identical in-flight request when present.
  // Resolves with the parsed JSON body; rejects on non-2xx responses.
  async fetch(url, options = {}) {
    const key = this.getRequestKey(url, options);
    if (this.pendingRequests.has(key)) {
      return this.pendingRequests.get(key);
    }
    const promise = fetch(url, options)
      .then(response => {
        if (!response.ok) {
          throw new Error(`HTTP error! status: ${response.status}`);
        }
        return response.json();
      })
      .finally(() => {
        // Clear the slot whether the request succeeded or failed.
        this.pendingRequests.delete(key);
      });
    this.pendingRequests.set(key, promise);
    return promise;
  }

  // Builds a dedup key from the URL plus serializable options.
  // NOTE(review): JSON.stringify drops functions/undefined in options and
  // is key-order-sensitive — confirm this is acceptable for callers.
  getRequestKey(url, options) {
    return JSON.stringify({ url, options });
  }
}

// Usage example
const deduplicator = new RequestDeduplicator();

// Multiple calls to same URL, only one request will be made
Promise.all([
  deduplicator.fetch('/api/user'),
  deduplicator.fetch('/api/user'),
  deduplicator.fetch('/api/user')
]).then(results => {
  console.log('All requests return same result:', results);
}).catch(error => {
  // Fix: the original chain had no .catch, so a failed request became an
  // unhandled promise rejection.
  console.error('Request failed:', error);
});

Batch Processing

Problem Example

javascript
// Not recommended: awaiting inside the loop processes items strictly one
// at a time, serializing all the latency.
async function processItems(items) {
  const results = [];
  for (const item of items) {
    results.push(await processItem(item));
  }
  return results;
}

Optimization Solution

javascript
// Recommended: process in fixed-size batches — each batch runs in
// parallel, bounding concurrency at `batchSize`.
async function processItems(items, batchSize = 10) {
  const results = [];
  for (let start = 0; start < items.length; start += batchSize) {
    const batch = items.slice(start, start + batchSize);
    const batchResults = await Promise.all(
      batch.map((item) => processItem(item))
    );
    results.push(...batchResults);
  }
  return results;
}

Error Handling Optimization

Avoid Overusing try/catch

javascript
// Not recommended: one try/catch per statement buries the happy path and
// makes it unclear which handler owns which failure.
async function fetchData() {
  try {
    try {
      const response = await fetch('/api/data');
      try {
        const data = await response.json();
        try {
          const processed = await processData(data);
          return processed;
        } catch (processError) {
          console.error('Processing failed:', processError);
        }
      } catch (parseError) {
        console.error('Parsing failed:', parseError);
      }
    } catch (requestError) {
      console.error('Request failed:', requestError);
    }
  } catch (unknownError) {
    console.error('Unknown error:', unknownError);
  }
}

Optimization Solution

javascript
// Recommended: one try around the whole pipeline, with a single catch
// that inspects the error type and always rethrows.
// NOTE(review): assumes NetworkError and ParseError are Error subclasses
// defined elsewhere in the application — confirm they are in scope.
async function fetchData() {
  try {
    const response = await fetch('/api/data');
    const data = await response.json();
    return await processData(data);
  } catch (error) {
    if (error instanceof NetworkError) {
      console.error('Network error:', error.message);
    } else if (error instanceof ParseError) {
      console.error('Parsing error:', error.message);
    } else {
      console.error('Unknown error:', error);
    }
    throw error;
  }
}

Memory Optimization

Avoid Memory Leaks

javascript
// Not recommended: the cache grows without bound — every distinct URL
// stays in memory for the fetcher's lifetime.
class DataFetcher {
  constructor() {
    this.cache = new Map();
  }

  // Returns cached JSON for `url`, fetching and storing it on a miss.
  async fetch(url) {
    if (this.cache.has(url)) {
      return this.cache.get(url);
    }
    const payload = await fetch(url).then((res) => res.json());
    this.cache.set(url, payload);
    return payload;
  }
}

Optimization Solution

javascript
// Recommended: keep the cache bounded — when full, evict the oldest
// entry first (Map preserves insertion order, so this is FIFO eviction).
class DataFetcher {
  constructor(maxSize = 100) {
    this.cache = new Map();
    this.maxSize = maxSize;
  }

  // Returns cached JSON for `url`, fetching and storing it on a miss.
  async fetch(url) {
    if (this.cache.has(url)) {
      return this.cache.get(url);
    }
    const payload = await fetch(url).then((res) => res.json());
    // Evict before inserting so the size never exceeds maxSize.
    if (this.cache.size >= this.maxSize) {
      const oldestKey = this.cache.keys().next().value;
      this.cache.delete(oldestKey);
    }
    this.cache.set(url, payload);
    return payload;
  }
}

Performance Monitoring

Monitor Promise Execution Time

javascript
// Wraps a Promise and logs how long it takes to settle, forwarding the
// original resolution value or rejection reason unchanged.
function withPerformanceTracking(promise, label) {
  const startedAt = performance.now();
  const elapsed = () => performance.now() - startedAt;
  return promise
    .then((result) => {
      console.log(`${label} completed, duration: ${elapsed().toFixed(2)}ms`);
      return result;
    })
    .catch((error) => {
      console.error(`${label} failed, duration: ${elapsed().toFixed(2)}ms`, error);
      throw error;
    });
}

// Usage example
async function fetchData() {
  return withPerformanceTracking(
    fetch('/api/data').then((res) => res.json()),
    'fetchData'
  );
}

Monitor Concurrent Requests

javascript
// Tracks how many monitored Promises are currently in flight and the
// peak concurrency observed so far.
class RequestMonitor {
  constructor() {
    this.activeRequests = 0;
    this.maxConcurrentRequests = 0;
  }

  // Counts `promise` as active until it settles; resolves or rejects
  // with the same outcome as the wrapped promise.
  async monitor(promise) {
    this.activeRequests++;
    if (this.activeRequests > this.maxConcurrentRequests) {
      this.maxConcurrentRequests = this.activeRequests;
    }
    try {
      return await promise;
    } finally {
      this.activeRequests--;
    }
  }

  // Snapshot of current and peak concurrency.
  getStats() {
    return {
      activeRequests: this.activeRequests,
      maxConcurrentRequests: this.maxConcurrentRequests
    };
  }
}

// Usage example
const monitor = new RequestMonitor();

async function fetchWithMonitor(url) {
  return monitor.monitor(fetch(url));
}

Summary

  1. Avoid unnecessary Promise wrapping: Reduce extra overhead
  2. Execute independent operations in parallel: Use Promise.all to improve performance
  3. Avoid excessively long Promise chains: Use async/await to improve readability
  4. Use caching appropriately: Reduce duplicate requests
  5. Implement request deduplication: Avoid duplicate network requests
  6. Batch process data: Improve processing efficiency
  7. Optimize error handling: Avoid overly nested try/catch
  8. Pay attention to memory management: Avoid memory leaks
  9. Monitor performance metrics: Detect performance issues in time
  10. Choose appropriate concurrency: Adjust concurrency strategy based on actual conditions
Tags: Promise