I’ve spent years working with JavaScript’s asynchronous capabilities, and I can confidently say that mastering these patterns has transformed how I approach modern application development. The evolution from callback hell to elegant async/await syntax represents one of the most significant improvements in JavaScript’s history.
Promise.all() for Parallel Execution
When building applications that need to fetch data from multiple sources simultaneously, Promise.all() becomes indispensable. I frequently use this pattern when loading dashboard data or aggregating information from various APIs.
async function loadDashboardData() {
  const [userProfile, notifications, analytics, settings] = await Promise.all([
    fetchUserProfile(),
    fetchNotifications(),
    fetchAnalytics(),
    fetchUserSettings()
  ]);
  return {
    userProfile,
    notifications,
    analytics,
    settings
  };
}

// Sequential approach (slower)
async function loadDashboardDataSequential() {
  const userProfile = await fetchUserProfile();
  const notifications = await fetchNotifications();
  const analytics = await fetchAnalytics();
  const settings = await fetchUserSettings();
  return { userProfile, notifications, analytics, settings };
}
The parallel approach reduces loading time from the cumulative duration of all requests to the duration of the longest single request. In my experience, this often cuts loading times by 60-80% for dashboard-style interfaces.
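If you want to sanity-check the difference in your own application, a rough timing harness works well; the sketch below assumes the two dashboard loaders and their fetch helpers defined above.

// Minimal timing sketch; assumes the loaders above and their fetch helpers exist
async function compareLoadTimes() {
  const parallelStart = performance.now();
  await loadDashboardData();
  const parallelMs = performance.now() - parallelStart;

  const sequentialStart = performance.now();
  await loadDashboardDataSequential();
  const sequentialMs = performance.now() - sequentialStart;

  console.log(`parallel: ${parallelMs.toFixed(0)} ms, sequential: ${sequentialMs.toFixed(0)} ms`);
}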
Promise.allSettled() for Fault Tolerance
Real-world applications must handle partial failures gracefully. Promise.allSettled() allows me to continue processing even when some operations fail, which proves essential for non-critical data loading scenarios.
async function loadOptionalData() {
  const results = await Promise.allSettled([
    fetchCriticalData(),
    fetchOptionalFeatures(),
    fetchRecommendations(),
    fetchAdvertisements()
  ]);
  const keys = ['critical', 'features', 'recommendations', 'ads'];
  const data = {};
  results.forEach((result, index) => {
    if (result.status === 'fulfilled') {
      data[keys[index]] = result.value;
    } else {
      console.warn(`Failed to load ${keys[index]}:`, result.reason);
    }
  });
  return data;
}
This pattern ensures that users receive available content immediately rather than facing complete loading failures when non-essential services experience issues.
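One way to consume that partial result is to spread it over a set of defaults, so downstream code never has to branch on missing keys. The sketch below assumes the loadOptionalData function above; showErrorBanner and render are hypothetical UI helpers.

async function renderDashboard() {
  const defaults = { critical: null, features: [], recommendations: [], ads: [] };
  // Whatever loaded overrides the defaults; everything else stays safe to render
  const data = { ...defaults, ...(await loadOptionalData()) };
  if (!data.critical) {
    showErrorBanner('Some content is temporarily unavailable'); // hypothetical UI helper
    return;
  }
  render(data); // hypothetical render function
}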
Async Generators for Streaming Data
Async generators have revolutionized how I handle large datasets and streaming operations. They provide memory-efficient processing of data that arrives over time.
import { createReadStream } from 'node:fs';
import { createInterface } from 'node:readline';

async function* streamLogFiles(filePaths) {
  for (const filePath of filePaths) {
    try {
      // readline yields complete lines even when a line spans multiple chunks
      const lines = createInterface({
        input: createReadStream(filePath),
        crlfDelay: Infinity
      });
      for await (const line of lines) {
        if (line.trim()) {
          yield JSON.parse(line);
        }
      }
    } catch (error) {
      console.error(`Error processing ${filePath}:`, error);
    }
  }
}
// Process streaming logs without memory overflow
async function processLogs() {
  const logFiles = ['app.log', 'error.log', 'access.log'];
  for await (const logEntry of streamLogFiles(logFiles)) {
    await processLogEntry(logEntry);
  }
}
This approach handles gigabyte-sized log files without consuming excessive memory, making it perfect for data analysis and monitoring applications.
Asynchronous Iteration with for-await-of
The for-await-of loop provides clean syntax for processing asynchronous iterables while maintaining proper error handling and sequential processing guarantees.
async function processUserActions(userIds) {
  const results = [];
  for await (const userId of userIds) {
    try {
      const actions = await fetchUserActions(userId);
      const processed = await processActions(actions);
      results.push({ userId, processed });
    } catch (error) {
      results.push({ userId, error: error.message });
    }
  }
  return results;
}

// With async generator for pagination
async function* fetchAllUsers() {
  let page = 1;
  while (true) {
    const response = await fetch(`/api/users?page=${page}`);
    const data = await response.json();
    if (data.users.length === 0) break;
    for (const user of data.users) {
      yield user;
    }
    page++;
  }
}

async function processAllUsers() {
  for await (const user of fetchAllUsers()) {
    await updateUserStatus(user.id);
  }
}
Promise Racing for Competitive Operations
Promise.race() excels in scenarios where I need the fastest response or want to implement timeout patterns. I use this frequently for performance optimization and user experience improvements.
async function fetchWithTimeout(url, timeoutMs = 5000) {
  const fetchPromise = fetch(url);
  const timeoutPromise = new Promise((_, reject) => {
    setTimeout(() => reject(new Error('Request timeout')), timeoutMs);
  });
  return Promise.race([fetchPromise, timeoutPromise]);
}
async function getDataFromMultipleSources(urls) {
  // Promise.any resolves with the first fulfilled request and only
  // rejects (with an AggregateError) if every request fails
  const promises = urls.map(url => fetchWithTimeout(url, 3000));
  return Promise.any(promises);
}
// Implement circuit breaker pattern
class CircuitBreaker {
  constructor(threshold = 5, timeout = 60000) {
    this.failureCount = 0;
    this.threshold = threshold;
    this.timeout = timeout;
    this.state = 'CLOSED';
    this.nextAttempt = Date.now();
  }

  async execute(operation) {
    if (this.state === 'OPEN') {
      if (Date.now() < this.nextAttempt) {
        throw new Error('Circuit breaker is OPEN');
      }
      this.state = 'HALF_OPEN';
    }
    try {
      const result = await Promise.race([
        operation(),
        new Promise((_, reject) =>
          setTimeout(() => reject(new Error('Operation timeout')), 5000)
        )
      ]);
      this.onSuccess();
      return result;
    } catch (error) {
      this.onFailure();
      throw error;
    }
  }

  onSuccess() {
    this.failureCount = 0;
    this.state = 'CLOSED';
  }

  onFailure() {
    this.failureCount++;
    if (this.failureCount >= this.threshold) {
      this.state = 'OPEN';
      this.nextAttempt = Date.now() + this.timeout;
    }
  }
}
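The class above is defined but never instantiated in the snippet, so here is one way it might be wired up, sketched around a hypothetical quote-fetching endpoint:

// Hypothetical usage: guard an unreliable upstream API with the breaker
const quoteBreaker = new CircuitBreaker(5, 60000);

async function getQuote(symbol) {
  try {
    return await quoteBreaker.execute(async () => {
      const response = await fetch(`/api/quotes/${symbol}`); // hypothetical endpoint
      if (!response.ok) throw new Error(`HTTP ${response.status}`);
      return response.json();
    });
  } catch (error) {
    // Lands here on request failures and while the breaker is OPEN
    return { symbol, price: null, stale: true };
  }
}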
Queue Management for Controlled Concurrency
Managing concurrent operations prevents overwhelming external services and helps maintain application stability. I implement queue systems to control request rates and resource usage.
class AsyncQueue {
  constructor(concurrency = 3) {
    this.concurrency = concurrency;
    this.running = 0;
    this.queue = [];
  }

  async add(asyncFunction, priority = 0) {
    return new Promise((resolve, reject) => {
      const task = {
        asyncFunction,
        resolve,
        reject,
        priority
      };
      // Insert based on priority
      const insertIndex = this.queue.findIndex(t => t.priority < priority);
      if (insertIndex === -1) {
        this.queue.push(task);
      } else {
        this.queue.splice(insertIndex, 0, task);
      }
      this.process();
    });
  }

  async process() {
    if (this.running >= this.concurrency || this.queue.length === 0) {
      return;
    }
    this.running++;
    const task = this.queue.shift();
    try {
      const result = await task.asyncFunction();
      task.resolve(result);
    } catch (error) {
      task.reject(error);
    } finally {
      this.running--;
      this.process();
    }
  }

  get stats() {
    return {
      running: this.running,
      queued: this.queue.length,
      concurrency: this.concurrency
    };
  }
}
// Usage for API requests
const apiQueue = new AsyncQueue(2);
async function processUserUploads(files) {
  const uploadPromises = files.map((file, index) =>
    apiQueue.add(
      () => uploadFile(file),
      files.length - index // Earlier files get higher priority
    )
  );
  return Promise.allSettled(uploadPromises);
}
Async Pipeline Processing
Pipeline patterns create maintainable data transformation chains where each step processes the output of the previous step asynchronously.
class AsyncPipeline {
  constructor() {
    this.steps = [];
  }

  pipe(asyncFunction) {
    this.steps.push(asyncFunction);
    return this;
  }

  async execute(input) {
    let result = input;
    for (const [index, step] of this.steps.entries()) {
      try {
        result = await step(result);
      } catch (error) {
        throw new Error(`Pipeline failed at step ${index + 1}: ${error.message}`);
      }
    }
    return result;
  }
}
// Create data processing pipeline
const dataPipeline = new AsyncPipeline()
  .pipe(async (data) => validateInput(data))
  .pipe(async (data) => enrichWithExternalData(data))
  .pipe(async (data) => transformFormat(data))
  .pipe(async (data) => saveToDatabase(data));

async function processIncomingData(rawData) {
  try {
    return await dataPipeline.execute(rawData);
  } catch (error) {
    console.error('Pipeline processing failed:', error);
    throw error;
  }
}
Batch Processing for Efficiency
Batch processing optimizes performance by grouping operations and reducing overhead from individual requests or database transactions.
class BatchProcessor {
  constructor(batchSize = 10, flushInterval = 1000) {
    this.batchSize = batchSize;
    this.flushInterval = flushInterval;
    this.batch = [];
    this.timer = null;
    this.processing = false;
  }

  async add(item) {
    this.batch.push(item);
    if (this.batch.length >= this.batchSize) {
      await this.flush();
    } else if (!this.timer) {
      this.timer = setTimeout(() => this.flush(), this.flushInterval);
    }
  }

  async flush() {
    if (this.processing || this.batch.length === 0) {
      return;
    }
    this.processing = true;
    const currentBatch = this.batch.splice(0);
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }
    try {
      await this.processBatch(currentBatch);
    } catch (error) {
      console.error('Batch processing failed:', error);
      // Could implement retry logic here
    } finally {
      this.processing = false;
    }
  }

  async processBatch(items) {
    // Process items in chunks
    const chunks = this.createChunks(items, 5);
    for (const chunk of chunks) {
      await Promise.all(chunk.map(item => this.processItem(item)));
    }
  }

  createChunks(array, chunkSize) {
    const chunks = [];
    for (let i = 0; i < array.length; i += chunkSize) {
      chunks.push(array.slice(i, i + chunkSize));
    }
    return chunks;
  }

  async processItem(item) {
    // Implement actual processing logic
    return await saveItemToDatabase(item);
  }
}
// Usage for event processing
const eventProcessor = new BatchProcessor(20, 2000);
async function handleUserEvent(event) {
  await eventProcessor.add(event);
}
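Because events can sit in the buffer for up to flushInterval milliseconds, it is worth flushing explicitly during shutdown; a minimal sketch, assuming the processor runs in a Node.js process:

// Drain any buffered events before the process exits (Node.js assumption)
process.on('SIGTERM', async () => {
  await eventProcessor.flush();
  process.exit(0);
});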
Retry Logic with Exponential Backoff
Robust applications require sophisticated retry mechanisms that handle transient failures gracefully while avoiding overwhelming failing services.
class RetryHandler {
  constructor(maxRetries = 3, baseDelay = 1000, maxDelay = 30000) {
    this.maxRetries = maxRetries;
    this.baseDelay = baseDelay;
    this.maxDelay = maxDelay;
  }

  async execute(operation, context = {}) {
    let lastError;
    for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
      try {
        return await operation();
      } catch (error) {
        lastError = error;
        if (attempt === this.maxRetries) {
          break;
        }
        if (!this.shouldRetry(error)) {
          throw error;
        }
        const delay = this.calculateDelay(attempt);
        console.warn(`Attempt ${attempt + 1} failed, retrying in ${delay}ms:`, error.message);
        await this.delay(delay);
      }
    }
    throw new Error(`Operation failed after ${this.maxRetries + 1} attempts: ${lastError.message}`);
  }

  shouldRetry(error) {
    // Don't retry on client errors (4xx) but retry on server errors (5xx)
    if (error.response) {
      return error.response.status >= 500;
    }
    // Retry on network errors
    return error.code === 'ECONNRESET' ||
      error.code === 'ETIMEDOUT' ||
      error.message.includes('network');
  }

  calculateDelay(attempt) {
    const exponentialDelay = this.baseDelay * Math.pow(2, attempt);
    const jitter = Math.random() * 0.1 * exponentialDelay;
    return Math.min(exponentialDelay + jitter, this.maxDelay);
  }

  delay(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}
// Usage with different strategies
const networkRetry = new RetryHandler(5, 500, 10000);
const databaseRetry = new RetryHandler(3, 2000, 15000);
async function fetchCriticalData(url) {
  return networkRetry.execute(async () => {
    const response = await fetch(url);
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}: ${response.statusText}`);
    }
    return response.json();
  });
}
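The databaseRetry instance above is configured but unused in the snippet; it wraps calls the same way, for example around a hypothetical db.query helper:

// Hypothetical database call wrapped with the slower retry policy
async function getUserById(id) {
  return databaseRetry.execute(() =>
    db.query('SELECT * FROM users WHERE id = ?', [id]) // hypothetical client
  );
}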
Async Resource Pooling
Resource pooling manages expensive resources like database connections efficiently, ensuring optimal performance and preventing resource exhaustion.
class AsyncResourcePool {
  constructor(createResource, destroyResource, maxSize = 10) {
    this.createResource = createResource;
    this.destroyResource = destroyResource;
    this.maxSize = maxSize;
    this.available = [];
    this.inUse = new Set();
    this.waiting = [];
  }

  async acquire() {
    if (this.available.length > 0) {
      const resource = this.available.pop();
      this.inUse.add(resource);
      return resource;
    }
    if (this.inUse.size < this.maxSize) {
      const resource = await this.createResource();
      this.inUse.add(resource);
      return resource;
    }
    // Wait for available resource
    return new Promise((resolve) => {
      this.waiting.push(resolve);
    });
  }

  release(resource) {
    if (!this.inUse.has(resource)) {
      throw new Error('Resource not in use');
    }
    this.inUse.delete(resource);
    if (this.waiting.length > 0) {
      const resolve = this.waiting.shift();
      this.inUse.add(resource);
      resolve(resource);
    } else {
      this.available.push(resource);
    }
  }

  async execute(operation) {
    const resource = await this.acquire();
    try {
      return await operation(resource);
    } finally {
      this.release(resource);
    }
  }

  async drain() {
    // Wait for all resources to be released
    while (this.inUse.size > 0) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
    // Destroy all available resources
    for (const resource of this.available) {
      await this.destroyResource(resource);
    }
    this.available.length = 0;
  }

  get stats() {
    return {
      available: this.available.length,
      inUse: this.inUse.size,
      waiting: this.waiting.length,
      total: this.available.length + this.inUse.size
    };
  }
}
// Database connection pool example
const dbPool = new AsyncResourcePool(
  async () => createDatabaseConnection(),
  async (connection) => connection.close(),
  5
);

async function queryDatabase(sql, params) {
  return dbPool.execute(async (connection) => {
    return connection.query(sql, params);
  });
}
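The drain() method never gets called in the snippet above; it typically belongs in graceful shutdown, along these lines:

// Hypothetical graceful shutdown: wait for in-flight queries, then close every connection
async function shutdownDatabase() {
  await dbPool.drain();
  console.log('All database connections closed');
}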
Streaming Data Processing
Processing large datasets efficiently requires streaming approaches that handle data in chunks rather than loading everything into memory.
async function* processLargeDataset(dataSource) {
  const chunkSize = 1000;
  let offset = 0;
  while (true) {
    const chunk = await dataSource.getChunk(offset, chunkSize);
    if (chunk.length === 0) {
      break;
    }
    // Process chunk asynchronously
    const processed = await Promise.all(
      chunk.map(async (item) => {
        return await transformDataItem(item);
      })
    );
    yield processed;
    offset += chunkSize;
  }
}
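Consuming the generator with for-await-of keeps only one processed chunk in memory at a time; the sketch below assumes a hypothetical dataSource with getChunk and a writeResults sink.

// Hypothetical consumer: persist each processed chunk as soon as it is ready
async function exportDataset(dataSource) {
  let total = 0;
  for await (const processedChunk of processLargeDataset(dataSource)) {
    await writeResults(processedChunk); // hypothetical sink, e.g. a file or API
    total += processedChunk.length;
  }
  console.log(`Exported ${total} records`);
}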
// Stream processor with backpressure control
class StreamProcessor {
  constructor(concurrency = 3) {
    this.concurrency = concurrency;
    this.processing = 0;
    this.backpressure = false;
  }

  async processStream(asyncIterable, processor) {
    const results = [];
    for await (const item of asyncIterable) {
      while (this.processing >= this.concurrency) {
        await this.waitForSlot();
      }
      this.processing++;
      processor(item)
        .then(result => results.push(result))
        .catch(error => console.error('Processing error:', error))
        .finally(() => {
          this.processing--;
          this.backpressure = false;
        });
    }
    // Wait for all processing to complete
    while (this.processing > 0) {
      await this.waitForSlot();
    }
    return results;
  }

  async waitForSlot() {
    this.backpressure = true;
    while (this.backpressure) {
      await new Promise(resolve => setTimeout(resolve, 10));
    }
  }
}
// Usage for file processing
async function processLargeFile(filePath) {
  const processor = new StreamProcessor(5);
  const fileStream = async function* () {
    // Node.js readable streams are async iterable, so each chunk can be
    // yielded as it arrives without buffering the whole file
    for await (const chunk of createReadStream(filePath)) {
      yield chunk;
    }
  };
  return processor.processStream(fileStream(), async (chunk) => {
    return await processFileChunk(chunk);
  });
}
AbortController Integration
Modern applications require proper cancellation support for long-running operations to improve user experience and resource management.
class CancellableOperation {
  constructor() {
    this.operations = new Map();
  }

  async execute(operationId, asyncOperation, timeoutMs = 30000) {
    const controller = new AbortController();
    const { signal } = controller;
    // Store for potential cancellation
    this.operations.set(operationId, controller);
    let timeoutId;
    try {
      // Abort automatically if the operation runs past the timeout
      timeoutId = setTimeout(() => controller.abort(), timeoutMs);
      return await Promise.race([
        asyncOperation(signal),
        this.createAbortPromise(signal)
      ]);
    } finally {
      // Always clear the timer, even when the operation rejects
      clearTimeout(timeoutId);
      this.operations.delete(operationId);
    }
  }

  cancel(operationId) {
    const controller = this.operations.get(operationId);
    if (controller) {
      controller.abort();
      return true;
    }
    return false;
  }

  cancelAll() {
    for (const controller of this.operations.values()) {
      controller.abort();
    }
    this.operations.clear();
  }

  createAbortPromise(signal) {
    return new Promise((_, reject) => {
      signal.addEventListener('abort', () => {
        reject(new Error('Operation was cancelled'));
      });
    });
  }
}
// Fetch with cancellation support
async function cancellableFetch(url, options = {}, signal) {
  const response = await fetch(url, { ...options, signal });
  if (!response.ok) {
    throw new Error(`HTTP ${response.status}`);
  }
  // Check for cancellation during processing
  if (signal.aborted) {
    throw new Error('Operation was cancelled');
  }
  return response.json();
}
// Usage in component
class DataManager {
  constructor() {
    this.cancellable = new CancellableOperation();
  }

  async loadUserData(userId) {
    const operationId = `user-${userId}`;
    try {
      return await this.cancellable.execute(
        operationId,
        async (signal) => {
          const [profile, preferences, history] = await Promise.all([
            cancellableFetch(`/api/users/${userId}`, {}, signal),
            cancellableFetch(`/api/users/${userId}/preferences`, {}, signal),
            cancellableFetch(`/api/users/${userId}/history`, {}, signal)
          ]);
          return { profile, preferences, history };
        },
        10000
      );
    } catch (error) {
      if (error.message === 'Operation was cancelled') {
        console.log('User data loading was cancelled');
        return null;
      }
      throw error;
    }
  }

  cancelUserDataLoad(userId) {
    return this.cancellable.cancel(`user-${userId}`);
  }
}
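In a UI this pairs naturally with teardown hooks, for example cancelling a pending load when the user leaves the page; a hypothetical browser sketch:

// Hypothetical: abandon the in-flight load when the page is being torn down
const dataManager = new DataManager();
dataManager.loadUserData(42).catch(console.error); // cancellation resolves to null, other errors surface here

window.addEventListener('pagehide', () => {
  dataManager.cancelUserDataLoad(42);
});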
These async programming patterns form the foundation of robust, scalable JavaScript applications. I’ve found that combining these patterns appropriately based on specific use cases leads to maintainable code that handles real-world complexity gracefully. The key lies in understanding when to apply each pattern and how to compose them effectively for optimal performance and reliability.