A race condition occurs when multiple asynchronous operations execute concurrently and complete in an order different from what the code expects, leading to incorrect state, stale data, or data corruption. While Node.js is single-threaded, its asynchronous nature means operations can finish in any order.
A race condition in Node.js happens when the behavior of your code depends on the relative timing or completion order of asynchronous operations, and those operations don't complete in the sequence you anticipated. This creates non-deterministic bugs that may only appear intermittently or under specific timing conditions. Although Node.js runs JavaScript in a single thread using an event loop, it handles I/O operations (file system, network requests, database queries) asynchronously through the libuv library. Multiple async operations can be "in flight" simultaneously, and they complete whenever the underlying system (network, disk, database) responds. This means a request started later might finish earlier than one started first. Common scenarios include: API autocomplete where older search results overwrite newer ones, database updates where a slower query overwrites a faster one, file operations where reads happen before writes complete, or concurrent HTTP requests returning out of sequence. Unlike traditional multithreading race conditions, Node.js race conditions arise from callback timing and promise resolution order rather than parallel execution.
Add tracking identifiers to detect when stale operations complete:
// Example: Autocomplete with race condition detection
let currentRequestId = 0;
async function handleSearch(query) {
const requestId = ++currentRequestId;
console.log(`Starting request ${requestId} for: ${query}`);
const results = await fetch(`/api/search?q=${query}`).then(r => r.json());
// Check if this is still the most recent request
if (requestId !== currentRequestId) {
console.warn(`Discarding stale results from request ${requestId}`);
return; // Ignore outdated results
}
updateUI(results);
}
// Usage
searchInput.addEventListener('input', (e) => {
handleSearch(e.target.value);
});This pattern prevents older requests from overwriting newer results by checking if a more recent request has been initiated.
Cancel previous requests when new ones are initiated:
let abortController = null;
async function handleSearch(query) {
// Cancel previous request if still running
if (abortController) {
abortController.abort();
}
// Create new abort controller for this request
abortController = new AbortController();
try {
const results = await fetch(`/api/search?q=${query}`, {
signal: abortController.signal
}).then(r => r.json());
updateUI(results);
} catch (err) {
if (err.name === 'AbortError') {
console.log('Request cancelled due to newer request');
} else {
throw err;
}
}
}For Node.js HTTP requests with axios:
const axios = require('axios');
let cancelToken = null;
async function fetchData(params) {
if (cancelToken) {
cancelToken.cancel('Operation cancelled due to new request');
}
cancelToken = axios.CancelToken.source();
try {
const response = await axios.get('/api/data', {
params,
cancelToken: cancelToken.token
});
return response.data;
} catch (err) {
if (axios.isCancel(err)) {
console.log('Request cancelled:', err.message);
} else {
throw err;
}
}
}Ensure operations that depend on order use await to execute sequentially:
// Bad: Race condition - reads may happen before write completes
async function updateConfig(newConfig) {
// writeFile returns a promise that is silently dropped, so the read below
// can start before the write has finished.
fs.promises.writeFile('config.json', JSON.stringify(newConfig)); // Missing await!
const config = await fs.promises.readFile('config.json', 'utf8');
return JSON.parse(config); // May return old config
}
// Good: Sequential execution
async function updateConfig(newConfig) {
await fs.promises.writeFile('config.json', JSON.stringify(newConfig));
const config = await fs.promises.readFile('config.json', 'utf8');
return JSON.parse(config); // Always returns updated config
}
// Bad: Parallel database updates cause race condition
// Read-modify-write window: two concurrent calls can read the same count
// and both write count + 1, losing one increment.
async function incrementCounter(userId) {
const user = await db.users.findOne({ id: userId });
const newCount = user.count + 1;
await db.users.update({ id: userId }, { count: newCount }); // Race if called concurrently
}
// Good: Atomic update
// The database applies the increment server-side, so concurrent calls
// serialize there and no update is lost.
async function incrementCounter(userId) {
await db.users.update(
{ id: userId },
{ $inc: { count: 1 } } // Atomic operation
);
}Use sequential iteration when order matters:
// Good: awaiting inside for...of processes items one at a time, in order
for (const item of items) {
await processItem(item); // Sequential
}
// Only use Promise.all when order doesn't matter
await Promise.all(items.map(item => processItem(item))); // Concurrent
// Prevent concurrent access to shared resources using locks:
const { Mutex } = require('async-mutex');
const mutex = new Mutex();
let sharedCounter = 0;
// Serializes the read-modify-write of sharedCounter: without the mutex,
// overlapping calls could all read the same value before any write lands.
async function incrementWithLock() {
// Acquire lock before accessing shared state
const release = await mutex.acquire();
try {
// Critical section - only one async operation at a time
const current = sharedCounter;
await someAsyncOperation(); // Simulated delay
sharedCounter = current + 1;
} finally {
release(); // Always release the lock
}
}
// Multiple calls won't race
await Promise.all([
incrementWithLock(),
incrementWithLock(),
incrementWithLock()
]);
console.log(sharedCounter); // Always 3
// For resource-limited operations, use semaphores:
const { Semaphore } = require('async-mutex');
// Allow max 3 concurrent database queries
const semaphore = new Semaphore(3);
// Once all 3 permits are taken, further callers queue in acquire() until a
// running query releases its permit.
async function queryDatabase(query) {
// acquire() resolves to [value, release]; `value` (remaining weight) is
// unused here.
const [value, release] = await semaphore.acquire();
try {
return await db.query(query);
} finally {
release();
}
}
// Only 3 queries run at once, preventing database overload
const results = await Promise.all(
queries.map(q => queryDatabase(q))
);Reduce the number of concurrent operations to prevent races:
// Debounce: Only execute after user stops typing
/**
 * Returns a wrapper that postpones `fn` until `delay` ms have elapsed since
 * the wrapper was last invoked. Only the final call in a burst runs; the
 * `this` binding and arguments of that final call are forwarded.
 */
function debounce(fn, delay) {
  let pending;
  return function (...latestArgs) {
    if (pending !== undefined) {
      clearTimeout(pending); // a newer call supersedes the scheduled one
    }
    pending = setTimeout(() => fn.apply(this, latestArgs), delay);
  };
}
// Fires at most one request per burst of typing: waits 300 ms after the
// last keystroke before fetching.
const debouncedSearch = debounce(async (query) => {
const results = await fetch(`/api/search?q=${query}`).then(r => r.json());
updateUI(results);
}, 300);
// NOTE(review): a rejection from the async callback is unhandled here —
// consider a try/catch inside the callback.
searchInput.addEventListener('input', (e) => {
debouncedSearch(e.target.value);
});
// Throttle: Execute at most once per interval
/**
 * Leading-edge throttle: the wrapper invokes `fn` immediately, then ignores
 * further calls until `interval` ms have passed. Returns `fn`'s result when
 * it runs and `undefined` for suppressed calls.
 */
function throttle(fn, interval) {
  let previousRun = 0;
  return function (...args) {
    const timestamp = Date.now();
    if (timestamp - previousRun < interval) {
      return undefined; // suppressed: still inside the cooldown window
    }
    previousRun = timestamp;
    return fn.apply(this, args);
  };
}
// Persists at most once per second no matter how often it is invoked.
const throttledSave = throttle(async (data) => {
await saveToDatabase(data);
}, 1000);Using lodash:
const _ = require('lodash');
// NOTE(review): standalone example — `debouncedSearch` is also declared in
// the hand-rolled snippet above; a real module should keep only one.
const debouncedSearch = _.debounce(async (query) => {
const results = await fetch(`/api/search?q=${query}`).then(r => r.json());
updateUI(results);
}, 300);Use version numbers or timestamps to detect conflicting updates:
// Schema includes version field
// { id: 1, data: "value", version: 1 }
async function updateWithOptimisticLock(id, newData) {
while (true) {
// Read current version
const record = await db.findOne({ id });
const currentVersion = record.version;
// Attempt update with version check
const result = await db.updateOne(
{ id, version: currentVersion }, // Only update if version matches
{
data: newData,
version: currentVersion + 1
}
);
if (result.modifiedCount === 1) {
return; // Success
}
// Version mismatch - another operation updated first
console.log('Concurrent update detected, retrying...');
await new Promise(resolve => setTimeout(resolve, 100)); // Brief delay
}
}Using Prisma with version-based locking:
async function updateUser(userId, updates) {
const user = await prisma.user.findUnique({ where: { id: userId } });
try {
return await prisma.user.update({
where: {
id: userId,
version: user.version // Only update if version matches
},
data: {
...updates,
version: user.version + 1
}
});
} catch (err) {
throw new Error('Concurrent update detected - record was modified');
}
}Testing Race Conditions
Race conditions are notoriously difficult to test because they depend on timing. Use these strategies:
// Introduce artificial delays to expose race conditions
async function testRaceCondition() {
const results = [];
// Start multiple operations with varying delays
await Promise.all([
(async () => {
await new Promise(resolve => setTimeout(resolve, 100));
results.push('slow');
})(),
(async () => {
await new Promise(resolve => setTimeout(resolve, 10));
results.push('fast');
})()
]);
console.log(results); // May be ['fast', 'slow'] or ['slow', 'fast']
}Race Condition Detection Tools
- async-race-detector: Runtime detection library that warns about potential races
- Node.js `async_hooks` module (and `AsyncLocalStorage`): track the lifecycle of asynchronous resources for debugging
- Chrome DevTools async stack traces: enabled by default in recent versions; inspect them in the Sources panel to visualize async call chains
Event Loop and Microtask Queue
Understanding Node.js event loop helps predict async behavior:
// Synchronous statements run to completion first; then the microtask queue
// (promise callbacks) drains; only then do timer (macrotask) callbacks fire.
console.log('1. Sync');
setTimeout(() => console.log('2. Macro task'), 0);
Promise.resolve().then(() => console.log('3. Micro task'));
console.log('4. Sync');
// Output: 1, 4, 3, 2
// Microtasks (promises) run before macrotasks (setTimeout)
// Promise.race() for Timeout Enforcement
Prevent indefinitely hanging operations:
async function fetchWithTimeout(url, timeout = 5000) {
const timeoutPromise = new Promise((_, reject) =>
setTimeout(() => reject(new Error('Request timeout')), timeout)
);
return Promise.race([
fetch(url).then(r => r.json()),
timeoutPromise
]);
}Redux/State Management Race Conditions
In React/Redux applications:
// Redux Thunk with request tracking
const FETCH_START = 'FETCH_START';
const FETCH_SUCCESS = 'FETCH_SUCCESS';
let currentRequestId = 0;
function fetchData(params) {
return async (dispatch) => {
const requestId = ++currentRequestId;
dispatch({ type: FETCH_START, requestId });
const data = await api.fetchData(params);
// Only dispatch if still the latest request
if (requestId === currentRequestId) {
dispatch({ type: FETCH_SUCCESS, data });
}
};
}Worker Threads and True Parallelism
Node.js worker threads can create true race conditions requiring traditional synchronization:
const { Worker } = require('worker_threads');
// Workers run in parallel, can have true race conditions
const worker1 = new Worker('./worker.js');
const worker2 = new Worker('./worker.js');
// Use SharedArrayBuffer with Atomics for thread-safe operations
const sharedBuffer = new SharedArrayBuffer(4); // 4 bytes = one Int32 slot
const sharedArray = new Int32Array(sharedBuffer);
// NOTE(review): for the workers to see this buffer it must also be handed
// to them (e.g. via workerData or postMessage) — not shown here.
// Atomic increment (thread-safe)
Atomics.add(sharedArray, 0, 1);Database Transaction Isolation
Use proper transaction isolation levels to prevent database race conditions:
// PostgreSQL with Prisma
// The read and the balance write happen inside one serializable transaction,
// so a concurrent transfer cannot interleave between findUnique and update.
await prisma.$transaction(async (tx) => {
const account = await tx.account.findUnique({ where: { id } });
await tx.account.update({
where: { id },
data: { balance: account.balance + amount }
});
}, {
isolationLevel: 'Serializable' // Highest isolation level
// NOTE(review): serializable transactions can abort with a serialization
// failure under contention — callers should be prepared to retry.
});Common isolation levels:
- READ UNCOMMITTED: Lowest, allows dirty reads
- READ COMMITTED: Default for many DBs
- REPEATABLE READ: Prevents non-repeatable reads
- SERIALIZABLE: Highest; transactions behave as if executed one at a time, preventing all read/write anomalies, but it may reduce throughput and can require retrying transactions aborted with serialization failures
Error: Listener already called (once event already fired)
EventEmitter listener already called with once()
Error: EACCES: permission denied, open '/root/file.txt'
EACCES: permission denied
Error: Invalid encoding specified (stream encoding not supported)
How to fix Invalid encoding error in Node.js readable streams
Error: EINVAL: invalid argument, open
EINVAL: invalid argument, open
TypeError: readableLength must be a positive integer (stream config)
TypeError: readableLength must be a positive integer in Node.js streams