Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/olive-onions-yawn.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"watchpack": patch
---

Improve performance for ignored handling and improve performance for the reduce plan.
37 changes: 27 additions & 10 deletions lib/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -60,10 +60,8 @@ const watchEventSource = require("./watchEventSource");
*/
function addWatchersToSet(watchers, set) {
	for (const ww of watchers) {
		// Set.add is already idempotent, so skip the redundant has() probe.
		set.add(ww.watcher.directoryWatcher);
	}
}

Expand All @@ -79,27 +77,44 @@ const stringToRegexp = (ignored) => {
return `${source.slice(0, -1)}(?:$|\\/)`;
};

/**
 * Normalizes path separators for regex testing. `String.prototype.replace`
 * always allocates a new string, even when the pattern finds nothing; for
 * POSIX paths (the common case) that allocation is pure overhead. Check for
 * a backslash with `includes` first so we skip the copy on paths that are
 * already normalized.
 * @param {string} item item
 * @returns {string} item with backslashes normalized to forward slashes
 */
const normalizeSeparators = (item) =>
	item.includes("\\") ? item.replace(/\\/g, "/") : item;

/**
* @param {Ignored=} ignored ignored
* @returns {(item: string) => boolean} ignored to function
*/
const ignoredToFunction = (ignored) => {
if (Array.isArray(ignored)) {
const stringRegexps = ignored.map((i) => stringToRegexp(i)).filter(Boolean);
const stringRegexps =
/** @type {string[]} */
(ignored.map((i) => stringToRegexp(i)).filter(Boolean));
if (stringRegexps.length === 0) {
return () => false;
}
const regexp = new RegExp(stringRegexps.join("|"));
return (item) => regexp.test(item.replace(/\\/g, "/"));
const regexp =
stringRegexps.length === 1
? new RegExp(stringRegexps[0])
: new RegExp(stringRegexps.join("|"));
return (item) => regexp.test(normalizeSeparators(item));
} else if (typeof ignored === "string") {
const stringRegexp = stringToRegexp(ignored);
if (!stringRegexp) {
return () => false;
}
const regexp = new RegExp(stringRegexp);
return (item) => regexp.test(item.replace(/\\/g, "/"));
return (item) => regexp.test(normalizeSeparators(item));
} else if (ignored instanceof RegExp) {
return (item) => ignored.test(item.replace(/\\/g, "/"));
return (item) => ignored.test(normalizeSeparators(item));
} else if (typeof ignored === "function") {
return ignored;
} else if (ignored) {
Expand Down Expand Up @@ -463,8 +478,10 @@ class Watchpack extends EventEmitter {
/** @type {Record<string, number>} */
const obj = Object.create(null);
for (const w of directoryWatchers) {
// getTimes() returns a prototype-less object, so for...in is safe
// and avoids the throwaway array that Object.keys would allocate.
const times = w.getTimes();
for (const file of Object.keys(times)) obj[file] = times[file];
for (const file in times) obj[file] = times[file];
}
return obj;
}
Expand Down
91 changes: 56 additions & 35 deletions lib/reducePlan.js
Original file line number Diff line number Diff line change
Expand Up @@ -67,45 +67,66 @@ module.exports = (plan, limit) => {
}
}
}
// Reduce until limit reached
while (currentCount > limit) {
// Select node that helps reaching the limit most effectively without overmerging
const overLimit = currentCount - limit;
let bestNode;
let bestCost = Infinity;
// Reduce until limit reached. When no reduction is needed at all, skip
// building the candidate set entirely to avoid paying for the setup on the
// common fast path.
if (currentCount > limit) {
// Pre-filter candidate nodes so the inner selection loop skips structural
// non-candidates entirely. `children` length and parent presence are
// fixed after tree construction; only `entries` can change (it can only
// decrease), so a node that fails the `entries` check in a later round
// is simply skipped via `continue`. When we merge a subtree we drop the
// descendants from the candidate set to keep it shrinking over
// iterations.
/** @type {Set<TreeNode<T>>} */
const candidates = new Set();
for (const node of treeMap.values()) {
if (node.entries <= 1 || !node.children || !node.parent) continue;
if (!node.parent || !node.children) continue;
if (node.children.length === 0) continue;
if (node.children.length === 1 && !node.value) continue;
// Try to select the node with has just a bit more entries than we need to reduce
// When just a bit more is over 30% over the limit,
// also consider just a bit less entries then we need to reduce
const cost =
node.entries - 1 >= overLimit
? node.entries - 1 - overLimit
: overLimit - node.entries + 1 + limit * 0.3;
if (cost < bestCost) {
bestNode = node;
bestCost = cost;
}
}
if (!bestNode) break;
// Merge all children
const reduction = bestNode.entries - 1;
bestNode.active = true;
bestNode.entries = 1;
currentCount -= reduction;
let { parent } = bestNode;
while (parent) {
parent.entries -= reduction;
parent = parent.parent;
candidates.add(node);
}
const queue = new Set(bestNode.children);
for (const node of queue) {
node.active = false;
node.entries = 0;
if (node.children) {
for (const child of node.children) queue.add(child);
const costBias = limit * 0.3;
while (currentCount > limit) {
// Select node that helps reaching the limit most effectively without overmerging
const overLimit = currentCount - limit;
let bestNode;
let bestCost = Infinity;
for (const node of candidates) {
if (node.entries <= 1) continue;
// Try to select the node with has just a bit more entries than we need to reduce
// When just a bit more is over 30% over the limit,
// also consider just a bit less entries then we need to reduce
const diff = node.entries - 1 - overLimit;
const cost = diff >= 0 ? diff : -diff + costBias;
if (cost < bestCost) {
bestNode = node;
bestCost = cost;
// A cost of 0 means the merge reduces exactly to the limit;
// no further candidate can improve on that, so stop scanning.
if (cost === 0) break;
}
}
if (!bestNode) break;
// Merge all children
const reduction = bestNode.entries - 1;
bestNode.active = true;
bestNode.entries = 1;
candidates.delete(bestNode);
currentCount -= reduction;
let { parent } = bestNode;
while (parent) {
parent.entries -= reduction;
parent = parent.parent;
}
const queue = new Set(bestNode.children);
for (const node of queue) {
node.active = false;
node.entries = 0;
candidates.delete(node);
if (node.children) {
for (const child of node.children) queue.add(child);
}
}
}
}
Expand Down
28 changes: 18 additions & 10 deletions lib/watchEventSource.js
Original file line number Diff line number Diff line change
Expand Up @@ -61,14 +61,17 @@ function createEPERMError(filePath) {
* @returns {(type: "rename" | "change", filename: string) => void} handler of change event
*/
function createHandleChangeEvent(watcher, filePath, handleChangeEvent) {
// path.basename(filePath) is invariant for the lifetime of the watcher,
// so compute it once rather than on every dispatched event.
const ownBasename = path.basename(filePath);
return (type, filename) => {
// TODO: After Node.js v22, fs.watch(dir) and deleting a dir will trigger the rename change event.
// Here we just ignore it and keep the same behavior as before v22
// https://github.com/libuv/libuv/pull/4376
if (
type === "rename" &&
path.isAbsolute(filename) &&
path.basename(filename) === path.basename(filePath)
path.basename(filename) === ownBasename
) {
if (!IS_OSX) {
// Before v22, windows will throw EPERM error
Expand Down Expand Up @@ -429,16 +432,21 @@ module.exports.watch = (filePath) => {
directWatcher.add(watcher);
return watcher;
}
let current = filePath;
for (;;) {
const recursiveWatcher = recursiveWatchers.get(current);
if (recursiveWatcher !== undefined) {
recursiveWatcher.add(filePath, watcher);
return watcher;
// Only platforms with recursive fs.watch ever populate recursiveWatchers,
// so skip the entire parent walk when the map is empty (always the case
// on Linux and the common case before the watcher limit is reached).
if (recursiveWatchers.size !== 0) {
let current = filePath;
for (;;) {
const recursiveWatcher = recursiveWatchers.get(current);
if (recursiveWatcher !== undefined) {
recursiveWatcher.add(filePath, watcher);
return watcher;
}
const parent = path.dirname(current);
if (parent === current) break;
current = parent;
}
const parent = path.dirname(current);
if (parent === current) break;
current = parent;
}
// Queue up watcher for creation
pendingWatchers.set(watcher, filePath);
Expand Down
Loading