perf: memoize hash filter to eliminate redundant disk reads per build
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
+7
-4
@@ -740,14 +740,17 @@ export default function (eleventyConfig) {
|
|||||||
return url.endsWith("/") ? url.slice(0, -1) : url;
|
return url.endsWith("/") ? url.slice(0, -1) : url;
|
||||||
});
|
});
|
||||||
|
|
||||||
// Hash filter for cache busting — memoized, since the same asset paths repeat
// across every page render. Maps template-supplied path -> 8-char MD5 digest
// of the file's current contents.
const _hashCache = new Map();

// Recompute hashes on every build so edited assets get fresh digests
// (matters in --watch / --serve, where the process stays alive across builds).
eleventyConfig.on("eleventy.before", () => { _hashCache.clear(); });

eleventyConfig.addFilter("hash", (filePath) => {
  // Hit: skip the disk read entirely.
  if (_hashCache.has(filePath)) return _hashCache.get(filePath);
  try {
    // Root-relative paths ("/css/site.css") are resolved against the project
    // directory; everything else is resolved as-is.
    const fullPath = resolve(__dirname, filePath.startsWith("/") ? `.${filePath}` : filePath);
    const result = createHash("md5").update(readFileSync(fullPath)).digest("hex").slice(0, 8);
    _hashCache.set(filePath, result);
    return result;
  } catch (err) {
    // Surface broken asset paths instead of failing silently — a per-call
    // timestamp "hash" changes on every build and defeats caching forever.
    console.warn(`[hash filter] could not read "${filePath}": ${err?.message ?? err}`);
    // Timestamp fallback keeps the build running. Deliberately NOT cached, so
    // a file that appears later in the same build is picked up next call.
    return Date.now().toString(36);
  }
});
|
||||||
|
|||||||
Reference in New Issue
Block a user