From ee31d232c29547ebec2ed331d89b6be369a72e9b Mon Sep 17 00:00:00 2001
From: Claude Bot
Date: Fri, 19 Sep 2025 09:22:12 +0000
Subject: [PATCH] fix(bundler): HTML entrypoint hash now updates when
 dependencies change
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Previously, HTML chunk hashes were not recomputed when their JS/CSS
dependencies changed because the isolated_hash computation for HTML chunks
didn't account for the hashes of their dependencies. This caused browsers
to cache stale HTML files that referenced old asset URLs, leading to 404
errors.

The fix:
1. Added generateIsolatedHashWithChunks() that accepts a chunks array
2. HTML chunks now include their JS/CSS dependencies' hashes in their own hash
3. Process non-HTML chunks before HTML chunks to ensure dependency hashes
   are computed

This ensures HTML files get new hashes when their dependencies change,
preventing browser caching issues reported in
https://github.com/NDC-Tourney/stream-overlay/pull/40

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude
---
 src/bundler/LinkerContext.zig                |  17 ++
 .../generateChunksInParallel.zig             |  23 ++-
 .../linker_context/postProcessHTMLChunk.zig  |   2 +-
 .../bundler_html_entrypoint_hash.test.ts     | 162 ++++++++++++++++++
 4 files changed, 195 insertions(+), 9 deletions(-)
 create mode 100644 test/bundler/bundler_html_entrypoint_hash.test.ts

diff --git a/src/bundler/LinkerContext.zig b/src/bundler/LinkerContext.zig
index e05da53777..793fe3f32e 100644
--- a/src/bundler/LinkerContext.zig
+++ b/src/bundler/LinkerContext.zig
@@ -821,11 +821,28 @@ pub const LinkerContext = struct {
     }
 
     pub fn generateIsolatedHash(c: *LinkerContext, chunk: *const Chunk) u64 {
+        return c.generateIsolatedHashWithChunks(chunk, &[_]Chunk{});
+    }
+
+    pub fn generateIsolatedHashWithChunks(c: *LinkerContext, chunk: *const Chunk, chunks: []Chunk) u64 {
         const trace = bun.perf.trace("Bundler.generateIsolatedHash");
         defer trace.end();
 
         var hasher = ContentHasher{};
 
+        // For HTML chunks, include the isolated hashes of the JS and CSS chunks they depend on.
+        // This ensures the HTML chunk hash changes when its dependencies change.
+        if (chunk.content == .html and chunks.len > 0) {
+            if (chunk.getJSChunkForHTML(chunks)) |js_chunk| {
+                const hash_bytes = std.mem.asBytes(&js_chunk.isolated_hash);
+                hasher.write(hash_bytes);
+            }
+            if (chunk.getCSSChunkForHTML(chunks)) |css_chunk| {
+                const hash_bytes = std.mem.asBytes(&css_chunk.isolated_hash);
+                hasher.write(hash_bytes);
+            }
+        }
+
         // Mix the file names and part ranges of all of the files in this chunk into
         // the hash. Objects that appear identical but that live in separate files or
         // that live in separate parts in the same file must not be merged. This only
diff --git a/src/bundler/linker_context/generateChunksInParallel.zig b/src/bundler/linker_context/generateChunksInParallel.zig
index 1cc1a05bf1..88e183817f 100644
--- a/src/bundler/linker_context/generateChunksInParallel.zig
+++ b/src/bundler/linker_context/generateChunksInParallel.zig
@@ -181,15 +181,22 @@ pub fn generateChunksInParallel(
         const chunks_to_do = if (is_dev_server) chunks[1..] else chunks;
         if (!is_dev_server or chunks_to_do.len > 0) {
             bun.assert(chunks_to_do.len > 0);
-            debug(" START {d} postprocess chunks", .{chunks_to_do.len});
-            defer debug(" DONE {d} postprocess chunks", .{chunks_to_do.len});
-            try c.parse_graph.pool.worker_pool.eachPtr(
-                c.allocator(),
-                chunk_contexts[0],
-                generateChunk,
-                chunks_to_do,
-            );
+            // Process JS and CSS chunks first (so their isolated_hash is computed)
+            // before processing HTML chunks (which depend on those hashes)
+            // First, process all non-HTML chunks
+            for (chunks_to_do, 0..) |*chunk, i| {
+                if (chunk.content != .html) {
+                    generateChunk(chunk_contexts[0], chunk, i);
+                }
+            }
+
+            // Then process HTML chunks (which can now use the computed hashes)
+            for (chunks_to_do, 0..) |*chunk, i| {
+                if (chunk.content == .html) {
+                    generateChunk(chunk_contexts[0], chunk, i);
+                }
+            }
         }
     }
diff --git a/src/bundler/linker_context/postProcessHTMLChunk.zig b/src/bundler/linker_context/postProcessHTMLChunk.zig
index 65ff00a67c..cba3b854b5 100644
--- a/src/bundler/linker_context/postProcessHTMLChunk.zig
+++ b/src/bundler/linker_context/postProcessHTMLChunk.zig
@@ -22,7 +22,7 @@ pub fn postProcessHTMLChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, c
         @as(u32, @truncate(ctx.chunks.len)),
     ) catch |err| bun.handleOom(err);
 
-    chunk.isolated_hash = c.generateIsolatedHash(chunk);
+    chunk.isolated_hash = c.generateIsolatedHashWithChunks(chunk, ctx.chunks);
 }
 
 const bun = @import("bun");
diff --git a/test/bundler/bundler_html_entrypoint_hash.test.ts b/test/bundler/bundler_html_entrypoint_hash.test.ts
new file mode 100644
index 0000000000..2a54cd217a
--- /dev/null
+++ b/test/bundler/bundler_html_entrypoint_hash.test.ts
@@ -0,0 +1,162 @@
+import { describe, test, expect } from "bun:test";
+import { bunEnv, bunExe, tempDir } from "harness";
+import { join } from "path";
+
+describe("HTML entrypoint isolated_hash", () => {
+  test("HTML chunk hash should change when JS dependencies change", async () => {
+    using dir = tempDir("html-hash-js-test", {
+      "index.html": `<!DOCTYPE html>
+<html>
+  <head>
+    <title>Test</title>
+    <script type="module" src="./index.js"></script>
+  </head>
+  <body>
+    <div>Hello World</div>
+  </body>
+</html>`,
+      "index.js": `console.log("version 1");`,
+    });
+
+    // First build
+    const result1 = await Bun.build({
+      entrypoints: [join(String(dir), "index.html")],
+      outdir: join(String(dir), "dist1"),
+      naming: "[name]-[hash].[ext]",
+    });
+
+    expect(result1.success).toBe(true);
+
+    // Find HTML output
+    const htmlOutput1 = result1.outputs.find((o) => o.path.endsWith(".html"));
+    expect(htmlOutput1).toBeDefined();
+    const htmlPath1 = htmlOutput1!.path;
+    const htmlHash1 = htmlPath1.match(/index-([a-z0-9]+)\.html/)?.[1];
+    expect(htmlHash1).toBeDefined();
+
+    // Modify JS
+    await Bun.write(join(String(dir), "index.js"), `console.log("version 2");`);
+
+    // Second build
+    const result2 = await Bun.build({
+      entrypoints: [join(String(dir), "index.html")],
+      outdir: join(String(dir), "dist2"),
+      naming: "[name]-[hash].[ext]",
+    });
+
+    expect(result2.success).toBe(true);
+
+    // Find HTML output
+    const htmlOutput2 = result2.outputs.find((o) => o.path.endsWith(".html"));
+    expect(htmlOutput2).toBeDefined();
+    const htmlPath2 = htmlOutput2!.path;
+    const htmlHash2 = htmlPath2.match(/index-([a-z0-9]+)\.html/)?.[1];
+    expect(htmlHash2).toBeDefined();
+
+    // HTML hash must change when JS changes
+    expect(htmlHash1).not.toBe(htmlHash2);
+  });
+
+  test("HTML chunk hash should change when CSS dependencies change", async () => {
+    using dir = tempDir("html-hash-css-test", {
+      "index.html": `<!DOCTYPE html>
+<html>
+  <head>
+    <title>Test</title>
+    <link rel="stylesheet" href="./index.css">
+  </head>
+  <body>
+    <div>Hello World</div>
+  </body>
+</html>`,
+      "index.css": `body { color: red; }`,
+    });
+
+    // First build
+    const result1 = await Bun.build({
+      entrypoints: [join(String(dir), "index.html")],
+      outdir: join(String(dir), "dist1"),
+      naming: "[name]-[hash].[ext]",
+    });
+
+    expect(result1.success).toBe(true);
+
+    // Find HTML output
+    const htmlOutput1 = result1.outputs.find((o) => o.path.endsWith(".html"));
+    expect(htmlOutput1).toBeDefined();
+    const htmlPath1 = htmlOutput1!.path;
+    const htmlHash1 = htmlPath1.match(/index-([a-z0-9]+)\.html/)?.[1];
+    expect(htmlHash1).toBeDefined();
+
+    // Modify CSS
+    await Bun.write(join(String(dir), "index.css"), `body { color: blue; }`);
+
+    // Second build
+    const result2 = await Bun.build({
+      entrypoints: [join(String(dir), "index.html")],
+      outdir: join(String(dir), "dist2"),
+      naming: "[name]-[hash].[ext]",
+    });
+
+    expect(result2.success).toBe(true);
+
+    // Find HTML output
+    const htmlOutput2 = result2.outputs.find((o) => o.path.endsWith(".html"));
+    expect(htmlOutput2).toBeDefined();
+    const htmlPath2 = htmlOutput2!.path;
+    const htmlHash2 = htmlPath2.match(/index-([a-z0-9]+)\.html/)?.[1];
+    expect(htmlHash2).toBeDefined();
+
+    // HTML hash must change when CSS changes
+    expect(htmlHash1).not.toBe(htmlHash2);
+  });
+
+  test("HTML chunk hash should not change when dependencies don't change", async () => {
+    using dir = tempDir("html-hash-stable-test", {
+      "index.html": `<!DOCTYPE html>
+<html>
+  <head>
+    <title>Test</title>
+    <link rel="stylesheet" href="./index.css">
+    <script type="module" src="./index.js"></script>
+  </head>
+  <body>
+    <div>Hello World</div>
+  </body>
+</html>`,
+      "index.js": `console.log("stable");`,
+      "index.css": `body { color: green; }`,
+    });
+
+    // First build
+    const result1 = await Bun.build({
+      entrypoints: [join(String(dir), "index.html")],
+      outdir: join(String(dir), "dist1"),
+      naming: "[name]-[hash].[ext]",
+    });
+
+    expect(result1.success).toBe(true);
+
+    // Second build without any changes
+    const result2 = await Bun.build({
+      entrypoints: [join(String(dir), "index.html")],
+      outdir: join(String(dir), "dist2"),
+      naming: "[name]-[hash].[ext]",
+    });
+
+    expect(result2.success).toBe(true);
+
+    // Find HTML outputs
+    const htmlOutput1 = result1.outputs.find((o) => o.path.endsWith(".html"));
+    const htmlOutput2 = result2.outputs.find((o) => o.path.endsWith(".html"));
+
+    expect(htmlOutput1).toBeDefined();
+    expect(htmlOutput2).toBeDefined();
+
+    const htmlHash1 = htmlOutput1!.path.match(/index-([a-z0-9]+)\.html/)?.[1];
+    const htmlHash2 = htmlOutput2!.path.match(/index-([a-z0-9]+)\.html/)?.[1];
+
+    // Hashes should be the same when nothing changes
+    expect(htmlHash1).toBe(htmlHash2);
+  });
+});
\ No newline at end of file
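
Addendum (outside the patch): a minimal TypeScript sketch of the hashing relationship this fix establishes. The Chunk type, isolatedHash() helper, and use of node:crypto below are illustrative assumptions, not Bun internals; the real implementation is generateIsolatedHashWithChunks() in LinkerContext.zig, with dependency-first ordering in generateChunksInParallel.zig.

// Illustrative sketch only: mirrors the dependency-aware hashing added above.
// "Chunk" and "isolatedHash" are hypothetical names for this example.
import { createHash } from "node:crypto";

type Chunk = {
  kind: "html" | "js" | "css";
  content: string;
  isolatedHash?: string;
};

function isolatedHash(chunk: Chunk, deps: Chunk[] = []): string {
  const hasher = createHash("sha256");
  // Mix in the already-computed hashes of the chunk's dependencies first,
  // so the HTML hash changes whenever a referenced JS/CSS chunk changes.
  for (const dep of deps) hasher.update(dep.isolatedHash ?? "");
  hasher.update(chunk.content);
  return hasher.digest("hex").slice(0, 16);
}

// Dependency-first ordering: non-HTML chunks are hashed before the HTML
// chunk that references them.
const js: Chunk = { kind: "js", content: `console.log("version 1");` };
const css: Chunk = { kind: "css", content: `body { color: red; }` };
js.isolatedHash = isolatedHash(js);
css.isolatedHash = isolatedHash(css);

const html: Chunk = { kind: "html", content: "<!DOCTYPE html>..." };
html.isolatedHash = isolatedHash(html, [js, css]);

// Editing index.js now also produces a different HTML hash, so a browser
// can no longer serve a stale cached index-<hash>.html pointing at old assets.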