Mirror of https://github.com/oven-sh/bun, synced 2026-02-12 20:09:04 +00:00
feat: add source map compression support
- Compress source map files (.map) when compression is enabled
- Generate .map.gz and .map.zst files alongside compressed JS files
- Update tests to verify source maps are compressed correctly
- Test multiple compression formats used together (both gzip and zstd)
- Source maps are often larger than the JS files themselves, so compressing them is valuable

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
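For context, a minimal sketch of how the new behavior surfaces through the JS API, based on the options exercised in the updated tests below (the entrypoint path and outdir are hypothetical; the CLI equivalent is repeating the flag, e.g. --compress=gzip --compress=zstd):

import * as fs from "node:fs";
import * as path from "node:path";

// Hypothetical project layout for illustration.
const outdir = "./out";

const result = await Bun.build({
  entrypoints: ["./entry.ts"],
  outdir,
  compress: { gzip: true, zstd: true }, // option shape taken from the updated tests
  sourcemap: "external",
});

if (result.success) {
  // With this change, compressed copies of the source map are emitted
  // alongside the compressed JS.
  for (const name of ["entry.js.gz", "entry.js.zst", "entry.js.map.gz", "entry.js.map.zst"]) {
    console.log(name, fs.existsSync(path.join(outdir, name)));
  }
}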
@@ -142,6 +142,121 @@ pub fn writeOutputFilesToDisk(
             .result => {},
         }

+        // Write compressed versions of source map if requested
+        if (c.options.compression.gzip) {
+            const libdeflate = @import("../../deps/libdeflate.zig");
+            libdeflate.load();
+
+            const compressor = libdeflate.Compressor.alloc(6) orelse {
+                c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "Failed to allocate gzip compressor for source map {}", .{
+                    bun.fmt.quote(source_map_final_rel_path),
+                }) catch unreachable;
+                return error.CompressionFailed;
+            };
+            defer compressor.deinit();
+
+            const max_size = compressor.maxBytesNeeded(output_source_map, .gzip);
+            const gzip_buffer = bun.default_allocator.alloc(u8, max_size) catch {
+                c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "Failed to allocate memory for gzip compression of source map {}", .{
+                    bun.fmt.quote(source_map_final_rel_path),
+                }) catch unreachable;
+                return error.CompressionFailed;
+            };
+            defer bun.default_allocator.free(gzip_buffer);
+
+            const gzip_result = compressor.gzip(output_source_map, gzip_buffer);
+            const gzip_path = try std.fmt.allocPrint(bun.default_allocator, "{s}.gz", .{source_map_final_rel_path});
+            defer bun.default_allocator.free(gzip_path);
+
+            switch (jsc.Node.fs.NodeFS.writeFileWithPathBuffer(
+                &pathbuf,
+                .{
+                    .data = .{
+                        .buffer = .{
+                            .buffer = .{
+                                .ptr = @constCast(gzip_buffer.ptr),
+                                .len = @as(u32, @truncate(gzip_result.written)),
+                                .byte_len = @as(u32, @truncate(gzip_result.written)),
+                            },
+                        },
+                    },
+                    .encoding = .buffer,
+                    .dirfd = .fromStdDir(root_dir),
+                    .file = .{
+                        .path = jsc.Node.PathLike{
+                            .string = bun.PathString.init(gzip_path),
+                        },
+                    },
+                },
+            )) {
+                .err => |err| {
+                    try c.log.addSysError(bun.default_allocator, err, "writing gzip compressed source map {}", .{
+                        bun.fmt.quote(gzip_path),
+                    });
+                    return error.WriteFailed;
+                },
+                .result => {},
+            }
+        }
+
+        if (c.options.compression.zstd) {
+            const zstd = @import("../../deps/zstd.zig");
+
+            const max_size = zstd.compressBound(output_source_map.len);
+            const zstd_buffer = bun.default_allocator.alloc(u8, max_size) catch {
+                c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "Failed to allocate memory for zstd compression of source map {}", .{
+                    bun.fmt.quote(source_map_final_rel_path),
+                }) catch unreachable;
+                return error.CompressionFailed;
+            };
+            defer bun.default_allocator.free(zstd_buffer);
+
+            const zstd_result = zstd.compress(zstd_buffer, output_source_map, 3);
+            const compressed_size = switch (zstd_result) {
+                .success => |size| size,
+                .err => |msg| {
+                    c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "Failed to zstd compress source map {}: {s}", .{
+                        bun.fmt.quote(source_map_final_rel_path),
+                        msg,
+                    }) catch unreachable;
+                    return error.CompressionFailed;
+                },
+            };
+
+            const zstd_path = try std.fmt.allocPrint(bun.default_allocator, "{s}.zst", .{source_map_final_rel_path});
+            defer bun.default_allocator.free(zstd_path);
+
+            switch (jsc.Node.fs.NodeFS.writeFileWithPathBuffer(
+                &pathbuf,
+                .{
+                    .data = .{
+                        .buffer = .{
+                            .buffer = .{
+                                .ptr = @constCast(zstd_buffer.ptr),
+                                .len = @as(u32, @truncate(compressed_size)),
+                                .byte_len = @as(u32, @truncate(compressed_size)),
+                            },
+                        },
+                    },
+                    .encoding = .buffer,
+                    .dirfd = .fromStdDir(root_dir),
+                    .file = .{
+                        .path = jsc.Node.PathLike{
+                            .string = bun.PathString.init(zstd_path),
+                        },
+                    },
+                },
+            )) {
+                .err => |err| {
+                    try c.log.addSysError(bun.default_allocator, err, "writing zstd compressed source map {}", .{
+                        bun.fmt.quote(zstd_path),
+                    });
+                    return error.WriteFailed;
+                },
+                .result => {},
+            }
+        }
+
         source_map_output_file = options.OutputFile.init(.{
             .output_path = source_map_final_rel_path,
             .input_path = try strings.concat(bun.default_allocator, &.{ input_path, ".map" }),
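The gzip branch above sizes its buffer with libdeflate's maxBytesNeeded and the zstd branch with compressBound, so a single allocation always covers the worst-case compressed output. The contract the written files must satisfy can be checked from JavaScript; a minimal round-trip sketch using node:zlib, with hypothetical paths:

import * as fs from "node:fs";
import * as zlib from "node:zlib";

// Hypothetical paths for illustration.
const map = fs.readFileSync("out/entry.js.map");
const mapGz = fs.readFileSync("out/entry.js.map.gz");

// The emitted .map.gz must gunzip back to the exact bytes of the .map file.
console.log(zlib.gunzipSync(mapGz).equals(map)); // expected: true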
@@ -227,7 +227,7 @@ describe("Bun.build compress API", () => {
     expect(zstdSize).toBeLessThan(originalSize * 0.2);
   });

-  test("compress with sourcemap", async () => {
+  test("compress with sourcemap compresses both files", async () => {
     const tmpdir = tmpdirSync();
     const entryPath = path.join(tmpdir, "entry.ts");
     const outdir = path.join(tmpdir, "out");
@@ -241,17 +241,35 @@ console.log(message);`,
     const result = await Bun.build({
       entrypoints: [entryPath],
       outdir,
-      compress: "gzip",
+      compress: { gzip: true, zstd: true },
       sourcemap: "external",
     });

     expect(result.success).toBe(true);

     // Check all files exist
     expect(fs.existsSync(path.join(outdir, "entry.js"))).toBe(true);
     expect(fs.existsSync(path.join(outdir, "entry.js.gz"))).toBe(true);
+    expect(fs.existsSync(path.join(outdir, "entry.js.zst"))).toBe(true);
     expect(fs.existsSync(path.join(outdir, "entry.js.map"))).toBe(true);
+    expect(fs.existsSync(path.join(outdir, "entry.js.map.gz"))).toBe(true);
+    expect(fs.existsSync(path.join(outdir, "entry.js.map.zst"))).toBe(true);

-    // Verify compressed file is valid
-    const gzContent = fs.readFileSync(path.join(outdir, "entry.js.gz"));
-    expect(() => zlib.gunzipSync(gzContent)).not.toThrow();
+    // Verify gzip files are valid
+    const jsGz = fs.readFileSync(path.join(outdir, "entry.js.gz"));
+    const mapGz = fs.readFileSync(path.join(outdir, "entry.js.map.gz"));
+    expect(() => zlib.gunzipSync(jsGz)).not.toThrow();
+    expect(() => zlib.gunzipSync(mapGz)).not.toThrow();
+
+    // Verify zstd files have correct magic bytes
+    const jsZst = fs.readFileSync(path.join(outdir, "entry.js.zst"));
+    const mapZst = fs.readFileSync(path.join(outdir, "entry.js.map.zst"));
+    expect(jsZst[0]).toBe(0x28);
+    expect(mapZst[0]).toBe(0x28);
+
+    // Verify decompressed content matches
+    const original = fs.readFileSync(path.join(outdir, "entry.js"));
+    const decompressed = zlib.gunzipSync(jsGz);
+    expect(decompressed).toEqual(original);
   });
 });
@@ -235,7 +235,7 @@ describe("bun build --compress", () => {
     expect(stderr.toString()).toContain("Valid formats: 'gzip', 'zstd'");
   });

-  test("compression works with source maps", () => {
+  test("compression works with source maps and compresses both", () => {
     const tmpdir = tmpdirSync();
     const srcFile = path.join(tmpdir, "index.ts");
     const outdir = path.join(tmpdir, "out");
@@ -249,7 +249,7 @@ describe("bun build --compress", () => {
     );

     const { exitCode, stderr } = Bun.spawnSync({
-      cmd: [bunExe(), "build", srcFile, "--outdir", outdir, "--sourcemap=external", "--compress=gzip"],
+      cmd: [bunExe(), "build", srcFile, "--outdir", outdir, "--sourcemap=external", "--compress=gzip", "--compress=zstd"],
       env: bunEnv,
       stderr: "pipe",
     });
@@ -257,17 +257,27 @@ describe("bun build --compress", () => {
     expect(stderr.toString()).toBe("");
     expect(exitCode).toBe(0);

-    // Check that main file and its compressed version exist
+    // Check that all files and their compressed versions exist
     expect(fs.existsSync(path.join(outdir, "index.js"))).toBe(true);
     expect(fs.existsSync(path.join(outdir, "index.js.gz"))).toBe(true);
+    expect(fs.existsSync(path.join(outdir, "index.js.zst"))).toBe(true);
     expect(fs.existsSync(path.join(outdir, "index.js.map"))).toBe(true);
+    expect(fs.existsSync(path.join(outdir, "index.js.map.gz"))).toBe(true);
+    expect(fs.existsSync(path.join(outdir, "index.js.map.zst"))).toBe(true);

-    // Note: Source maps are not compressed in the current implementation
-    // This could be added as a future enhancement
-
-    // Verify compressed file is valid
+    // Verify compressed files are valid
     const jsGz = fs.readFileSync(path.join(outdir, "index.js.gz"));
+    const mapGz = fs.readFileSync(path.join(outdir, "index.js.map.gz"));
     expect(() => zlib.gunzipSync(jsGz)).not.toThrow();
+    expect(() => zlib.gunzipSync(mapGz)).not.toThrow();
+
+    // Verify zstd files have correct magic bytes
+    const jsZst = fs.readFileSync(path.join(outdir, "index.js.zst"));
+    const mapZst = fs.readFileSync(path.join(outdir, "index.js.map.zst"));
+    expect(jsZst[0]).toBe(0x28);
+    expect(jsZst[1]).toBe(0xb5);
+    expect(mapZst[0]).toBe(0x28);
+    expect(mapZst[1]).toBe(0xb5);
   });

   test("large file compression works correctly", () => {
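The magic-byte assertions above check only the first two bytes; the full zstd frame magic is the four bytes 0x28 0xB5 0x2F 0xFD, i.e. the little-endian u32 0xFD2FB528. A slightly stricter check, as a sketch with a hypothetical path:

import * as fs from "node:fs";

const ZSTD_MAGIC = 0xfd2fb528; // zstd frame magic; bytes 28 B5 2F FD on disk

// Hypothetical path for illustration.
const mapZst = fs.readFileSync("out/index.js.map.zst");
console.log(mapZst.readUInt32LE(0) === ZSTD_MAGIC); // expected: true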