fix(pack): re-read package.json after prepack/prepare scripts (#26267)
## Summary

- Fixes `bun pm pack` not respecting changes to `package.json` made by prepack/prepare scripts
- Tracks whether lifecycle scripts (prepublishOnly, prepack, prepare) ran
- If scripts ran, invalidates the cached package.json and re-reads from disk before creating the tarball

Closes #24314

## Test plan

- Added regression test `test/regression/issue/24314.test.ts` that verifies package.json modifications from prepack/prepare scripts are included in the tarball
- All existing pack tests pass (69 tests)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

---------

Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
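For context, the failure mode is easiest to see with a fixture like the one in the regression test below: a `prepack` script rewrites `package.json`, but before this change the tarball produced by `bun pm pack` still contained the original, cached contents. A minimal sketch of such a script (the file name and the modified field mirror the test fixture and are illustrative only):

```js
// prepack.js: runs during the "prepack" lifecycle step of `bun pm pack`.
// It edits package.json on disk; prior to this fix, the pack command kept
// using the package.json it had already read and cached in memory.
const fs = require("fs");

const pkg = JSON.parse(fs.readFileSync("package.json", "utf8"));
pkg.description = "MODIFIED BY PREPACK";
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 2));
```

With this change, running any of prepublishOnly/prepack/prepare invalidates the cached `package.json` entry and the file is re-read from disk, so the packed tarball picks up the modified description.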
@@ -1172,7 +1172,7 @@ pub const PackCommand = struct {
     ) PackError(for_publish)!if (for_publish) Publish.Context(true) else void {
         const manager = ctx.manager;
         const log_level = manager.options.log_level;
-        const json = switch (manager.workspace_package_json_cache.getWithPath(manager.allocator, manager.log, abs_package_json_path, .{
+        var json = switch (manager.workspace_package_json_cache.getWithPath(manager.allocator, manager.log, abs_package_json_path, .{
             .guess_indentation = true,
         })) {
             .read_err => |err| {
@@ -1235,8 +1235,6 @@
             }
         }

-        const edited_package_json = try editRootPackageJSON(ctx.allocator, ctx.lockfile, json);
-
         var this_transpiler: bun.transpiler.Transpiler = undefined;

         _ = RunCommand.configureEnvForRun(
@@ -1258,16 +1256,20 @@
         const abs_workspace_path: string = strings.withoutTrailingSlash(strings.withoutSuffixComptime(abs_package_json_path, "package.json"));
         try manager.env.map.put("npm_command", "pack");

-        const postpack_script, const publish_script: ?[]const u8, const postpublish_script: ?[]const u8 = post_scripts: {
+        const postpack_script, const publish_script: ?[]const u8, const postpublish_script: ?[]const u8, const ran_scripts: bool = post_scripts: {
             // --ignore-scripts
-            if (!manager.options.do.run_scripts) break :post_scripts .{ null, null, null };
+            if (!manager.options.do.run_scripts) break :post_scripts .{ null, null, null, false };

-            const scripts = json.root.asProperty("scripts") orelse break :post_scripts .{ null, null, null };
-            if (scripts.expr.data != .e_object) break :post_scripts .{ null, null, null };
+            const scripts = json.root.asProperty("scripts") orelse break :post_scripts .{ null, null, null, false };
+            if (scripts.expr.data != .e_object) break :post_scripts .{ null, null, null, false };

+            // Track whether any scripts ran that could modify package.json
+            var did_run_scripts = false;
+
             if (comptime for_publish) {
                 if (scripts.expr.get("prepublishOnly")) |prepublish_only_script_str| {
                     if (prepublish_only_script_str.asString(ctx.allocator)) |prepublish_only| {
+                        did_run_scripts = true;
                         _ = RunCommand.runPackageScriptForeground(
                             ctx.command_ctx,
                             ctx.allocator,
@@ -1293,6 +1295,7 @@

             if (scripts.expr.get("prepack")) |prepack_script| {
                 if (prepack_script.asString(ctx.allocator)) |prepack_script_str| {
+                    did_run_scripts = true;
                     _ = RunCommand.runPackageScriptForeground(
                         ctx.command_ctx,
                         ctx.allocator,
@@ -1317,6 +1320,7 @@

             if (scripts.expr.get("prepare")) |prepare_script| {
                 if (prepare_script.asString(ctx.allocator)) |prepare_script_str| {
+                    did_run_scripts = true;
                     _ = RunCommand.runPackageScriptForeground(
                         ctx.command_ctx,
                         ctx.allocator,
@@ -1354,12 +1358,59 @@
                     postpublish_script = try postpublish.asStringCloned(ctx.allocator);
                 }

-                break :post_scripts .{ postpack_script, publish_script, postpublish_script };
+                break :post_scripts .{ postpack_script, publish_script, postpublish_script, did_run_scripts };
             }

-            break :post_scripts .{ postpack_script, null, null };
+            break :post_scripts .{ postpack_script, null, null, did_run_scripts };
         };

+        // If any lifecycle scripts ran, they may have modified package.json,
+        // so we need to re-read it from disk to pick up any changes.
+        if (ran_scripts) {
+            // Invalidate the cached entry by removing it.
+            // On Windows, the cache key is stored with POSIX path separators,
+            // so we need to convert the path before removing.
+            var cache_key_buf: if (Environment.isWindows) PathBuffer else void = undefined;
+            const cache_key = if (comptime Environment.isWindows) blk: {
+                @memcpy(cache_key_buf[0..abs_package_json_path.len], abs_package_json_path);
+                bun.path.dangerouslyConvertPathToPosixInPlace(u8, cache_key_buf[0..abs_package_json_path.len]);
+                break :blk cache_key_buf[0..abs_package_json_path.len];
+            } else abs_package_json_path;
+            _ = manager.workspace_package_json_cache.map.remove(cache_key);
+
+            // Re-read package.json from disk
+            json = switch (manager.workspace_package_json_cache.getWithPath(manager.allocator, manager.log, abs_package_json_path, .{
+                .guess_indentation = true,
+            })) {
+                .read_err => |err| {
+                    Output.err(err, "failed to read package.json: {s}", .{abs_package_json_path});
+                    Global.crash();
+                },
+                .parse_err => |err| {
+                    Output.err(err, "failed to parse package.json: {s}", .{abs_package_json_path});
+                    manager.log.print(Output.errorWriter()) catch {};
+                    Global.crash();
+                },
+                .entry => |entry| entry,
+            };
+
+            // Re-validate private flag after scripts may have modified it.
+            // Note: The tarball filename uses the original name/version (matching npm behavior),
+            // but we re-check private to prevent accidentally publishing a now-private package.
+            if (comptime for_publish) {
+                if (json.root.get("private")) |private| {
+                    if (private.asBool()) |is_private| {
+                        if (is_private) {
+                            return error.PrivatePackage;
+                        }
+                    }
+                }
+            }
+        }
+
+        // Create the edited package.json content after lifecycle scripts have run
+        const edited_package_json = try editRootPackageJSON(ctx.allocator, ctx.lockfile, json);
+
         var root_dir = root_dir: {
             var path_buf: PathBuffer = undefined;
             @memcpy(path_buf[0..abs_workspace_path.len], abs_workspace_path);
test/regression/issue/24314.test.ts (new file, 115 lines):
import { readTarball } from "bun:internal-for-testing";
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
import path from "node:path";

// Helper to normalize path separators for cross-platform tarball entry lookup
function normalizePath(p: string): string {
  return p.replace(/\\/g, "/");
}

test("bun pm pack respects changes to package.json from prepack scripts", async () => {
  using dir = tempDir("pack-prepack", {
    "package.json": JSON.stringify(
      {
        name: "test-prepack",
        version: "1.0.0",
        scripts: {
          prepack: "node prepack.js",
        },
        description: "ORIGINAL DESCRIPTION",
      },
      null,
      2,
    ),
    "prepack.js": `
      const fs = require('fs');
      const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
      pkg.description = 'MODIFIED BY PREPACK';
      fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));
    `,
  });

  await using proc = Bun.spawn({
    cmd: [bunExe(), "pm", "pack"],
    cwd: String(dir),
    env: bunEnv,
    stderr: "pipe",
    stdout: "pipe",
  });

  const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

  // Check stdout/stderr first to provide context on failure
  expect(stderr).not.toContain("error");
  expect(stdout).toContain("test-prepack-1.0.0.tgz");
  expect(exitCode).toBe(0);

  // Read the tarball and check the package.json inside
  const tarballPath = path.join(String(dir), "test-prepack-1.0.0.tgz");
  const tarball = readTarball(tarballPath);

  // Find the package.json entry (normalize path separators for Windows)
  const pkgJsonEntry = tarball.entries.find(
    (e: { pathname: string }) => normalizePath(e.pathname) === "package/package.json",
  );
  expect(pkgJsonEntry).toBeDefined();

  const extractedPkg = JSON.parse(pkgJsonEntry.contents);

  // The description should be modified by the prepack script
  expect(extractedPkg.description).toBe("MODIFIED BY PREPACK");
});

test("bun pm pack respects changes to package.json from prepare scripts", async () => {
  using dir = tempDir("pack-prepare", {
    "package.json": JSON.stringify(
      {
        name: "test-prepare",
        version: "1.0.0",
        scripts: {
          prepare: "node prepare.js",
        },
        keywords: ["original"],
      },
      null,
      2,
    ),
    "prepare.js": `
      const fs = require('fs');
      const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
      pkg.keywords = ['modified', 'by', 'prepare'];
      fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));
    `,
  });

  await using proc = Bun.spawn({
    cmd: [bunExe(), "pm", "pack"],
    cwd: String(dir),
    env: bunEnv,
    stderr: "pipe",
    stdout: "pipe",
  });

  const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

  // Check stdout/stderr first to provide context on failure
  expect(stderr).not.toContain("error");
  expect(stdout).toContain("test-prepare-1.0.0.tgz");
  expect(exitCode).toBe(0);

  // Read the tarball and check the package.json inside
  const tarballPath = path.join(String(dir), "test-prepare-1.0.0.tgz");
  const tarball = readTarball(tarballPath);

  // Find the package.json entry (normalize path separators for Windows)
  const pkgJsonEntry = tarball.entries.find(
    (e: { pathname: string }) => normalizePath(e.pathname) === "package/package.json",
  );
  expect(pkgJsonEntry).toBeDefined();

  const extractedPkg = JSON.parse(pkgJsonEntry.contents);

  // The keywords should be modified by the prepare script
  expect(extractedPkg.keywords).toEqual(["modified", "by", "prepare"]);
});