mirror of https://github.com/oven-sh/bun
synced 2026-02-09 10:28:47 +00:00
feat(install): support npm overrides/yarn resolutions, one level deep only (#6435)
* disable zig fmt on generated ResolvedSourceTag.zig
* overrides
* it works
* ok
* a

---------

Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
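As context for the diff below, a sketch of the manifest shapes the new parser accepts (package names here are illustrative). "overrides" is parsed first and "resolutions" is only consulted when "overrides" is absent; both are applied one level deep only:

{
  "dependencies": { "express": "4.18.2" },
  "overrides": {
    "bytes": "1.0.0",
    "lodash": { ".": "4.0.0" }
  }
}

{
  "resolutions": {
    "**/bytes": "1.0.0",
    "@scope/pkg": "2.0.0"
  }
}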
 .vscode/launch.json | 5 ++---
@@ -306,12 +306,11 @@
       "request": "launch",
       "name": "bun install",
       "program": "bun-debug",
-      "args": ["install", "--frozen-lockfile"],
+      "args": ["install"],
       "cwd": "${fileDirname}",
       "console": "internalConsole",
       "env": {
-        "BUN_DEBUG_QUIET_LOGS": "1",
-        "BUN_DEBUG_Lockfile": "1"
+        "BUN_DEBUG_ALL": "1"
       },
     },
     {

@@ -3005,16 +3005,31 @@ pub const PackageManager = struct {
         comptime successFn: SuccessFn,
         comptime failFn: ?FailFn,
     ) !void {
-        const name = dependency.realname();
+        var name = dependency.realname();

-        const name_hash = switch (dependency.version.tag) {
+        var name_hash = switch (dependency.version.tag) {
             .dist_tag, .git, .github, .npm, .tarball, .workspace => String.Builder.stringHash(this.lockfile.str(&name)),
             else => dependency.name_hash,
         };
-        const version = dependency.version;
+        const version = version: {
+            if (this.lockfile.overrides.get(name_hash)) |new| {
+                debug("override: {s} -> {s}", .{ this.lockfile.str(&dependency.version.literal), this.lockfile.str(&new.literal) });
+                name = switch (new.tag) {
+                    .dist_tag => new.value.dist_tag.name,
+                    .git => new.value.git.package_name,
+                    .github => new.value.github.package_name,
+                    .npm => new.value.npm.name,
+                    .tarball => new.value.tarball.package_name,
+                    else => name,
+                };
+                name_hash = String.Builder.stringHash(this.lockfile.str(&name));
+                break :version new;
+            }
+            break :version dependency.version;
+        };
         var loaded_manifest: ?Npm.PackageManifest = null;

-        switch (dependency.version.tag) {
+        switch (version.tag) {
             .dist_tag, .folder, .npm => {
                 retry_from_manifests_ptr: while (true) {
                     var resolve_result_ = this.getOrPutResolvedPackage(
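The name and name_hash rewrite above is what lets an override point at a different package entirely: npm, git, github, and tarball override values carry their own package name, so the dependency is re-keyed before resolution. A minimal manifest that exercises the .npm arm of that switch, mirroring the npm-specifier test at the end of this commit (names illustrative):

{
  "dependencies": { "express": "4.18.2" },
  "overrides": {
    "bytes": "npm:lodash@4.0.0"
  }
}

With this manifest, every transitive `bytes` dependency resolves to lodash@4.0.0.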
@@ -3127,13 +3142,13 @@ pub const PackageManager = struct {
                 "enqueueDependency({d}, {s}, {s}, {s}) = {d}",
                 .{
                     id,
-                    @tagName(dependency.version.tag),
+                    @tagName(version.tag),
                     this.lockfile.str(&name),
                     this.lockfile.str(&version.literal),
                     result.package.meta.id,
                 },
             );
-        } else if (dependency.version.tag.isNPM()) {
+        } else if (version.tag.isNPM()) {
             const name_str = this.lockfile.str(&name);
             const task_id = Task.Id.forManifest(name_str);

@@ -3144,7 +3159,7 @@ pub const PackageManager = struct {
                 "enqueueDependency({d}, {s}, {s}, {s}) = task {d}",
                 .{
                     id,
-                    @tagName(dependency.version.tag),
+                    @tagName(version.tag),
                     this.lockfile.str(&name),
                     this.lockfile.str(&version.literal),
                     task_id,
@@ -3165,8 +3180,8 @@ pub const PackageManager = struct {

             // If it's an exact package version already living in the cache
             // We can skip the network request, even if it's beyond the caching period
-            if (dependency.version.tag == .npm and dependency.version.value.npm.version.isExact()) {
-                if (loaded_manifest.?.findByVersion(dependency.version.value.npm.version.head.head.range.left.version)) |find_result| {
+            if (version.tag == .npm and version.value.npm.version.isExact()) {
+                if (loaded_manifest.?.findByVersion(version.value.npm.version.head.head.range.left.version)) |find_result| {
                     if (this.getOrPutResolvedPackageWithFindResult(
                         name_hash,
                         name,
@@ -3233,7 +3248,7 @@ pub const PackageManager = struct {
                 return;
             },
             .git => {
-                const dep = &dependency.version.value.git;
+                const dep = &version.value.git;
                 const res = Resolution{
                     .tag = .git,
                     .value = .{
@@ -3261,7 +3276,7 @@ pub const PackageManager = struct {
                 "enqueueDependency({d}, {s}, {s}, {s}) = {s}",
                 .{
                     id,
-                    @tagName(dependency.version.tag),
+                    @tagName(version.tag),
                     this.lockfile.str(&name),
                     this.lockfile.str(&version.literal),
                     url,
@@ -3312,7 +3327,7 @@ pub const PackageManager = struct {
                 }
             },
             .github => {
-                const dep = &dependency.version.value.github;
+                const dep = &version.value.github;
                 const res = Resolution{
                     .tag = .github,
                     .value = .{
@@ -3339,7 +3354,7 @@ pub const PackageManager = struct {
                 "enqueueDependency({d}, {s}, {s}, {s}) = {s}",
                 .{
                     id,
-                    @tagName(dependency.version.tag),
+                    @tagName(version.tag),
                     this.lockfile.str(&name),
                     this.lockfile.str(&version.literal),
                     url,
@@ -3420,7 +3435,7 @@ pub const PackageManager = struct {
                 "enqueueDependency({d}, {s}, {s}, {s}) = {d}",
                 .{
                     id,
-                    @tagName(dependency.version.tag),
+                    @tagName(version.tag),
                     this.lockfile.str(&name),
                     this.lockfile.str(&version.literal),
                     result.package.meta.id,
@@ -3475,7 +3490,7 @@ pub const PackageManager = struct {
                 }
             },
             .tarball => {
-                const res: Resolution = switch (dependency.version.value.tarball.uri) {
+                const res: Resolution = switch (version.value.tarball.uri) {
                     .local => |path| .{
                         .tag = .local_tarball,
                         .value = .{
@@ -3496,7 +3511,7 @@ pub const PackageManager = struct {
                     return;
                 }

-                const url = switch (dependency.version.value.tarball.uri) {
+                const url = switch (version.value.tarball.uri) {
                     .local => |path| this.lockfile.str(&path),
                     .remote => |url| this.lockfile.str(&url),
                 };
@@ -3511,7 +3526,7 @@ pub const PackageManager = struct {
                 "enqueueDependency({d}, {s}, {s}, {s}) = {s}",
                 .{
                     id,
-                    @tagName(dependency.version.tag),
+                    @tagName(version.tag),
                     this.lockfile.str(&name),
                     this.lockfile.str(&version.literal),
                     url,
@@ -3522,7 +3537,7 @@ pub const PackageManager = struct {
                 try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id));

                 if (dependency.behavior.isPeer()) return;
-                switch (dependency.version.value.tarball.uri) {
+                switch (version.value.tarball.uri) {
                     .local => {
                         const network_entry = try this.network_dedupe_map.getOrPutContext(this.allocator, task_id, .{});
                         if (network_entry.found_existing) return;
@@ -7855,6 +7870,8 @@ pub const PackageManager = struct {
                 new_dep.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
             }

+            lockfile.overrides.count(&lockfile, builder);
+
             maybe_root.scripts.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);

             const off = @as(u32, @truncate(manager.lockfile.buffers.dependencies.items.len));
@@ -7868,6 +7885,8 @@ pub const PackageManager = struct {
|
||||
manager.root_dependency_list = dep_lists[0];
|
||||
try builder.allocate();
|
||||
|
||||
manager.lockfile.overrides = try lockfile.overrides.clone(&lockfile, manager.lockfile, builder);
|
||||
|
||||
try manager.lockfile.buffers.dependencies.ensureUnusedCapacity(manager.lockfile.allocator, len);
|
||||
try manager.lockfile.buffers.resolutions.ensureUnusedCapacity(manager.lockfile.allocator, len);
|
||||
|
||||
@@ -7890,6 +7909,21 @@ pub const PackageManager = struct {
             }
         }

+        if (manager.summary.overrides_changed) {
+            const dependency_name_hashes_to_check = manager.lockfile.overrides.map.keys();
+            for (manager.lockfile.buffers.dependencies.items, 0..) |*dependency, dependency_i| {
+                if (std.mem.indexOfScalar(PackageNameHash, dependency_name_hashes_to_check, dependency.name_hash)) |_| {
+                    manager.lockfile.buffers.resolutions.items[dependency_i] = invalid_package_id;
+                    try manager.enqueueDependencyWithMain(
+                        @truncate(dependency_i),
+                        dependency,
+                        manager.lockfile.buffers.resolutions.items[dependency_i],
+                        false,
+                    );
+                }
+            }
+        }
+
         manager.lockfile.packages.items(.scripts)[0] = maybe_root.scripts.clone(
             lockfile.buffers.string_bytes.items,
             *Lockfile.StringBuilder,

@@ -112,6 +112,8 @@ trusted_dependencies: NameHashSet = .{},
 workspace_paths: NameHashMap = .{},
 workspace_versions: VersionHashMap = .{},

+overrides: OverrideMap = .{},
+
 const Stream = std.io.FixedBufferStream([]u8);
 pub const default_filename = "bun.lockb";

@@ -209,6 +211,7 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: Allocator, log: *log
     this.trusted_dependencies = .{};
     this.workspace_paths = .{};
     this.workspace_versions = .{};
+    this.overrides = .{};

     Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| {
         return LoadFromDiskResult{ .err = .{ .step = .parse_file, .value = err } };
@@ -731,6 +734,13 @@ pub fn cleanWithLogger(

     old.scratch.dependency_list_queue.head = 0;

+    {
+        var builder = new.stringBuilder();
+        old.overrides.count(old, &builder);
+        try builder.allocate();
+        new.overrides = try old.overrides.clone(old, new, &builder);
+    }
+
     // Step 1. Recreate the lockfile with only the packages that are still alive
     const root = old.rootPackage() orelse return error.NoPackage;

@@ -843,6 +853,7 @@ pub fn cleanWithLogger(
     }
     new.trusted_dependencies = old_trusted_dependencies;
     new.scripts = old_scripts;

     return new;
 }

@@ -1885,6 +1896,299 @@ pub const PackageIndex = struct {
     };
 };

+pub const OverrideMap = struct {
+    const debug = Output.scoped(.OverrideMap, false);
+
+    map: std.ArrayHashMapUnmanaged(PackageNameHash, Dependency, ArrayIdentityContext.U64, false) = .{},
+
+    /// In the future, this `get` function should handle multi-level resolutions. This is difficult right
+    /// now because given a Dependency ID, there is no fast way to trace it to its package.
+    ///
+    /// A potential approach is to add another buffer to the lockfile that maps Dependency ID to Package ID,
+    /// and from there `OverrideMap.map` can have a union as the value, where the union is between "override all"
+    /// and "here is a list of overrides depending on the package that imported", similar to PackageIndex above.
+    pub fn get(this: *const OverrideMap, name_hash: PackageNameHash) ?Dependency.Version {
+        debug("looking up override for {x}", .{name_hash});
+        return if (this.map.get(name_hash)) |dep|
+            dep.version
+        else
+            null;
+    }
+
+    pub fn deinit(this: *OverrideMap, allocator: Allocator) void {
+        this.map.deinit(allocator);
+    }
+
+    pub fn count(this: *OverrideMap, lockfile: *Lockfile, builder: *Lockfile.StringBuilder) void {
+        for (this.map.values()) |dep| {
+            dep.count(lockfile.buffers.string_bytes.items, @TypeOf(builder), builder);
+        }
+    }
+
+    pub fn clone(this: *OverrideMap, old_lockfile: *Lockfile, new_lockfile: *Lockfile, new_builder: *Lockfile.StringBuilder) !OverrideMap {
+        var new = OverrideMap{};
+        try new.map.ensureTotalCapacity(new_lockfile.allocator, this.map.entries.len);
+
+        for (this.map.keys(), this.map.values()) |k, v| {
+            new.map.putAssumeCapacity(
+                k,
+                try v.clone(old_lockfile.buffers.string_bytes.items, @TypeOf(new_builder), new_builder),
+            );
+        }
+
+        return new;
+    }
+
+    // the rest of this struct is expression parsing code:
+
+    pub fn parseCount(
+        _: *OverrideMap,
+        lockfile: *Lockfile,
+        expr: Expr,
+        builder: *Lockfile.StringBuilder,
+    ) void {
+        if (expr.asProperty("overrides")) |overrides| {
+            if (overrides.expr.data != .e_object)
+                return;
+
+            for (overrides.expr.data.e_object.properties.slice()) |entry| {
+                builder.count(entry.key.?.asString(lockfile.allocator).?);
+                switch (entry.value.?.data) {
+                    .e_string => |s| {
+                        builder.count(s.slice(lockfile.allocator));
+                    },
+                    .e_object => {
+                        if (entry.value.?.asProperty(".")) |dot| {
+                            if (dot.expr.asString(lockfile.allocator)) |s| {
+                                builder.count(s);
+                            }
+                        }
+                    },
+                    else => {},
+                }
+            }
+        } else if (expr.asProperty("resolutions")) |resolutions| {
+            if (resolutions.expr.data != .e_object)
+                return;
+
+            for (resolutions.expr.data.e_object.properties.slice()) |entry| {
+                builder.count(entry.key.?.asString(lockfile.allocator).?);
+                builder.count(entry.value.?.asString(lockfile.allocator) orelse continue);
+            }
+        }
+    }
+
+    /// Given a package.json expression, detect and parse override configuration into the given override map.
+    /// It is assumed the input map is uninitialized (zero entries).
+    pub fn parseAppend(
+        this: *OverrideMap,
+        lockfile: *Lockfile,
+        root_package: *Lockfile.Package,
+        log: *logger.Log,
+        json_source: logger.Source,
+        expr: Expr,
+        builder: *Lockfile.StringBuilder,
+    ) !void {
+        if (Environment.allow_assert) {
+            std.debug.assert(this.map.entries.len == 0); // only call parse once
+        }
+        if (expr.asProperty("overrides")) |overrides| {
+            try this.parseFromOverrides(lockfile, root_package, json_source, log, overrides.expr, builder);
+        } else if (expr.asProperty("resolutions")) |resolutions| {
+            try this.parseFromResolutions(lockfile, root_package, json_source, log, resolutions.expr, builder);
+        }
+        debug("parsed {d} overrides", .{this.map.entries.len});
+    }
+
+    /// https://docs.npmjs.com/cli/v9/configuring-npm/package-json#overrides
+    pub fn parseFromOverrides(
+        this: *OverrideMap,
+        lockfile: *Lockfile,
+        root_package: *Lockfile.Package,
+        source: logger.Source,
+        log: *logger.Log,
+        expr: Expr,
+        builder: *Lockfile.StringBuilder,
+    ) !void {
+        if (expr.data != .e_object) {
+            try log.addWarningFmt(&source, expr.loc, lockfile.allocator, "\"overrides\" must be an object", .{});
+            return error.Invalid;
+        }
+
+        try this.map.ensureUnusedCapacity(lockfile.allocator, expr.data.e_object.properties.len);
+
+        for (expr.data.e_object.properties.slice()) |prop| {
+            const key = prop.key.?;
+            var k = key.asString(lockfile.allocator).?;
+            if (k.len == 0) {
+                try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Missing overridden package name", .{});
+                continue;
+            }
+
+            const name_hash = String.Builder.stringHash(k);
+
+            const value = value: {
+                // for one level deep, we will only support a string and { ".": value }
+                const value_expr = prop.value.?;
+                if (value_expr.data == .e_string) {
+                    break :value value_expr;
+                } else if (value_expr.data == .e_object) {
+                    if (value_expr.asProperty(".")) |dot| {
+                        if (dot.expr.data == .e_string) {
+                            if (value_expr.data.e_object.properties.len > 1) {
+                                try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Bun currently does not support nested \"overrides\"", .{});
+                            }
+                            break :value dot.expr;
+                        } else {
+                            try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Invalid override value for \"{s}\"", .{k});
+                            continue;
+                        }
+                    } else {
+                        try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Bun currently does not support nested \"overrides\"", .{});
+                        continue;
+                    }
+                }
+                try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Invalid override value for \"{s}\"", .{k});
+                continue;
+            };
+
+            if (try parseOverrideValue(
+                "override",
+                lockfile,
+                root_package,
+                source,
+                value.loc,
+                log,
+                k,
+                value.data.e_string.slice(lockfile.allocator),
+                builder,
+            )) |version| {
+                this.map.putAssumeCapacity(name_hash, version);
+            }
+        }
+    }
+
+    /// yarn classic: https://classic.yarnpkg.com/lang/en/docs/selective-version-resolutions/
+    /// yarn berry: https://yarnpkg.com/configuration/manifest#resolutions
+    pub fn parseFromResolutions(
+        this: *OverrideMap,
+        lockfile: *Lockfile,
+        root_package: *Lockfile.Package,
+        source: logger.Source,
+        log: *logger.Log,
+        expr: Expr,
+        builder: *Lockfile.StringBuilder,
+    ) !void {
+        if (expr.data != .e_object) {
+            try log.addWarningFmt(&source, expr.loc, lockfile.allocator, "\"resolutions\" must be an object with string values", .{});
+            return;
+        }
+        try this.map.ensureUnusedCapacity(lockfile.allocator, expr.data.e_object.properties.len);
+        for (expr.data.e_object.properties.slice()) |prop| {
+            const key = prop.key.?;
+            var k = key.asString(lockfile.allocator).?;
+            if (strings.hasPrefixComptime(k, "**/"))
+                k = k[3..];
+            if (k.len == 0) {
+                try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Missing resolution package name", .{});
+                continue;
+            }
+            const value = prop.value.?;
+            if (value.data != .e_string) {
+                try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Expected string value for resolution \"{s}\"", .{k});
+                continue;
+            }
+            // currently we only support one level deep, so we should error if there is more than one, e.g.:
+            // - "foo/bar"
+            // - "@namespace/hello/world"
+            if (k[0] == '@') {
+                const first_slash = strings.indexOfChar(k, '/') orelse {
+                    try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Invalid package name \"{s}\"", .{k});
+                    continue;
+                };
+                if (strings.indexOfChar(k[first_slash + 1 ..], '/') != null) {
+                    try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Bun currently does not support nested \"resolutions\"", .{});
+                    continue;
+                }
+            } else if (strings.indexOfChar(k, '/') != null) {
+                try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Bun currently does not support nested \"resolutions\"", .{});
+                continue;
+            }
+
+            if (try parseOverrideValue(
+                "resolution",
+                lockfile,
+                root_package,
+                source,
+                value.loc,
+                log,
+                k,
+                value.data.e_string.data,
+                builder,
+            )) |version| {
+                const name_hash = String.Builder.stringHash(k);
+                this.map.putAssumeCapacity(name_hash, version);
+            }
+        }
+    }
+
+    pub fn parseOverrideValue(
+        comptime field: []const u8,
+        lockfile: *Lockfile,
+        root_package: *Lockfile.Package,
+        source: logger.Source,
+        loc: logger.Loc,
+        log: *logger.Log,
+        key: []const u8,
+        value: []const u8,
+        builder: *Lockfile.StringBuilder,
+    ) !?Dependency {
+        if (value.len == 0) {
+            try log.addWarningFmt(&source, loc, lockfile.allocator, "Missing " ++ field ++ " value", .{});
+            return null;
+        }
+
+        // "Overrides may also be defined as a reference to a spec for a direct dependency
+        // by prefixing the name of the package you wish the version to match with a `$`"
+        // https://docs.npmjs.com/cli/v9/configuring-npm/package-json#overrides
+        // This is why a `*Lockfile.Package` is needed here.
+        if (value[0] == '$') {
+            const ref_name = value[1..];
+            // It is fine for this string to not share the string pool, because it is only used for .eql()
+            const ref_name_str = String.init(ref_name, ref_name);
+            const pkg_deps: []const Dependency = root_package.dependencies.get(lockfile.buffers.dependencies.items);
+            for (pkg_deps) |dep| {
+                if (dep.name.eql(ref_name_str, lockfile.buffers.string_bytes.items, ref_name)) {
+                    return dep;
+                }
+            }
+            try log.addWarningFmt(&source, loc, lockfile.allocator, "Could not resolve " ++ field ++ " \"{s}\" (you need \"{s}\" in your dependencies)", .{ value, ref_name });
+            return null;
+        }
+
+        const literalString = builder.append(String, value);
+        const literalSliced = literalString.sliced(lockfile.buffers.string_bytes.items);
+
+        const name_hash = String.Builder.stringHash(key);
+        const name = builder.appendWithHash(String, key, name_hash);
+
+        return Dependency{
+            .name = name,
+            .name_hash = name_hash,
+            .version = Dependency.parse(
+                lockfile.allocator,
+                name,
+                literalSliced.slice,
+                &literalSliced,
+                log,
+            ) orelse {
+                try log.addWarningFmt(&source, loc, lockfile.allocator, "Invalid " ++ field ++ " value \"{s}\"", .{value});
+                return null;
+            },
+        };
+    }
+};
+
 pub const FormatVersion = enum(u32) {
     v0 = 0,
     // bun v0.0.x - bun v0.1.6
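Two shapes from the parser above that are easy to miss: a `$`-prefixed override value references the spec of a direct dependency of the root package (it must exist there, or the override is dropped with a warning), and resolutions keys may carry a leading `**/`, which is stripped; any deeper key such as "foo/bar" or "@ns/a/b" is rejected as nested. A hypothetical example of the `$` form:

{
  "dependencies": { "lodash": "4.17.21" },
  "overrides": { "lodash": "$lodash" }
}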
@@ -2508,6 +2812,7 @@ pub const Package = extern struct {
         add: u32 = 0,
         remove: u32 = 0,
         update: u32 = 0,
+        overrides_changed: bool = false,

         pub inline fn sum(this: *Summary, that: Summary) void {
             this.add += that.add;
@@ -2516,7 +2821,7 @@ pub const Package = extern struct {
         }

         pub inline fn hasDiffs(this: Summary) bool {
-            return this.add > 0 or this.remove > 0 or this.update > 0;
+            return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed;
         }
     };

@@ -2537,6 +2842,22 @@ pub const Package = extern struct {
         var to_i: usize = 0;
         var skipped_workspaces: usize = 0;

+        if (from_lockfile.overrides.map.count() != to_lockfile.overrides.map.count()) {
+            summary.overrides_changed = true;
+        } else {
+            for (
+                from_lockfile.overrides.map.keys(),
+                from_lockfile.overrides.map.values(),
+                to_lockfile.overrides.map.keys(),
+                to_lockfile.overrides.map.values(),
+            ) |from_k, *from_override, to_k, *to_override| {
+                if ((from_k != to_k) or (!from_override.eql(to_override, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items))) {
+                    summary.overrides_changed = true;
+                    break;
+                }
+            }
+        }
+
         for (from_deps, 0..) |*from_dep, i| {
             found: {
                 const prev_i = to_i;
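Because overrides_changed feeds hasDiffs(), editing only the overrides map is enough to invalidate a frozen install. A sketch of the scenario the last test in this commit checks: install with the first manifest, switch to the second, and `bun install --frozen-lockfile` is expected to fail:

{ "dependencies": { "express": "4.18.2" }, "overrides": { "bytes": "1.0.0" } }

{ "dependencies": { "express": "4.18.2" }, "overrides": { "bytes": "1.0.1" } }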
@@ -3547,6 +3868,10 @@ pub const Package = extern struct {
             }
         }

+        if (comptime features.is_main) {
+            lockfile.overrides.parseCount(lockfile, json, &string_builder);
+        }
+
         try string_builder.allocate();
         try lockfile.buffers.dependencies.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count);
         try lockfile.buffers.resolutions.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count);
@@ -3768,6 +4093,11 @@ pub const Package = extern struct {
         lockfile.buffers.dependencies.items = lockfile.buffers.dependencies.items.ptr[0..new_len];
         lockfile.buffers.resolutions.items = lockfile.buffers.resolutions.items.ptr[0..new_len];

+        // This function depends on package.dependencies being set, so it is done at the very end.
+        if (comptime features.is_main) {
+            try lockfile.overrides.parseAppend(lockfile, package, log, source, json, &string_builder);
+        }
+
         string_builder.clamp();
     }

@@ -3969,6 +4299,7 @@ pub fn deinit(this: *Lockfile) void {
     this.trusted_dependencies.deinit(this.allocator);
     this.workspace_paths.deinit(this.allocator);
     this.workspace_versions.deinit(this.allocator);
+    this.overrides.deinit(this.allocator);
 }

 const Buffers = struct {
@@ -4273,6 +4604,7 @@ pub const Serializer = struct {

     const has_workspace_package_ids_tag: u64 = @bitCast([_]u8{ 'w', 'O', 'r', 'K', 's', 'P', 'a', 'C' });
     const has_trusted_dependencies_tag: u64 = @bitCast([_]u8{ 't', 'R', 'u', 'S', 't', 'E', 'D', 'd' });
+    const has_overrides_tag: u64 = @bitCast([_]u8{ 'o', 'V', 'e', 'R', 'r', 'i', 'D', 's' });

     pub fn save(this: *Lockfile, comptime StreamType: type, stream: StreamType) !void {
         var old_package_list = this.packages;
@@ -4347,6 +4679,34 @@ pub const Serializer = struct {
             );
         }

+        if (this.overrides.map.count() > 0) {
+            try writer.writeAll(std.mem.asBytes(&has_overrides_tag));
+
+            try Lockfile.Buffers.writeArray(
+                StreamType,
+                stream,
+                @TypeOf(&writer),
+                &writer,
+                []PackageNameHash,
+                this.overrides.map.keys(),
+            );
+            var external_overrides = try std.ArrayListUnmanaged(Dependency.External).initCapacity(z_allocator, this.overrides.map.count());
+            defer external_overrides.deinit(z_allocator);
+            external_overrides.items.len = this.overrides.map.count();
+            for (external_overrides.items, this.overrides.map.values()) |*dest, src| {
+                dest.* = src.toExternal();
+            }
+
+            try Lockfile.Buffers.writeArray(
+                StreamType,
+                stream,
+                @TypeOf(&writer),
+                &writer,
+                []Dependency.External,
+                external_overrides.items,
+            );
+        }
+
         const end = try stream.getPos();

         try writer.writeAll(&alignment_bytes_to_repeat_buffer);
@@ -4482,6 +4842,42 @@ pub const Serializer = struct {
             }
         }

+        {
+            const remaining_in_buffer = total_buffer_size -| stream.pos;
+
+            if (remaining_in_buffer > 8 and total_buffer_size <= stream.buffer.len) {
+                const next_num = try reader.readIntLittle(u64);
+                if (next_num == has_overrides_tag) {
+                    var overrides_name_hashes = try Lockfile.Buffers.readArray(
+                        stream,
+                        allocator,
+                        std.ArrayListUnmanaged(PackageNameHash),
+                    );
+                    defer overrides_name_hashes.deinit(allocator);
+
+                    var map = lockfile.overrides.map;
+                    defer lockfile.overrides.map = map;
+
+                    try map.ensureTotalCapacity(allocator, overrides_name_hashes.items.len);
+                    var override_versions_external = try Lockfile.Buffers.readArray(
+                        stream,
+                        allocator,
+                        std.ArrayListUnmanaged(Dependency.External),
+                    );
+                    const context: Dependency.Context = .{
+                        .allocator = allocator,
+                        .log = log,
+                        .buffer = lockfile.buffers.string_bytes.items,
+                    };
+                    for (overrides_name_hashes.items, override_versions_external.items) |name, value| {
+                        map.putAssumeCapacity(name, Dependency.toDependency(value, context));
+                    }
+                } else {
+                    stream.pos -= 8;
+                }
+            }
+        }
+
         lockfile.scratch = Lockfile.Scratch.init(allocator);
         lockfile.package_index = PackageIndex.Map.initContext(allocator, .{});
         lockfile.string_pool = StringPool.initContext(allocator, .{});

@@ -576,23 +576,36 @@ pub const SlicedString = struct {
     slice: string,

     pub inline fn init(buf: string, slice: string) SlicedString {
+        if (Environment.allow_assert) {
+            if (@intFromPtr(buf.ptr) > @intFromPtr(slice.ptr)) {
+                @panic("SlicedString.init buf is not in front of slice");
+            }
+        }
         return SlicedString{ .buf = buf, .slice = slice };
     }

     pub inline fn external(this: SlicedString) ExternalString {
-        if (comptime Environment.allow_assert) std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
+        if (comptime Environment.allow_assert) {
+            std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
+        }

         return ExternalString.init(this.buf, this.slice, bun.Wyhash.hash(0, this.slice));
     }

     pub inline fn value(this: SlicedString) String {
-        if (comptime Environment.allow_assert) std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
+        if (comptime Environment.allow_assert) {
+            std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
+        }

         return String.init(this.buf, this.slice);
     }

     pub inline fn sub(this: SlicedString, input: string) SlicedString {
-        std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.buf.ptr) and ((@intFromPtr(input.ptr) + input.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
+        if (Environment.allow_assert) {
+            if (!(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.buf.ptr) and ((@intFromPtr(input.ptr) + input.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)))) {
+                @panic("SlicedString.sub input is not a substring of the slice");
+            }
+        }
         return SlicedString{ .buf = this.buf, .slice = input };
     }
 };

@@ -381,7 +381,8 @@ static constexpr ASCIILiteral ${idToEnumName(id)}Code = ${fmtCPPString(bundledOu
 // This is a generated enum for zig code (exports.zig)
 fs.writeFileSync(
   path.join(BASE, "out/ResolvedSourceTag.zig"),
-  `pub const ResolvedSourceTag = enum(u32) {
+  `// zig fmt: off
+pub const ResolvedSourceTag = enum(u32) {
   // Predefined
   javascript = 0,
   package_json_type_module = 1,

@@ -1,3 +1,4 @@
+// zig fmt: off
 pub const ResolvedSourceTag = enum(u32) {
     // Predefined
     javascript = 0,
@@ -64,16 +65,16 @@ pub const ResolvedSourceTag = enum(u32) {
     @"node:wasi" = 563,
     @"node:worker_threads" = 564,
     @"node:zlib" = 565,
-    depd = 566,
+    @"depd" = 566,
     @"detect-libc" = 567,
     @"detect-libc/linux" = 568,
     @"isomorphic-fetch" = 569,
     @"node-fetch" = 570,
-    undici = 571,
-    vercel_fetch = 572,
-    ws = 573,
+    @"undici" = 571,
+    @"vercel_fetch" = 572,
+    @"ws" = 573,
     // Native modules run through a different system using ESM registry.
-    bun = 1024,
+    @"bun" = 1024,
     @"bun:jsc" = 1025,
     @"node:buffer" = 1026,
     @"node:constants" = 1027,

 test/cli/install/overrides.test.ts | 165 +++++ (new file)

@@ -0,0 +1,165 @@
+import { expect, test } from "bun:test";
+import { mkdtempSync } from "fs";
+import { join } from "path";
+import { tmpdir } from "os";
+import { readFileSync, writeFileSync } from "fs";
+import { bunEnv, bunExe } from "harness";
+
+function install(cwd: string, args: string[]) {
+  const exec = Bun.spawnSync({
+    cmd: [bunExe(), ...args],
+    cwd,
+    stdout: "pipe",
+    stdin: "ignore",
+    stderr: "inherit",
+    env: bunEnv,
+  });
+  if (exec.exitCode !== 0) {
+    throw new Error(`bun install exited with code ${exec.exitCode}`);
+  }
+  return exec;
+}
+
+function installExpectFail(cwd: string, args: string[]) {
+  const exec = Bun.spawnSync({
+    cmd: [bunExe(), ...args],
+    cwd,
+    stdout: "pipe",
+    stdin: "ignore",
+    stderr: "inherit",
+    env: bunEnv,
+  });
+  if (exec.exitCode === 0) {
+    throw new Error(`bun install exited with code ${exec.exitCode} (expected failure)`);
+  }
+  return exec;
+}
+
+function versionOf(cwd: string, path: string) {
+  const data = readFileSync(join(cwd, path));
+  const json = JSON.parse(data.toString());
+  return json.version;
+}
+
+function ensureLockfileDoesntChangeOnBunI(cwd: string) {
+  install(cwd, ["install"]);
+  const lockb_hash = new Bun.CryptoHasher("sha256").update(readFileSync(join(cwd, "bun.lockb"))).digest();
+  install(cwd, ["install", "--frozen-lockfile"]);
+  install(cwd, ["install", "--force"]);
+  const lockb_hash2 = new Bun.CryptoHasher("sha256").update(readFileSync(join(cwd, "bun.lockb"))).digest();
+  expect(lockb_hash).toEqual(lockb_hash2);
+}
+
+test("overrides affect your own packages", async () => {
+  const tmp = mkdtempSync(join(tmpdir(), "bun-pm-test"));
+  writeFileSync(
+    join(tmp, "package.json"),
+    JSON.stringify({
+      dependencies: {},
+      overrides: {
+        lodash: "4.0.0",
+      },
+    }),
+  );
+  install(tmp, ["install", "lodash"]);
+  expect(versionOf(tmp, "node_modules/lodash/package.json")).toBe("4.0.0");
+
+  ensureLockfileDoesntChangeOnBunI(tmp);
+});
+
+test("overrides affects all dependencies", async () => {
+  const tmp = mkdtempSync(join(tmpdir(), "bun-pm-test"));
+  writeFileSync(
+    join(tmp, "package.json"),
+    JSON.stringify({
+      dependencies: {},
+      overrides: {
+        bytes: "1.0.0",
+      },
+    }),
+  );
+  install(tmp, ["install", "express@4.18.2"]);
+  expect(versionOf(tmp, "node_modules/bytes/package.json")).toBe("1.0.0");
+
+  ensureLockfileDoesntChangeOnBunI(tmp);
+});
+
+test("overrides being set later affects all dependencies", async () => {
+  const tmp = mkdtempSync(join(tmpdir(), "bun-pm-test"));
+  writeFileSync(
+    join(tmp, "package.json"),
+    JSON.stringify({
+      dependencies: {},
+    }),
+  );
+  install(tmp, ["install", "express@4.18.2"]);
+  expect(versionOf(tmp, "node_modules/bytes/package.json")).not.toBe("1.0.0");
+
+  ensureLockfileDoesntChangeOnBunI(tmp);
+
+  writeFileSync(
+    join(tmp, "package.json"),
+    JSON.stringify({
+      ...JSON.parse(readFileSync(join(tmp, "package.json")).toString()),
+      overrides: {
+        bytes: "1.0.0",
+      },
+    }),
+  );
+  install(tmp, ["install"]);
+  expect(versionOf(tmp, "node_modules/bytes/package.json")).toBe("1.0.0");
+
+  ensureLockfileDoesntChangeOnBunI(tmp);
+});
+
+test("overrides to npm specifier", async () => {
+  const tmp = mkdtempSync(join(tmpdir(), "bun-pm-test"));
+  writeFileSync(
+    join(tmp, "package.json"),
+    JSON.stringify({
+      dependencies: {},
+      overrides: {
+        bytes: "npm:lodash@4.0.0",
+      },
+    }),
+  );
+  install(tmp, ["install", "express@4.18.2"]);
+
+  // BUG: the npm specifier is hoisted https://github.com/oven-sh/bun/issues/6433
+  // const bytes = JSON.parse(readFileSync(join(tmp, "node_modules/bytes/package.json"), "utf-8"));
+  const bytes = JSON.parse(
+    readFileSync(join(tmp, "node_modules/body-parser/node_modules/bytes/package.json"), "utf-8"),
+  );
+
+  expect(bytes.name).toBe("lodash");
+  expect(bytes.version).toBe("4.0.0");
+
+  ensureLockfileDoesntChangeOnBunI(tmp);
+});
+
+test("changing overrides makes the lockfile changed, prevent frozen install", async () => {
+  const tmp = mkdtempSync(join(tmpdir(), "bun-pm-test"));
+  writeFileSync(
+    join(tmp, "package.json"),
+    JSON.stringify({
+      dependencies: {},
+      overrides: {
+        bytes: "1.0.0",
+      },
+    }),
+  );
+  install(tmp, ["install", "express@4.18.2"]);
+
+  writeFileSync(
+    join(tmp, "package.json"),
+    JSON.stringify({
+      ...JSON.parse(readFileSync(join(tmp, "package.json")).toString()),
+      overrides: {
+        bytes: "1.0.1",
+      },
+    }),
+  );
+
+  installExpectFail(tmp, ["install", "--frozen-lockfile"]);
+});
+
+// frozen lockfile