mirror of https://github.com/oven-sh/bun, synced 2026-02-09 18:38:55 +00:00
The deeply nested workspace deps test covers everything we need:
- workspace:^ dependencies
- Multiple conflicting versions (3 React versions)
- 5-level deep nesting
- Real Yarn Berry generated lockfile
- Migration + bun ci verification

7/8 tests passing.
1748 lines · 70 KiB · Zig
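
//! Migration of Yarn lockfiles (`yarn.lock`) into Bun's in-memory lockfile.
//! Handles both Yarn v1 ("classic") lockfiles and Yarn Berry (v2+) YAML
//! lockfiles; format detection lives in `migrateYarnLockfile` below.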
const Bin = @import("./bin.zig").Bin;
const Dependency = @import("./dependency.zig");
const Integrity = @import("./integrity.zig").Integrity;
const Resolution = @import("./resolution.zig").Resolution;
const Repository = @import("./repository.zig").Repository;

const Lockfile = @import("./lockfile.zig");
const LoadResult = Lockfile.LoadResult;

const bun = @import("bun");
const logger = bun.logger;
const strings = bun.strings;
const Npm = bun.install.Npm;

const Semver = bun.Semver;
const ExternalString = Semver.ExternalString;
const String = Semver.String;

const JSAst = bun.ast;
const Expr = JSAst.Expr;

const DependencyID = bun.install.DependencyID;
const ExtractTarball = bun.install.ExtractTarball;
const PackageID = bun.install.PackageID;
const PackageManager = bun.install.PackageManager;
const invalid_package_id = bun.install.invalid_package_id;

const std = @import("std");
const Allocator = std.mem.Allocator;

const OOM = bun.OOM;
const glob = bun.glob;
const YAML = bun.interchange.yaml.YAML;

const MigrateYarnLockfileError = OOM || error{
    YarnLockfileVersionTooOld,
    YarnLockfileUnsupportedVersion,
    InvalidYarnLockfile,
    YarnLockfileParseError,
    WorkspaceNameMissing,
    UnresolvableDependency,
    NonExistentWorkspaceDependency,
    InvalidPackageJson,
    MissingPackageVersion,
    DependencyLoop,
    YarnBerryParseError,
    InvalidYarnBerryLockfile,
    YarnBerryVersionTooOld,
    MissingRootPackageJson,
};

fn invalidYarnLockfile() error{InvalidYarnLockfile} {
    return error.InvalidYarnLockfile;
}

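/// Entry point: sniffs the lockfile format and dispatches. Yarn v1 files
/// start with a "# yarn lockfile v1" (or autogenerated-file) comment header;
/// Berry files are YAML documents containing a `__metadata:` block. Anything
/// else is rejected with an error.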
pub fn migrateYarnLockfile(
    lockfile: *Lockfile,
    manager: *PackageManager,
    allocator: std.mem.Allocator,
    log: *logger.Log,
    data: []const u8,
    dir: bun.FD,
) MigrateYarnLockfileError!LoadResult {
    _ = dir;

    lockfile.initEmpty(allocator);
    bun.install.initializeStore();

    const is_yarn_v1 = strings.hasPrefixComptime(data, "# yarn lockfile v1") or
        strings.hasPrefixComptime(data, "# THIS IS AN AUTOGENERATED FILE");

    const trimmed = strings.trim(data, " \t\n\r");
    const is_berry = strings.hasPrefixComptime(trimmed, "{") or strings.contains(data, "__metadata:");

    if (is_berry) {
        bun.analytics.Features.yarn_berry_migration += 1;
        return try migrateYarnBerry(lockfile, manager, allocator, log, data);
    } else if (is_yarn_v1) {
        bun.analytics.Features.yarn_migration += 1;
        return try migrateYarnV1(lockfile, manager, allocator, log, data);
    } else {
        try log.addError(null, logger.Loc.Empty, "Invalid yarn.lock format. Expected '# yarn lockfile v1' header or JSON format for Yarn Berry");
        return error.YarnLockfileVersionTooOld;
    }
}

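/// Migrates a Yarn v1 (classic) lockfile. For reference, an entry in that
/// format looks roughly like this (illustrative sketch of the well-known
/// yarn.lock v1 shape, not an entry from this repository):
///
///   lodash@^4.17.20, lodash@^4.17.21:
///     version "4.17.21"
///     resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#..."
///     integrity sha512-...
///
/// The migration builds the root and workspace packages from package.json
/// files on disk, appends one package per lockfile entry, then resolves
/// dependency IDs to package IDs through a "name@version-range" map.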
fn migrateYarnV1(
    lockfile: *Lockfile,
    manager: *PackageManager,
    allocator: std.mem.Allocator,
    log: *logger.Log,
    data: []const u8,
) MigrateYarnLockfileError!LoadResult {
    const entries = try parseYarnV1Lockfile(data, allocator, log);
    defer {
        for (entries.items) |*entry| {
            entry.deinit(allocator);
        }
        entries.deinit();
    }

    const pkg_map, const workspace_pkgs_off, const workspace_pkgs_end = build: {
        var string_buf = lockfile.stringBuf();

        var pkg_map: bun.StringArrayHashMap(PackageID) = .init(allocator);

        try pkg_map.putNoClobber(bun.fs.FileSystem.instance.top_level_dir, 0);

        var pkg_json_path: bun.AutoAbsPath = .initTopLevelDir();
        defer pkg_json_path.deinit();

        pkg_json_path.append("package.json");

        const root_pkg_json = manager.workspace_package_json_cache.getWithPath(allocator, log, pkg_json_path.slice(), .{}).unwrap() catch {
            return invalidYarnLockfile();
        };

        const root_json = root_pkg_json.root;

        try scanWorkspaces(lockfile, manager, allocator, log, &root_json);

        {
            var root_pkg: Lockfile.Package = .{};

            if (try root_json.getString(allocator, "name")) |name_info| {
                const name, _ = name_info;
                const name_hash = String.Builder.stringHash(name);
                root_pkg.name = try string_buf.appendWithHash(name, name_hash);
                root_pkg.name_hash = name_hash;
            }

            const root_deps_off, var root_deps_len = try parsePackageJsonDependencies(
                lockfile,
                manager,
                allocator,
                &root_json,
                &string_buf,
                log,
            );

            const workspace_deps_start = lockfile.buffers.dependencies.items.len;
            for (lockfile.workspace_paths.values()) |workspace_path| {
                var ws_pkg_json_path: bun.AutoAbsPath = .initTopLevelDir();
                defer ws_pkg_json_path.deinit();

                ws_pkg_json_path.append(workspace_path.slice(string_buf.bytes.items));
                ws_pkg_json_path.append("package.json");

                const ws_pkg_json = manager.workspace_package_json_cache.getWithPath(allocator, log, ws_pkg_json_path.slice(), .{}).unwrap() catch continue;
                const ws_json = ws_pkg_json.root;

                const ws_name, _ = try ws_json.getString(allocator, "name") orelse continue;
                const ws_name_hash = String.Builder.stringHash(ws_name);

                const ws_dep: Dependency = .{
                    .name = try string_buf.appendWithHash(ws_name, ws_name_hash),
                    .name_hash = ws_name_hash,
                    .behavior = .{ .workspace = true },
                    .version = .{
                        .tag = .workspace,
                        .value = .{ .workspace = workspace_path },
                    },
                };

                try lockfile.buffers.dependencies.append(allocator, ws_dep);
            }
            const workspace_deps_count: u32 = @intCast(lockfile.buffers.dependencies.items.len - workspace_deps_start);
            root_deps_len += workspace_deps_count;

            root_pkg.dependencies = .{ .off = root_deps_off, .len = root_deps_len };
            root_pkg.resolutions = .{ .off = root_deps_off, .len = root_deps_len };
            root_pkg.meta.id = 0;
            root_pkg.resolution = .init(.{ .root = {} });

            if (root_json.get("bin")) |bin_expr| {
                root_pkg.bin = try Bin.parseAppend(allocator, bin_expr, &string_buf, &lockfile.buffers.extern_strings);
            } else if (root_json.get("directories")) |directories_expr| {
                if (directories_expr.get("bin")) |bin_expr| {
                    root_pkg.bin = try Bin.parseAppendFromDirectories(allocator, bin_expr, &string_buf);
                }
            }

            try lockfile.packages.append(allocator, root_pkg);
            try lockfile.getOrPutID(0, root_pkg.name_hash);

            if (root_json.get("resolutions") orelse root_json.get("overrides")) |resolutions_expr| {
                if (resolutions_expr.data == .e_object) {
                    try lockfile.overrides.map.ensureUnusedCapacity(allocator, resolutions_expr.data.e_object.properties.len);

                    for (resolutions_expr.data.e_object.properties.slice()) |prop| {
                        const key = prop.key.?;
                        const value = prop.value.?;

                        var name_str = key.asString(allocator) orelse continue;
                        const value_str = value.asString(allocator) orelse continue;

                        if (strings.hasPrefixComptime(name_str, "**/"))
                            name_str = name_str[3..];

                        if (name_str.len == 0) continue;

                        if (name_str[0] == '@') {
                            const first_slash = strings.indexOfChar(name_str, '/') orelse continue;
                            if (strings.indexOfChar(name_str[first_slash + 1 ..], '/') != null) {
                                continue;
                            }
                        } else if (strings.indexOfChar(name_str, '/') != null) {
                            continue;
                        }

                        const name_hash = String.Builder.stringHash(name_str);
                        const name = try string_buf.appendWithHash(name_str, name_hash);
                        const version_string = try string_buf.append(value_str);
                        const version_sliced = version_string.sliced(string_buf.bytes.items);

                        const dep: Dependency = .{
                            .name = name,
                            .name_hash = name_hash,
                            .version = Dependency.parse(
                                allocator,
                                name,
                                name_hash,
                                version_sliced.slice,
                                &version_sliced,
                                log,
                                manager,
                            ) orelse continue,
                        };

                        lockfile.overrides.map.putAssumeCapacity(name_hash, dep);
                    }
                }
            }
        }

        const workspace_pkgs_off = lockfile.packages.len;

        for (lockfile.workspace_paths.values()) |workspace_path| {
            var ws_pkg_json_path: bun.AutoAbsPath = .initTopLevelDir();
            defer ws_pkg_json_path.deinit();

            ws_pkg_json_path.append(workspace_path.slice(string_buf.bytes.items));
            const abs_path = try allocator.dupe(u8, ws_pkg_json_path.slice());
            ws_pkg_json_path.append("package.json");

            const ws_pkg_json = manager.workspace_package_json_cache.getWithPath(allocator, log, ws_pkg_json_path.slice(), .{}).unwrap() catch continue;
            const ws_json = ws_pkg_json.root;

            const name, _ = try ws_json.getString(allocator, "name") orelse continue;
            const name_hash = String.Builder.stringHash(name);

            var pkg: Lockfile.Package = .{
                .name = try string_buf.appendWithHash(name, name_hash),
                .name_hash = name_hash,
                .resolution = .init(.{ .workspace = workspace_path }),
            };

            const deps_off, const deps_len = try parsePackageJsonDependencies(
                lockfile,
                manager,
                allocator,
                &ws_json,
                &string_buf,
                log,
            );

            pkg.dependencies = .{ .off = deps_off, .len = deps_len };
            pkg.resolutions = .{ .off = deps_off, .len = deps_len };

            if (ws_json.get("bin")) |bin_expr| {
                pkg.bin = try Bin.parseAppend(allocator, bin_expr, &string_buf, &lockfile.buffers.extern_strings);
            } else if (ws_json.get("directories")) |directories_expr| {
                if (directories_expr.get("bin")) |bin_expr| {
                    pkg.bin = try Bin.parseAppendFromDirectories(allocator, bin_expr, &string_buf);
                }
            }

            const pkg_id = try lockfile.appendPackageDedupe(&pkg, string_buf.bytes.items);

            const entry = try pkg_map.getOrPut(abs_path);
            if (entry.found_existing) {
                return invalidYarnLockfile();
            }

            entry.value_ptr.* = pkg_id;
        }

        const workspace_pkgs_end = lockfile.packages.len;
        var added_count: usize = 0;
        var skipped_workspace: usize = 0;
        var skipped_empty: usize = 0;
        var skipped_other: usize = 0;
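
        // Each remaining yarn.lock entry becomes one package. The first spec
        // (e.g. "pkg@^1.0.0") decides how the resolution is classified:
        // github/git/tarball-URL specs are parsed structurally, default-
        // registry tarballs are rebuilt for the configured scope, and other
        // resolved URLs fall back to Resolution.fromPnpmLockfile.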
        for (entries.items) |entry| {
            if (entry.specs.items.len == 0 or entry.version.len == 0) {
                skipped_empty += 1;
                continue;
            }

            const first_spec = entry.specs.items[0];

            if (strings.eqlComptime(entry.version, "0.0.0-use.local")) {
                skipped_workspace += 1;
                continue;
            }

            var is_github_spec = false;
            var is_git_spec = false;
            var is_tarball_url_spec = false;

            if (strings.containsComptime(first_spec, "@github:")) {
                is_github_spec = true;
            } else if (strings.containsComptime(first_spec, "@git+")) {
                is_git_spec = true;
            } else if (strings.containsComptime(first_spec, "@https://") or
                strings.containsComptime(first_spec, "@http://"))
            {
                is_tarball_url_spec = true;
            }

            const real_name = blk: {
                if (strings.containsComptime(first_spec, "@npm:")) {
                    if (strings.hasPrefixComptime(first_spec, "@")) {
                        if (strings.indexOfChar(first_spec, '@')) |first_at| {
                            const after_first_at = first_spec[first_at + 1 ..];
                            if (strings.indexOfChar(after_first_at, '@')) |second_at_in_substr| {
                                const second_at = first_at + 1 + second_at_in_substr;
                                if (strings.hasPrefixComptime(first_spec[second_at..], "@npm:")) {
                                    const real_spec = first_spec[second_at + 5 ..];
                                    const real_pkg_name = extractPackageName(real_spec);
                                    break :blk real_pkg_name;
                                }
                            }
                        }
                    } else {
                        if (strings.indexOfChar(first_spec, '@')) |at_pos| {
                            const after_at = first_spec[at_pos + 1 ..];
                            if (strings.hasPrefixComptime(after_at, "npm:")) {
                                const real_spec = first_spec[at_pos + 5 ..];
                                const real_pkg_name = extractPackageName(real_spec);
                                break :blk real_pkg_name;
                            }
                        }
                    }
                }
                break :blk extractPackageName(first_spec);
            };

            var real_name_hash = String.Builder.stringHash(real_name);
            var real_name_string = try string_buf.appendWithHash(real_name, real_name_hash);

            var res: Resolution = undefined;

            if (is_github_spec) {
                const at_github_idx = strings.indexOf(first_spec, "@github:") orelse {
                    skipped_other += 1;
                    continue;
                };
                const github_spec = first_spec[at_github_idx + 8 ..];
                var repo = try Repository.parseAppendGithub(github_spec, &string_buf);

                if (repo.committish.isEmpty() and entry.resolved.len > 0) {
                    if (Resolution.fromPnpmLockfile(entry.resolved, &string_buf)) |resolved_res| {
                        if (resolved_res.tag == .github) {
                            repo.committish = resolved_res.value.github.committish;
                        }
                    } else |_| {}
                }

                if (repo.committish.len() > 0) {
                    const committish = repo.committish.slice(string_buf.bytes.items);
                    if (committish.len > 7) {
                        repo.committish = try string_buf.append(committish[0..7]);
                    }
                }

                res = .init(.{ .github = repo });
                const alias_name = first_spec[0..at_github_idx];
                real_name_hash = String.Builder.stringHash(alias_name);
                real_name_string = try string_buf.appendWithHash(alias_name, real_name_hash);
            } else if (is_git_spec) {
                const at_git_idx = strings.indexOf(first_spec, "@git+") orelse {
                    skipped_other += 1;
                    continue;
                };
                const git_spec = first_spec[at_git_idx + 1 ..];

                if (strings.containsComptime(git_spec, "github.com")) {
                    const github_com_idx = strings.indexOf(git_spec, "github.com/") orelse {
                        skipped_other += 1;
                        continue;
                    };
                    var path = git_spec[github_com_idx + "github.com/".len ..];

                    if (strings.hasPrefixComptime(path, "git+")) {
                        path = path["git+".len..];
                    }

                    var hash_idx: usize = 0;
                    var slash_idx: usize = 0;
                    for (path, 0..) |c, i| {
                        switch (c) {
                            '/' => slash_idx = i,
                            '#' => {
                                hash_idx = i;
                                break;
                            },
                            else => {},
                        }
                    }

                    const owner = path[0..slash_idx];
                    var repo_part = if (hash_idx == 0) path[slash_idx + 1 ..] else path[slash_idx + 1 .. hash_idx];

                    if (strings.hasSuffixComptime(repo_part, ".git")) {
                        repo_part = repo_part[0 .. repo_part.len - 4];
                    }

                    var repo_result: Repository = .{
                        .owner = try string_buf.append(owner),
                        .repo = try string_buf.append(repo_part),
                    };

                    if (hash_idx != 0) {
                        const committish = path[hash_idx + 1 ..];
                        const short_hash = if (committish.len > 7) committish[0..7] else committish;
                        repo_result.committish = try string_buf.append(short_hash);
                    } else if (entry.resolved.len > 0) {
                        if (strings.indexOfChar(entry.resolved, '#')) |resolved_hash_idx| {
                            const committish = entry.resolved[resolved_hash_idx + 1 ..];
                            const short_hash = if (committish.len > 7) committish[0..7] else committish;
                            repo_result.committish = try string_buf.append(short_hash);
                        }
                    }

                    res = .init(.{ .github = repo_result });
                    real_name_hash = String.Builder.stringHash(repo_part);
                    real_name_string = repo_result.repo;
                } else {
                    var repo = try Repository.parseAppendGit(git_spec, &string_buf);
                    res = .init(.{ .git = repo });
                    var repo_name = repo.repo.slice(string_buf.bytes.items);
                    if (strings.hasSuffixComptime(repo_name, ".git")) {
                        repo_name = repo_name[0 .. repo_name.len - 4];
                    }
                    if (strings.lastIndexOfChar(repo_name, '/')) |slash| {
                        repo_name = repo_name[slash + 1 ..];
                    }
                    real_name_hash = String.Builder.stringHash(repo_name);
                    real_name_string = try string_buf.appendWithHash(repo_name, real_name_hash);
                }
            } else if (is_tarball_url_spec) {
                const at_http_idx = strings.indexOf(first_spec, "@http") orelse {
                    skipped_other += 1;
                    continue;
                };
                const url_after_at = first_spec[at_http_idx + 1 ..];

                if (strings.indexOf(url_after_at, "/-/")) |dash_slash_idx| {
                    const before_dash_slash = url_after_at[0..dash_slash_idx];
                    if (strings.lastIndexOfChar(before_dash_slash, '/')) |last_slash| {
                        const real_pkg_name_from_url = before_dash_slash[last_slash + 1 ..];
                        real_name_hash = String.Builder.stringHash(real_pkg_name_from_url);
                        real_name_string = try string_buf.appendWithHash(real_pkg_name_from_url, real_name_hash);
                    } else {
                        const real_pkg_name_from_spec = first_spec[0..at_http_idx];
                        real_name_hash = String.Builder.stringHash(real_pkg_name_from_spec);
                        real_name_string = try string_buf.appendWithHash(real_pkg_name_from_spec, real_name_hash);
                    }
                } else {
                    const real_pkg_name_from_spec = first_spec[0..at_http_idx];
                    real_name_hash = String.Builder.stringHash(real_pkg_name_from_spec);
                    real_name_string = try string_buf.appendWithHash(real_pkg_name_from_spec, real_name_hash);
                }
                const version_str = try string_buf.append(entry.version);
                const parsed = Semver.Version.parse(version_str.sliced(string_buf.bytes.items));

                if (!parsed.valid or parsed.version.major == null or parsed.version.minor == null or parsed.version.patch == null) {
                    skipped_other += 1;
                    continue;
                }

                res = .init(.{ .npm = .{
                    .version = parsed.version.min(),
                    .url = try string_buf.append(url_after_at),
                } });
            } else if (entry.resolved.len == 0) {
                if (strings.containsComptime(first_spec, "@file:")) {
                    const at_file_idx = strings.indexOf(first_spec, "@file:") orelse {
                        skipped_other += 1;
                        continue;
                    };
                    const path = first_spec[at_file_idx + 6 ..];
                    if (strings.hasSuffixComptime(path, ".tgz") or strings.hasSuffixComptime(path, ".tar.gz")) {
                        res = .init(.{ .local_tarball = try string_buf.append(path) });
                    } else {
                        res = .init(.{ .folder = try string_buf.append(path) });
                    }
                } else {
                    skipped_other += 1;
                    continue;
                }
            } else if (isDefaultRegistry(entry.resolved) and
                (strings.hasPrefixComptime(entry.resolved, "https://") or
                    strings.hasPrefixComptime(entry.resolved, "http://")))
            {
                const version_str = try string_buf.append(entry.version);
                const parsed = Semver.Version.parse(version_str.sliced(string_buf.bytes.items));

                if (!parsed.valid) {
                    continue;
                }

                const scope = manager.scopeForPackageName(real_name);
                const url = try ExtractTarball.buildURL(
                    scope.url.href,
                    strings.StringOrTinyString.init(real_name),
                    parsed.version.min(),
                    string_buf.bytes.items,
                );

                res = .init(.{ .npm = .{
                    .version = parsed.version.min(),
                    .url = try string_buf.append(url),
                } });
            } else {
                res = Resolution.fromPnpmLockfile(entry.resolved, &string_buf) catch {
                    skipped_other += 1;
                    continue;
                };

                switch (res.tag) {
                    .github => {
                        var repo = res.value.github;
                        if (repo.committish.len() > 0) {
                            const committish = repo.committish.slice(string_buf.bytes.items);
                            if (committish.len > 7) {
                                repo.committish = try string_buf.append(committish[0..7]);
                                res = .init(.{ .github = repo });
                            }
                        }
                        const repo_name = repo.repo.slice(string_buf.bytes.items);
                        real_name_hash = String.Builder.stringHash(repo_name);
                        real_name_string = repo.repo;
                    },
                    .git => {
                        const repo = res.value.git;
                        var repo_name = repo.repo.slice(string_buf.bytes.items);
                        if (strings.hasSuffixComptime(repo_name, ".git")) {
                            repo_name = repo_name[0 .. repo_name.len - 4];
                        }
                        if (strings.lastIndexOfChar(repo_name, '/')) |slash| {
                            repo_name = repo_name[slash + 1 ..];
                        }
                        real_name_hash = String.Builder.stringHash(repo_name);
                        real_name_string = try string_buf.appendWithHash(repo_name, real_name_hash);
                    },
                    else => {},
                }
            }

            added_count += 1;
            var pkg: Lockfile.Package = .{
                .name = real_name_string,
                .name_hash = real_name_hash,
                .resolution = res.copy(),
            };

            if (entry.integrity.len > 0) {
                pkg.meta.integrity = Integrity.parse(entry.integrity);
            }

            const deps_off, const deps_len = try parseYarnDependencies(
                lockfile,
                allocator,
                &entry,
                &string_buf,
                log,
            );

            pkg.dependencies = .{ .off = deps_off, .len = deps_len };
            pkg.resolutions = .{ .off = deps_off, .len = deps_len };

            const pkg_id = try lockfile.appendPackageDedupe(&pkg, string_buf.bytes.items);

            var key_buf: [1024]u8 = undefined;
            for (entry.specs.items) |spec| {
                const key = std.fmt.bufPrint(&key_buf, "{s}", .{spec}) catch continue;
                const pkg_entry = try pkg_map.getOrPut(try allocator.dupe(u8, key));
                if (!pkg_entry.found_existing) {
                    pkg_entry.value_ptr.* = pkg_id;
                }
            }
        }

        break :build .{
            pkg_map,
            workspace_pkgs_off,
            workspace_pkgs_end,
        };
    };

    const string_buf = lockfile.buffers.string_bytes.items;

    var res_buf: std.ArrayList(u8) = .init(allocator);
    defer res_buf.deinit();

    try lockfile.buffers.resolutions.ensureTotalCapacityPrecise(allocator, lockfile.buffers.dependencies.items.len);
    lockfile.buffers.resolutions.expandToCapacity();
    @memset(lockfile.buffers.resolutions.items, invalid_package_id);

    const pkgs = lockfile.packages.slice();
    const pkg_deps = pkgs.items(.dependencies);

    {
        for (pkg_deps[0].begin()..pkg_deps[0].end()) |_dep_id| {
            const dep_id: DependencyID = @intCast(_dep_id);
            const dep = &lockfile.buffers.dependencies.items[dep_id];

            if (dep.behavior.isWorkspace()) {
                const workspace_path = dep.version.value.workspace.slice(string_buf);
                var path_buf: bun.AutoAbsPath = .initTopLevelDir();
                defer path_buf.deinit();
                path_buf.append(workspace_path);
                if (pkg_map.get(path_buf.slice())) |workspace_pkg_id| {
                    lockfile.buffers.resolutions.items[dep_id] = workspace_pkg_id;
                    continue;
                }
            }

            const dep_name = dep.name.slice(string_buf);
            const version_str = dep.version.literal.slice(string_buf);

            res_buf.clearRetainingCapacity();
            try res_buf.writer().print("{s}@{s}", .{ dep_name, version_str });

            const pkg_id = pkg_map.get(res_buf.items) orelse {
                continue;
            };

            lockfile.buffers.resolutions.items[dep_id] = pkg_id;
        }
    }

    for (workspace_pkgs_off..workspace_pkgs_end) |_pkg_id| {
        const pkg_id: PackageID = @intCast(_pkg_id);
        const deps = pkg_deps[pkg_id];

        for (deps.begin()..deps.end()) |_dep_id| {
            const dep_id: DependencyID = @intCast(_dep_id);
            const dep = &lockfile.buffers.dependencies.items[dep_id];

            if (dep.behavior.isWorkspace()) {
                continue;
            }

            const dep_name = dep.name.slice(string_buf);
            const version_str = dep.version.literal.slice(string_buf);

            res_buf.clearRetainingCapacity();
            try res_buf.writer().print("{s}@{s}", .{ dep_name, version_str });

            const res_pkg_id = pkg_map.get(res_buf.items) orelse {
                continue;
            };

            lockfile.buffers.resolutions.items[dep_id] = res_pkg_id;
        }
    }

    for (workspace_pkgs_end..lockfile.packages.len) |_pkg_id| {
        const pkg_id: PackageID = @intCast(_pkg_id);
        const deps = pkg_deps[pkg_id];

        for (deps.begin()..deps.end()) |_dep_id| {
            const dep_id: DependencyID = @intCast(_dep_id);
            const dep = &lockfile.buffers.dependencies.items[dep_id];
            const dep_name = dep.name.slice(string_buf);
            const version_str = dep.version.literal.slice(string_buf);

            res_buf.clearRetainingCapacity();
            try res_buf.writer().print("{s}@{s}", .{ dep_name, version_str });

            const res_pkg_id = pkg_map.get(res_buf.items) orelse {
                continue;
            };

            lockfile.buffers.resolutions.items[dep_id] = res_pkg_id;
        }
    }

    try lockfile.resolve(log);

    try lockfile.fetchNecessaryPackageMetadataAfterYarnOrPnpmMigration(manager, .yarn_classic);

    return .{
        .ok = .{
            .lockfile = lockfile,
            .loaded_from_binary_lockfile = false,
            .migrated = .yarn,
            .serializer_result = .{},
            .format = .text,
        },
    };
}

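/// Migrates a Yarn Berry (v2+) lockfile, which is a YAML document. A typical
/// entry (illustrative sketch of the well-known Berry format, not taken from
/// this repository) looks like:
///
///   "lodash@npm:^4.17.21":
///     version: 4.17.21
///     resolution: "lodash@npm:4.17.21"
///     checksum: 10c0/...
///
/// Only `@npm:` resolutions are imported; workspace entries come from disk,
/// and virtual/patch/link/file/portal/exec resolutions are counted and
/// skipped.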
fn migrateYarnBerry(
    lockfile: *Lockfile,
    manager: *PackageManager,
    allocator: std.mem.Allocator,
    log: *logger.Log,
    data: []const u8,
) MigrateYarnLockfileError!LoadResult {
    var yaml_arena = bun.ArenaAllocator.init(allocator);
    defer yaml_arena.deinit();

    const yaml_source = logger.Source.initPathString("yarn.lock", data);
    const _root = YAML.parse(&yaml_source, log, yaml_arena.allocator()) catch {
        try log.addError(null, logger.Loc.Empty, "Failed to parse yarn.lock as YAML");
        return error.YarnBerryParseError;
    };

    const json = try _root.deepClone(allocator);

    if (json.data != .e_object) {
        try log.addError(null, logger.Loc.Empty, "Yarn Berry lockfile root is not an object");
        return error.InvalidYarnBerryLockfile;
    }

    const root = json;

    const metadata = root.get("__metadata") orelse {
        try log.addError(null, logger.Loc.Empty, "Missing __metadata in yarn.lock (not a valid Yarn Berry lockfile)");
        return error.InvalidYarnBerryLockfile;
    };

    if (metadata.data != .e_object) {
        try log.addError(null, logger.Loc.Empty, "__metadata is not an object");
        return error.InvalidYarnBerryLockfile;
    }

    if (metadata.get("version")) |version_node| {
        if (version_node.data == .e_string) {
            const version_str = version_node.data.e_string.data;
            const version = std.fmt.parseInt(u32, version_str, 10) catch {
                try log.addError(null, logger.Loc.Empty, "Invalid __metadata.version format");
                return error.InvalidYarnBerryLockfile;
            };
            if (version < 6) {
                try log.addErrorFmt(
                    null,
                    logger.Loc.Empty,
                    allocator,
                    "Yarn Berry lockfile version {d} is too old. Please upgrade to v6+.",
                    .{version},
                );
                return error.YarnBerryVersionTooOld;
            }
        }
    }

    bun.Output.prettyErrorln("<yellow>Note:<r> Yarn Berry (v2+) migration is experimental. Some features may not work correctly.", .{});

    var string_buf = lockfile.stringBuf();

    var root_pkg_json_path: bun.AutoAbsPath = .initTopLevelDir();
    defer root_pkg_json_path.deinit();
    root_pkg_json_path.append("package.json");

    const root_pkg_json = manager.workspace_package_json_cache.getWithPath(allocator, log, root_pkg_json_path.slice(), .{}).unwrap() catch {
        try log.addError(null, logger.Loc.Empty, "Failed to read root package.json");
        return error.MissingRootPackageJson;
    };

    const root_json = root_pkg_json.root;

    try scanWorkspaces(lockfile, manager, allocator, log, &root_json);

    {
        var root_pkg: Lockfile.Package = .{};

        if (try root_json.getString(allocator, "name")) |name_info| {
            const name, _ = name_info;
            const name_hash = String.Builder.stringHash(name);
            root_pkg.name = try string_buf.appendWithHash(name, name_hash);
            root_pkg.name_hash = name_hash;

            const root_path = try string_buf.append(".");
            try lockfile.workspace_paths.put(allocator, name_hash, root_path);

            if (try root_json.getString(allocator, "version")) |version_info| {
                const version, _ = version_info;
                const version_str = try string_buf.append(version);
                const parsed = Semver.Version.parse(version_str.sliced(string_buf.bytes.items));
                if (parsed.valid) {
                    try lockfile.workspace_versions.put(allocator, name_hash, parsed.version.min());
                }
            } else {
                try lockfile.workspace_versions.put(allocator, name_hash, Semver.Version{});
            }
        }

        const root_deps_off, var root_deps_len = try parsePackageJsonDependencies(
            lockfile,
            manager,
            allocator,
            &root_json,
            &string_buf,
            log,
        );

        const workspace_deps_start = lockfile.buffers.dependencies.items.len;
        for (lockfile.workspace_paths.values()) |workspace_path| {
            var ws_pkg_json_path: bun.AutoAbsPath = .initTopLevelDir();
            defer ws_pkg_json_path.deinit();

            ws_pkg_json_path.append(workspace_path.slice(string_buf.bytes.items));
            ws_pkg_json_path.append("package.json");

            const ws_pkg_json = manager.workspace_package_json_cache.getWithPath(allocator, log, ws_pkg_json_path.slice(), .{}).unwrap() catch continue;
            const ws_json = ws_pkg_json.root;

            const ws_name, _ = try ws_json.getString(allocator, "name") orelse continue;
            const ws_name_hash = String.Builder.stringHash(ws_name);

            const ws_dep: Dependency = .{
                .name = try string_buf.appendWithHash(ws_name, ws_name_hash),
                .name_hash = ws_name_hash,
                .behavior = .{ .workspace = true },
                .version = .{
                    .tag = .workspace,
                    .value = .{ .workspace = workspace_path },
                },
            };

            try lockfile.buffers.dependencies.append(allocator, ws_dep);
        }
        const workspace_deps_count: u32 = @intCast(lockfile.buffers.dependencies.items.len - workspace_deps_start);
        root_deps_len += workspace_deps_count;

        root_pkg.dependencies = .{ .off = root_deps_off, .len = root_deps_len };
        root_pkg.resolutions = .{ .off = root_deps_off, .len = root_deps_len };
        root_pkg.meta.id = 0;
        root_pkg.resolution = .init(.{ .root = {} });

        if (root_json.get("bin")) |bin_expr| {
            root_pkg.bin = try Bin.parseAppend(allocator, bin_expr, &string_buf, &lockfile.buffers.extern_strings);
        } else if (root_json.get("directories")) |directories_expr| {
            if (directories_expr.get("bin")) |bin_expr| {
                root_pkg.bin = try Bin.parseAppendFromDirectories(allocator, bin_expr, &string_buf);
            }
        }

        try lockfile.packages.append(allocator, root_pkg);
        try lockfile.getOrPutID(0, root_pkg.name_hash);
    }

    var pkg_map = std.StringHashMap(PackageID).init(allocator);
    defer pkg_map.deinit();

    const top_level_dir = bun.fs.FileSystem.instance.top_level_dir;
    try pkg_map.put(top_level_dir, 0);

    for (lockfile.workspace_paths.values()) |workspace_path| {
        const ws_path_str = workspace_path.slice(string_buf.bytes.items);
        if (strings.eqlComptime(ws_path_str, ".")) continue;

        var ws_pkg_json_path: bun.AutoAbsPath = .initTopLevelDir();
        defer ws_pkg_json_path.deinit();

        ws_pkg_json_path.append(ws_path_str);
        const abs_path = try allocator.dupe(u8, ws_pkg_json_path.slice());
        ws_pkg_json_path.append("package.json");

        const ws_pkg_json = manager.workspace_package_json_cache.getWithPath(allocator, log, ws_pkg_json_path.slice(), .{}).unwrap() catch continue;
        const ws_json = ws_pkg_json.root;

        const name, _ = try ws_json.getString(allocator, "name") orelse continue;
        const name_hash = String.Builder.stringHash(name);

        var pkg: Lockfile.Package = .{
            .name = try string_buf.appendWithHash(name, name_hash),
            .name_hash = name_hash,
            .resolution = .init(.{ .workspace = workspace_path }),
        };

        const deps_off, const deps_len = try parsePackageJsonDependencies(
            lockfile,
            manager,
            allocator,
            &ws_json,
            &string_buf,
            log,
        );

        pkg.dependencies = .{ .off = deps_off, .len = deps_len };
        pkg.resolutions = .{ .off = deps_off, .len = deps_len };

        if (ws_json.get("bin")) |bin_expr| {
            pkg.bin = try Bin.parseAppend(allocator, bin_expr, &string_buf, &lockfile.buffers.extern_strings);
        } else if (ws_json.get("directories")) |directories_expr| {
            if (directories_expr.get("bin")) |bin_expr| {
                pkg.bin = try Bin.parseAppendFromDirectories(allocator, bin_expr, &string_buf);
            }
        }

        const pkg_id = try lockfile.appendPackageDedupe(&pkg, string_buf.bytes.items);

        const entry = try pkg_map.getOrPut(abs_path);
        if (entry.found_existing) {
            try log.addError(null, logger.Loc.Empty, "Duplicate workspace package");
            return error.InvalidYarnBerryLockfile;
        }

        entry.value_ptr.* = pkg_id;
    }

    var skipped_virtual: usize = 0;
    var skipped_patch: usize = 0;
    var skipped_link: usize = 0;
    var skipped_file: usize = 0;
    var skipped_portal: usize = 0;
    var skipped_exec: usize = 0;
    var skipped_other: usize = 0;
    var added_count: usize = 0;

    var spec_to_pkg_id = std.StringHashMap(PackageID).init(allocator);
    defer spec_to_pkg_id.deinit();

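    // Top-level keys are comma-separated spec lists such as
    // "lodash@npm:^4.17.20, lodash@npm:^4.17.21"; each list maps to one
    // package entry. Specs are normalized ("name@npm:range" -> "name@range")
    // below so they can be matched against dependency version literals.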
    for (root.data.e_object.properties.slice()) |prop| {
        const key = prop.key orelse continue;
        const value = prop.value orelse continue;

        const key_str = key.asString(allocator) orelse continue;

        if (strings.eqlComptime(key_str, "__metadata")) continue;

        if (value.data != .e_object) continue;
        const entry_obj = value;

        const resolution_node = entry_obj.get("resolution") orelse continue;
        const resolution_str = resolution_node.asString(allocator) orelse continue;

        if (strings.contains(resolution_str, "@workspace:")) continue;

        if (strings.contains(resolution_str, "@virtual:")) {
            skipped_virtual += 1;
            continue;
        }

        if (strings.contains(resolution_str, "@patch:")) {
            skipped_patch += 1;
            continue;
        }

        if (strings.contains(resolution_str, "@link:")) {
            skipped_link += 1;
            continue;
        }

        if (strings.contains(resolution_str, "@file:")) {
            skipped_file += 1;
            continue;
        }

        if (strings.contains(resolution_str, "@portal:")) {
            skipped_portal += 1;
            continue;
        }

        if (strings.contains(resolution_str, "@exec:")) {
            skipped_exec += 1;
            continue;
        }

        if (!strings.contains(resolution_str, "@npm:")) {
            skipped_other += 1;
            continue;
        }

        const version_node = entry_obj.get("version") orelse continue;
        const version_str = version_node.asString(allocator) orelse continue;

        const at_npm_idx = strings.indexOf(resolution_str, "@npm:") orelse continue;
        const pkg_name = if (at_npm_idx == 0) blk: {
            const after_npm = resolution_str[5..];
            if (strings.indexOfChar(after_npm, '@')) |at_idx| {
                break :blk after_npm[0..at_idx];
            }
            break :blk after_npm;
        } else resolution_str[0..at_npm_idx];

        const name_hash = String.Builder.stringHash(pkg_name);
        const name = try string_buf.appendWithHash(pkg_name, name_hash);

        const version_string = try string_buf.append(version_str);
        const sliced_version = version_string.sliced(string_buf.bytes.items);
        const parsed = Semver.Version.parse(sliced_version);
        if (!parsed.valid) continue;

        const scope = manager.scopeForPackageName(name.slice(string_buf.bytes.items));
        const url = try ExtractTarball.buildURL(
            scope.url.href,
            strings.StringOrTinyString.init(pkg_name),
            parsed.version.min(),
            string_buf.bytes.items,
        );

        const res = Resolution.init(.{
            .npm = .{
                .version = parsed.version.min(),
                .url = try string_buf.append(url),
            },
        });

        var pkg: Lockfile.Package = .{
            .name = name,
            .name_hash = name_hash,
            .resolution = res,
        };

        const deps_off = lockfile.buffers.dependencies.items.len;

        if (entry_obj.get("dependencies")) |deps_node| {
            if (deps_node.data == .e_object) {
                for (deps_node.data.e_object.properties.slice()) |dep_prop| {
                    const dep_key = dep_prop.key orelse continue;
                    const dep_value = dep_prop.value orelse continue;

                    const dep_name_str = dep_key.asString(allocator) orelse continue;
                    var dep_version_raw = dep_value.asString(allocator) orelse continue;

                    if (strings.hasPrefixComptime(dep_version_raw, "npm:")) {
                        dep_version_raw = dep_version_raw[4..];
                    }

                    const dep_name_hash = String.Builder.stringHash(dep_name_str);
                    const dep_name = try string_buf.appendExternalWithHash(dep_name_str, dep_name_hash);

                    const dep_version = try string_buf.append(dep_version_raw);
                    const dep_version_sliced = dep_version.sliced(string_buf.bytes.items);

                    const dep: Dependency = .{
                        .name = dep_name.value,
                        .name_hash = dep_name.hash,
                        .behavior = .{ .prod = true },
                        .version = Dependency.parse(
                            allocator,
                            dep_name.value,
                            dep_name.hash,
                            dep_version_sliced.slice,
                            &dep_version_sliced,
                            log,
                            manager,
                        ) orelse continue,
                    };

                    try lockfile.buffers.dependencies.append(allocator, dep);
                }
            }
        }

        // Parse peerDependenciesMeta first to know which peers are optional
        var optional_peer_set = std.AutoHashMap(u64, void).init(allocator);
        defer optional_peer_set.deinit();

        if (entry_obj.get("peerDependenciesMeta")) |peer_meta_node| {
            if (peer_meta_node.data == .e_object) {
                for (peer_meta_node.data.e_object.properties.slice()) |meta_prop| {
                    const meta_key = meta_prop.key orelse continue;
                    const meta_value = meta_prop.value orelse continue;

                    if (meta_value.data != .e_object) continue;

                    // Check if this peer dep is marked as optional
                    if (meta_value.data.e_object.get("optional")) |opt_node| {
                        if (opt_node.data == .e_boolean and opt_node.data.e_boolean.value) {
                            const peer_name_str = meta_key.asString(allocator) orelse continue;
                            const peer_name_hash = String.Builder.stringHash(peer_name_str);
                            try optional_peer_set.put(peer_name_hash, {});
                        }
                    }
                }
            }
        }

        if (entry_obj.get("peerDependencies")) |peers_node| {
            if (peers_node.data == .e_object) {
                for (peers_node.data.e_object.properties.slice()) |peer_prop| {
                    const peer_key = peer_prop.key orelse continue;
                    const peer_value = peer_prop.value orelse continue;

                    const peer_name_str = peer_key.asString(allocator) orelse continue;
                    var peer_version_raw = peer_value.asString(allocator) orelse continue;

                    if (strings.hasPrefixComptime(peer_version_raw, "npm:")) {
                        peer_version_raw = peer_version_raw[4..];
                    }

                    const peer_name_hash = String.Builder.stringHash(peer_name_str);
                    const peer_name = try string_buf.appendExternalWithHash(peer_name_str, peer_name_hash);

                    const peer_version = try string_buf.append(peer_version_raw);
                    const peer_version_sliced = peer_version.sliced(string_buf.bytes.items);

                    const is_optional = optional_peer_set.contains(peer_name_hash);

                    const peer_dep: Dependency = .{
                        .name = peer_name.value,
                        .name_hash = peer_name.hash,
                        .behavior = .{ .peer = true, .optional = is_optional },
                        .version = Dependency.parse(
                            allocator,
                            peer_name.value,
                            peer_name.hash,
                            peer_version_sliced.slice,
                            &peer_version_sliced,
                            log,
                            manager,
                        ) orelse continue,
                    };

                    try lockfile.buffers.dependencies.append(allocator, peer_dep);
                }
            }
        }

        if (entry_obj.get("dependenciesMeta")) |deps_meta_node| {
            if (deps_meta_node.data == .e_object) {
                for (deps_meta_node.data.e_object.properties.slice()) |meta_prop| {
                    const meta_key = meta_prop.key orelse continue;
                    const meta_value = meta_prop.value orelse continue;

                    if (meta_value.data != .e_object) continue;

                    const dep_name_str = meta_key.asString(allocator) orelse continue;
                    const dep_name_hash = String.Builder.stringHash(dep_name_str);

                    if (meta_value.get("optional")) |optional_node| {
                        if (optional_node.data == .e_boolean and optional_node.data.e_boolean.value) {
                            const deps_buf = lockfile.buffers.dependencies.items[deps_off..];
                            for (deps_buf) |*dep| {
                                if (dep.name_hash == dep_name_hash) {
                                    dep.behavior.optional = true;
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }

        if (entry_obj.get("bin")) |bin_node| {
            if (bin_node.data == .e_object) {
                const bin_obj = bin_node.data.e_object;
                switch (bin_obj.properties.len) {
                    0 => {},
                    1 => {
                        const bin_name_str = bin_obj.properties.ptr[0].key.?.asString(allocator) orelse continue;
                        const bin_path_str = bin_obj.properties.ptr[0].value.?.asString(allocator) orelse continue;
                        pkg.bin = .{
                            .tag = .named_file,
                            .value = .{
                                .named_file = .{
                                    try string_buf.append(bin_name_str),
                                    try string_buf.append(bin_path_str),
                                },
                            },
                        };
                    },
                    else => {
                        const current_len = lockfile.buffers.extern_strings.items.len;
                        const count = bin_obj.properties.len * 2;
                        try lockfile.buffers.extern_strings.ensureTotalCapacityPrecise(
                            lockfile.allocator,
                            current_len + count,
                        );
                        var extern_strings = lockfile.buffers.extern_strings.items.ptr[current_len .. current_len + count];
                        lockfile.buffers.extern_strings.items.len += count;

                        var i: usize = 0;
                        for (bin_obj.properties.slice()) |bin_prop| {
                            const bin_name_str = bin_prop.key.?.asString(allocator) orelse break;
                            const bin_path_str = bin_prop.value.?.asString(allocator) orelse break;
                            extern_strings[i] = try string_buf.appendExternal(bin_name_str);
                            i += 1;
                            extern_strings[i] = try string_buf.appendExternal(bin_path_str);
                            i += 1;
                        }
                        pkg.bin = .{
                            .tag = .map,
                            .value = .{ .map = bun.install.ExternalStringList.init(lockfile.buffers.extern_strings.items, extern_strings) },
                        };
                    },
                }
            } else if (bin_node.data == .e_string) {
                const bin_str = bin_node.data.e_string.data;
                if (bin_str.len > 0) {
                    pkg.bin = .{
                        .tag = .file,
                        .value = .{ .file = try string_buf.append(bin_str) },
                    };
                }
            }
        }

        const deps_end = lockfile.buffers.dependencies.items.len;
        pkg.dependencies = .{ .off = @intCast(deps_off), .len = @intCast(deps_end - deps_off) };
        pkg.resolutions = .{ .off = @intCast(deps_off), .len = @intCast(deps_end - deps_off) };

        if (entry_obj.get("conditions")) |conditions_node| {
            if (conditions_node.asString(allocator)) |conditions_str| {
                var os_negatable = Npm.OperatingSystem.none.negatable();
                var arch_negatable = Npm.Architecture.none.negatable();

                var iter = std.mem.splitSequence(u8, conditions_str, " & ");
                while (iter.next()) |condition| {
                    const trimmed = strings.trim(condition, " \t");
                    if (strings.indexOfChar(trimmed, '=')) |eq_idx| {
                        const cond_key = trimmed[0..eq_idx];
                        const cond_value = trimmed[eq_idx + 1 ..];

                        if (strings.eqlComptime(cond_key, "os")) {
                            os_negatable.apply(cond_value);
                        } else if (strings.eqlComptime(cond_key, "cpu")) {
                            arch_negatable.apply(cond_value);
                        }
                    }
                }

                pkg.meta.os = os_negatable.combine();
                pkg.meta.arch = arch_negatable.combine();
            }
        }

        if (pkg.meta.os == .all) {
            if (entry_obj.get("os")) |os_node| {
                if (os_node.data == .e_array) {
                    var os_negatable = Npm.OperatingSystem.none.negatable();
                    for (os_node.data.e_array.slice()) |os_item| {
                        if (os_item.asString(allocator)) |os_str| {
                            os_negatable.apply(os_str);
                        }
                    }
                    pkg.meta.os = os_negatable.combine();
                }
            }
        }

        if (pkg.meta.arch == .all) {
            if (entry_obj.get("cpu")) |cpu_node| {
                if (cpu_node.data == .e_array) {
                    var arch_negatable = Npm.Architecture.none.negatable();
                    for (cpu_node.data.e_array.slice()) |cpu_item| {
                        if (cpu_item.asString(allocator)) |cpu_str| {
                            arch_negatable.apply(cpu_str);
                        }
                    }
                    pkg.meta.arch = arch_negatable.combine();
                }
            }
        }

        // if (entry_obj.get("checksum")) |checksum_node| {
        //     if (checksum_node.asString(allocator)) |checksum_str| {
        //         const maybe_integrity = convertBerryChecksum(checksum_str, allocator) catch null;
        //         if (maybe_integrity) |integrity_str| {
        //             defer allocator.free(integrity_str);
        //             pkg.meta.integrity = Integrity.parse(integrity_str);
        //         }
        //     }
        // }
        const pkg_id = try lockfile.appendPackageDedupe(&pkg, string_buf.bytes.items);

        var spec_iter = std.mem.splitSequence(u8, key_str, ", ");
        while (spec_iter.next()) |spec_raw| {
            var spec = strings.trim(spec_raw, " \t\"");
            // Normalize: "fsevents@npm:^2.3.2" -> "fsevents@^2.3.2"
            if (strings.indexOf(spec, "@npm:")) |npm_idx| {
                var normalized_buf: [1024]u8 = undefined;
                const normalized = std.fmt.bufPrint(&normalized_buf, "{s}@{s}", .{
                    spec[0..npm_idx],
                    spec[npm_idx + 5 ..],
                }) catch spec;
                const spec_copy = try allocator.dupe(u8, normalized);
                try spec_to_pkg_id.put(spec_copy, pkg_id);
            } else {
                const spec_copy = try allocator.dupe(u8, spec);
                try spec_to_pkg_id.put(spec_copy, pkg_id);
            }
        }

        added_count += 1;
    }

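    // Wire every dependency to a package by formatting "name@version-range"
    // and looking it up in the spec map; `workspace:` dependencies without an
    // explicit entry fall back to a by-name scan of workspace packages.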
    try lockfile.buffers.resolutions.ensureTotalCapacityPrecise(lockfile.allocator, lockfile.buffers.dependencies.items.len);
    lockfile.buffers.resolutions.items.len = lockfile.buffers.dependencies.items.len;
    @memset(lockfile.buffers.resolutions.items, invalid_package_id);

    const string_buf_bytes = lockfile.buffers.string_bytes.items;
    for (lockfile.buffers.dependencies.items, 0..) |dep, dep_id| {
        const dep_name = dep.name.slice(string_buf_bytes);
        const version_str = dep.version.literal.slice(string_buf_bytes);

        var res_buf: [1024]u8 = undefined;
        const key = std.fmt.bufPrint(&res_buf, "{s}@{s}", .{ dep_name, version_str }) catch continue;

        if (spec_to_pkg_id.get(key)) |pkg_id| {
            lockfile.buffers.resolutions.items[dep_id] = pkg_id;
        } else if (dep.version.tag == .workspace or strings.hasPrefixComptime(version_str, "workspace:")) {
            // Fallback for workspace: protocol without explicit lockfile entry (e.g. workspace:*)
            const dep_name_hash = String.Builder.stringHash(dep_name);
            if (lockfile.workspace_paths.get(dep_name_hash)) |_| {
                // Find the workspace package by name
                const pkg_names = lockfile.packages.items(.name);
                const pkg_name_hashes = lockfile.packages.items(.name_hash);
                const pkg_resolutions = lockfile.packages.items(.resolution);
                for (pkg_names, pkg_name_hashes, pkg_resolutions, 0..) |_, pkg_name_hash, pkg_res, pkg_idx| {
                    if (pkg_name_hash == dep_name_hash and pkg_res.tag == .workspace) {
                        lockfile.buffers.resolutions.items[dep_id] = @intCast(pkg_idx);
                        break;
                    }
                }
            }
        }
    }

    try lockfile.resolve(log);

    try lockfile.fetchNecessaryPackageMetadataAfterYarnOrPnpmMigration(manager, .yarn_berry);

    return .{
        .ok = .{
            .lockfile = lockfile,
            .loaded_from_binary_lockfile = false,
            .migrated = .yarn_berry,
            .serializer_result = .{},
            .format = .text,
        },
    };
}

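/// Reads the root package.json "workspaces" field (either an array of glob
/// patterns or an object with a "packages" array), globs each pattern for a
/// package.json, and records every workspace's relative path and version,
/// keyed by the hash of its "name" field.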
fn scanWorkspaces(
    lockfile: *Lockfile,
    manager: *PackageManager,
    allocator: std.mem.Allocator,
    log: *logger.Log,
    root_json: *const Expr,
) !void {
    var string_buf = lockfile.stringBuf();

    if (root_json.get("workspaces")) |workspaces_expr| {
        var workspace_patterns = std.ArrayList([]const u8).init(allocator);
        defer workspace_patterns.deinit();

        if (workspaces_expr.data == .e_array) {
            for (workspaces_expr.data.e_array.slice()) |pattern_expr| {
                if (pattern_expr.asString(allocator)) |pattern| {
                    try workspace_patterns.append(pattern);
                }
            }
        } else if (workspaces_expr.data == .e_object) {
            if (workspaces_expr.get("packages")) |packages_expr| {
                if (packages_expr.data == .e_array) {
                    for (packages_expr.data.e_array.slice()) |pattern_expr| {
                        if (pattern_expr.asString(allocator)) |pattern| {
                            try workspace_patterns.append(pattern);
                        }
                    }
                }
            }
        }

        var arena = std.heap.ArenaAllocator.init(allocator);
        defer arena.deinit();

        const GlobWalker = glob.GlobWalker(null, glob.walk.SyscallAccessor, false);

        for (workspace_patterns.items) |user_pattern| {
            defer _ = arena.reset(.retain_capacity);

            const glob_pattern = if (user_pattern.len == 0) "package.json" else brk: {
                const parts = [_][]const u8{ user_pattern, "package.json" };
                break :brk bun.handleOom(arena.allocator().dupe(u8, bun.path.join(parts, .auto)));
            };

            var walker: GlobWalker = .{};
            const cwd = bun.fs.FileSystem.instance.top_level_dir;
            if ((try walker.initWithCwd(&arena, glob_pattern, cwd, false, false, false, false, true)).asErr()) |_| {
                continue;
            }
            defer walker.deinit(false);

            var iter: GlobWalker.Iterator = .{
                .walker = &walker,
            };
            defer iter.deinit();
            if ((try iter.init()).asErr()) |_| {
                continue;
            }

            while (switch (try iter.next()) {
                .result => |r| r,
                .err => |_| null,
            }) |matched_path| {
                if (strings.eqlComptime(matched_path, "package.json")) continue;

                const entry_dir = bun.path.dirname(matched_path, .auto);

                var ws_pkg_json_path: bun.AutoAbsPath = .initTopLevelDir();
                defer ws_pkg_json_path.deinit();

                ws_pkg_json_path.append(matched_path);

                const ws_pkg_json = manager.workspace_package_json_cache.getWithPath(allocator, log, ws_pkg_json_path.slice(), .{}).unwrap() catch continue;
                const ws_json = ws_pkg_json.root;

                const name, _ = try ws_json.getString(allocator, "name") orelse continue;
                const name_hash = String.Builder.stringHash(name);

                try lockfile.workspace_paths.put(allocator, name_hash, try string_buf.append(entry_dir));

                if (try ws_json.getString(allocator, "version")) |version_info| {
                    const version, _ = version_info;
                    const version_str = try string_buf.append(version);
                    const parsed = Semver.Version.parse(version_str.sliced(string_buf.bytes.items));
                    if (parsed.valid) {
                        try lockfile.workspace_versions.put(allocator, name_hash, parsed.version.min());
                    }
                }
            }
        }
    }
}

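/// One parsed yarn.lock v1 entry: the comma-separated spec list from the
/// header line plus the version/resolved/integrity scalars and the
/// (optional) dependency maps. The scalar fields are heap-duplicated and
/// freed in `deinit`.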
const YarnEntry = struct {
    specs: std.ArrayList([]const u8),
    version: []const u8,
    resolved: []const u8,
    integrity: []const u8 = "",
    dependencies: std.StringHashMap([]const u8),
    optional_dependencies: std.StringHashMap([]const u8),

    fn init(allocator: std.mem.Allocator) YarnEntry {
        return .{
            .specs = std.ArrayList([]const u8).init(allocator),
            .version = "",
            .resolved = "",
            .dependencies = std.StringHashMap([]const u8).init(allocator),
            .optional_dependencies = std.StringHashMap([]const u8).init(allocator),
        };
    }

    fn deinit(self: *YarnEntry, allocator: std.mem.Allocator) void {
        self.specs.deinit();
        self.dependencies.deinit();
        self.optional_dependencies.deinit();
        if (self.version.len > 0) {
            allocator.free(self.version);
        }
        if (self.resolved.len > 0) {
            allocator.free(self.resolved);
        }
        if (self.integrity.len > 0) {
            allocator.free(self.integrity);
        }
    }
};

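/// A small indentation-driven parser for the yarn.lock v1 format: indent 0
/// starts a new entry ("spec1, spec2:"), indent 2 carries scalar fields
/// (version/resolved/integrity) or opens a dependency block, and indent 4
/// holds "name version-range" pairs inside that block. Blank lines and
/// comment lines are skipped.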
fn parseYarnV1Lockfile(
    data: []const u8,
    allocator: std.mem.Allocator,
    log: *logger.Log,
) !std.ArrayList(YarnEntry) {
    var entries = std.ArrayList(YarnEntry).init(allocator);
    errdefer {
        for (entries.items) |*entry| {
            entry.deinit(allocator);
        }
        entries.deinit();
    }

    var current_entry_idx: ?usize = null;
    var current_dep_map: ?*std.StringHashMap([]const u8) = null;
    var lines = std.mem.splitScalar(u8, data, '\n');

    while (lines.next()) |line| {
        if (line.len == 0 or strings.hasPrefixComptime(line, "#")) continue;

        const indent = blk: {
            var count: usize = 0;
            for (line) |c| {
                if (c == ' ') {
                    count += 1;
                } else {
                    break;
                }
            }
            break :blk count;
        };

        const content = line[indent..];
        if (content.len == 0) continue;

        switch (indent) {
            0 => {
                current_entry_idx = null;
                current_dep_map = null;

                if (!strings.hasSuffixComptime(content, ":")) {
                    try log.addErrorFmt(null, logger.Loc.Empty, allocator, "Invalid yarn.lock entry (missing colon): {s}", .{content});
                    return error.YarnLockfileParseError;
                }

                const specs_str = content[0 .. content.len - 1];
                var entry = YarnEntry.init(allocator);

                var spec_iter = std.mem.splitSequence(u8, specs_str, ", ");
                while (spec_iter.next()) |spec_raw| {
                    const spec = strings.trim(spec_raw, " \t\"");
                    const spec_copy = try allocator.dupe(u8, spec);
                    try entry.specs.append(spec_copy);
                }

                try entries.append(entry);
                current_entry_idx = entries.items.len - 1;
            },
            2 => {
                if (current_entry_idx == null) continue;

                if (strings.indexOfChar(content, ' ')) |space_idx| {
                    const key = content[0..space_idx];
                    const value_raw = content[space_idx + 1 ..];
                    const value = strings.trim(value_raw, " \t\"");

                    var entry = &entries.items[current_entry_idx.?];
                    if (strings.eqlComptime(key, "version")) {
                        entry.version = try allocator.dupe(u8, value);
                    } else if (strings.eqlComptime(key, "resolved")) {
                        entry.resolved = try allocator.dupe(u8, value);
                    } else if (strings.eqlComptime(key, "integrity")) {
                        entry.integrity = try allocator.dupe(u8, value);
                    } else if (strings.eqlComptime(key, "dependencies")) {
                        current_dep_map = &entry.dependencies;
                    } else if (strings.eqlComptime(key, "optionalDependencies")) {
                        current_dep_map = &entry.optional_dependencies;
                    }
                } else if (strings.hasSuffixComptime(content, ":")) {
                    const key = content[0 .. content.len - 1];
                    var entry = &entries.items[current_entry_idx.?];
                    if (strings.eqlComptime(key, "dependencies")) {
                        current_dep_map = &entry.dependencies;
                    } else if (strings.eqlComptime(key, "optionalDependencies")) {
                        current_dep_map = &entry.optional_dependencies;
                    }
                }
            },
            4 => {
                if (current_dep_map) |dep_map| {
                    if (strings.indexOfChar(content, ' ')) |space_idx| {
                        const dep_name_raw = content[0..space_idx];
                        const dep_name = strings.trim(dep_name_raw, " \t\"");
                        const dep_version_raw = content[space_idx + 1 ..];
                        const dep_version = strings.trim(dep_version_raw, " \t\"");

                        const name_copy = try allocator.dupe(u8, dep_name);
                        errdefer allocator.free(name_copy);
                        const version_copy = try allocator.dupe(u8, dep_version);
                        errdefer allocator.free(version_copy);

                        try dep_map.put(name_copy, version_copy);
                    }
                }
            },
            else => {},
        }
    }

    return entries;
}

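/// Returns the name portion of a "name@range" spec. For scoped packages
/// ("@scope/name@range") the '@' search starts after the leading one, so the
/// scope is kept in the returned slice.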
fn extractPackageName(spec: []const u8) []const u8 {
    const at_idx = if (strings.hasPrefixComptime(spec, "@"))
        strings.indexOfChar(spec[1..], '@')
    else
        strings.indexOfChar(spec, '@');

    if (at_idx) |idx| {
        if (strings.hasPrefixComptime(spec, "@")) {
            return spec[0 .. idx + 1];
        }
        return spec[0..idx];
    }

    return spec;
}

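/// True when the resolved URL points at the default Yarn or npm registry; at
/// the call site such tarball URLs are rebuilt against the configured
/// registry scope instead of being copied verbatim.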
fn isDefaultRegistry(url: []const u8) bool {
    return strings.containsComptime(url, "registry.yarnpkg.com") or
        strings.containsComptime(url, "registry.npmjs.org");
}

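/// Appends every dependency declared in a package.json (prod, dev, optional,
/// peer) to the lockfile's dependency buffer, sorts the appended slice, and
/// returns its (offset, length).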
fn parsePackageJsonDependencies(
    lockfile: *Lockfile,
    manager: *PackageManager,
    allocator: std.mem.Allocator,
    pkg_json: *const Expr,
    string_buf: *String.Buf,
    log: *logger.Log,
) !struct { u32, u32 } {
    const dependency_groups = [_]struct { []const u8, Dependency.Behavior }{
        .{ "dependencies", .{ .prod = true } },
        .{ "devDependencies", .{ .dev = true } },
        .{ "optionalDependencies", .{ .optional = true } },
        .{ "peerDependencies", .{ .peer = true } },
    };

    const off = lockfile.buffers.dependencies.items.len;

    for (dependency_groups) |group| {
        const group_name, const group_behavior = group;
        if (pkg_json.get(group_name)) |deps| {
            if (!deps.isObject()) continue;

            for (deps.data.e_object.properties.slice()) |prop| {
                const key = prop.key.?;
                const value = prop.value.?;

                const name_str = key.asString(allocator) orelse continue;
                const name_hash = String.Builder.stringHash(name_str);
                const name = try string_buf.appendExternalWithHash(name_str, name_hash);

                const version_str = value.asString(allocator) orelse continue;
                const version = try string_buf.append(version_str);
                const version_sliced = version.sliced(string_buf.bytes.items);

                const dep: Dependency = .{
                    .name = name.value,
                    .name_hash = name.hash,
                    .behavior = group_behavior,
                    .version = Dependency.parse(
                        allocator,
                        name.value,
                        name.hash,
                        version_sliced.slice,
                        &version_sliced,
                        log,
                        manager,
                    ) orelse continue,
                };

                try lockfile.buffers.dependencies.append(allocator, dep);
            }
        }
    }

    const end = lockfile.buffers.dependencies.items.len;

    std.sort.pdq(
        Dependency,
        lockfile.buffers.dependencies.items[off..],
        string_buf.bytes.items,
        Dependency.isLessThan,
    );

    return .{ @intCast(off), @intCast(end - off) };
}

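/// Appends the `dependencies` and `optionalDependencies` recorded on a
/// yarn.lock v1 entry to the lockfile's dependency buffer, sorts the appended
/// slice, and returns its (offset, length).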
fn parseYarnDependencies(
    lockfile: *Lockfile,
    allocator: std.mem.Allocator,
    entry: *const YarnEntry,
    string_buf: *String.Buf,
    log: *logger.Log,
) !struct { u32, u32 } {
    const off = lockfile.buffers.dependencies.items.len;

    var dep_iter = entry.dependencies.iterator();
    while (dep_iter.next()) |kv| {
        const name_str = kv.key_ptr.*;
        const version_str = kv.value_ptr.*;

        const name_hash = String.Builder.stringHash(name_str);
        const name = try string_buf.appendExternalWithHash(name_str, name_hash);

        const version = try string_buf.append(version_str);
        const version_sliced = version.sliced(string_buf.bytes.items);

        const dep: Dependency = .{
            .name = name.value,
            .name_hash = name.hash,
            .behavior = .{ .prod = true },
            .version = Dependency.parse(
                allocator,
                name.value,
                name.hash,
                version_sliced.slice,
                &version_sliced,
                log,
                null,
            ) orelse continue,
        };

        try lockfile.buffers.dependencies.append(allocator, dep);
    }

    var opt_dep_iter = entry.optional_dependencies.iterator();
    while (opt_dep_iter.next()) |kv| {
        const name_str = kv.key_ptr.*;
        const version_str = kv.value_ptr.*;

        const name_hash = String.Builder.stringHash(name_str);
        const name = try string_buf.appendExternalWithHash(name_str, name_hash);

        const version = try string_buf.append(version_str);
        const version_sliced = version.sliced(string_buf.bytes.items);

        const dep: Dependency = .{
            .name = name.value,
            .name_hash = name.hash,
            .behavior = .{ .optional = true },
            .version = Dependency.parse(
                allocator,
                name.value,
                name.hash,
                version_sliced.slice,
                &version_sliced,
                log,
                null,
            ) orelse continue,
        };

        try lockfile.buffers.dependencies.append(allocator, dep);
    }

    const end = lockfile.buffers.dependencies.items.len;

    std.sort.pdq(
        Dependency,
        lockfile.buffers.dependencies.items[off..],
        string_buf.bytes.items,
        Dependency.isLessThan,
    );

    return .{ @intCast(off), @intCast(end - off) };
}