mirror of https://github.com/oven-sh/bun
synced 2026-02-03 07:28:53 +00:00

Compare commits (7 commits): kai/spawn- ... dylan/gith
| Author | SHA1 | Date |
|---|---|---|
| | 9c2a7c6b82 | |
| | 766f8ceebc | |
| | c03f7c998d | |
| | beb03c3c54 | |
| | 8846ae2454 | |
| | a4c379d316 | |
| | 25b080a05e | |
@@ -13,6 +13,8 @@ const string = @import("../string_types.zig").string;
const strings = @import("../string_immutable.zig");
const bun = @import("bun");

const Repository = @import("./repository.zig").Repository;

pub const Pair = struct {
resolution_id: Install.PackageID = Install.invalid_package_id,
dependency: Dependency = .{},
@@ -235,6 +237,8 @@ pub const Version = struct {
.folder, .dist_tag => lhs.literal.eql(rhs.literal, lhs_buf, rhs_buf),
.tarball => lhs.value.tarball.eql(rhs.value.tarball, lhs_buf, rhs_buf),
.symlink => lhs.value.symlink.eql(rhs.value.symlink, lhs_buf, rhs_buf),
.git => lhs.value.git.eql(rhs.value.git, lhs_buf, rhs_buf),
.github => lhs.value.github.eql(rhs.value.github, lhs_buf, rhs_buf),
else => true,
};
}
@@ -259,17 +263,23 @@ pub const Version = struct {
/// https://stackoverflow.com/questions/51954956/whats-the-difference-between-yarn-link-and-npm-link
symlink = 5,

// git+https://example.com/repo#commit
git = 7,

// profile/repo#commit
github = 8,

/// TODO:
workspace = 6,
/// TODO:
git = 7,
/// TODO:
github = 8,

pub inline fn isNPM(this: Tag) bool {
return @enumToInt(this) < 3;
}

pub inline fn isGitHub(this: Tag) bool {
return @enumToInt(this) == 8;
}

pub inline fn isGitHubRepoPath(dependency: string) bool {
var slash_count: u8 = 0;

@@ -279,7 +289,7 @@ pub const Version = struct {

// Must be alphanumeric
switch (c) {
'\\', '/', 'a'...'z', 'A'...'Z', '0'...'9', '%' => {},
'\\', '/', 'a'...'z', 'A'...'Z', '0'...'9', '%', '-' => {},
else => return false,
}
}
@@ -495,16 +505,14 @@ pub const Version = struct {
dist_tag: String,
tarball: URI,
folder: String,
git: Repository,
github: Repository,

/// Equivalent to npm link
symlink: String,

/// Unsupported, but still parsed so an error can be thrown
workspace: void,
/// Unsupported, but still parsed so an error can be thrown
git: void,
/// Unsupported, but still parsed so an error can be thrown
github: void,
};
};

@@ -580,6 +588,46 @@ pub fn parseWithTag(
.tag = .npm,
};
},
.git => return Version{
.literal = sliced.value(),
.value = .{
.git = Repository.parse(sliced) catch |err| {
if (log_) |log| log.addErrorFmt(
null,
logger.Loc.Empty,
allocator,
"{s} parsing dependency \"{s}\"",
.{
@errorName(err),
dependency,
},
) catch unreachable;
return null;
},
},
.tag = .git,
},
.github => {
return Version{
.literal = sliced.value(),
.value = .{
.github = Repository.parseGitHub(sliced) catch |err| {
if (log_) |log| log.addErrorFmt(
null,
logger.Loc.Empty,
allocator,
"{s} parsing dependency \"{s}\"",
.{
@errorName(err),
dependency,
},
) catch unreachable;
return null;
},
},
.tag = .github,
};
},
.dist_tag => {
return Version{
.literal = sliced.value(),
@@ -652,8 +700,8 @@ pub fn parseWithTag(
.literal = sliced.value(),
};
},
.workspace, .git, .github => {
if (log_) |log| log.addErrorFmt(null, logger.Loc.Empty, allocator, "Support for dependency type \"{s}\" is not implemented yet (\"{s}\")", .{ @tagName(tag), dependency }) catch unreachable;
.workspace => {
if (log_) |log| log.addErrorFmt(null, logger.Loc.Empty, allocator, "Dependency type not implemented yet {s} for \"{s}\"", .{ @tagName(tag), dependency }) catch unreachable;
return null;
},
}
@@ -183,6 +183,7 @@ const NetworkTask = struct {
},
extract: ExtractTarball,
binlink: void,
git_clone: void,
},

pub fn notify(this: *NetworkTask, _: anytype) void {
@@ -469,6 +470,8 @@ pub const Features = struct {
.optional_dependencies = true,
};

pub const git = npm;

pub const tarball = npm;

pub const npm_manifest = Features{
@@ -506,6 +509,14 @@ const Task = struct {
return @as(u64, @truncate(u63, hasher.final())) | @as(u64, 1 << 63);
}

pub fn forGitHubPackage(repo: string, owner: string) u64 {
var hasher = std.hash.Wyhash.init(0);
hasher.update(owner);
hasher.update("/~~");
hasher.update(repo);
return @as(u64, @truncate(u63, hasher.final())) | @as(u64, 1 << 63);
}

pub fn forBinLink(package_id: PackageID) u64 {
const hash = std.hash.Wyhash.hash(0, std.mem.asBytes(&package_id));
return @as(u64, @truncate(u62, hash)) | @as(u64, 1 << 62) | @as(u64, 1 << 63);
@@ -528,6 +539,95 @@ const Task = struct {
defer this.package_manager.wake();

switch (this.tag) {
.git_clone => {
const allocator = this.package_manager.allocator;

const PATH = this.package_manager.env_loader.get("PATH") orelse "";

var git_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;

if (which(&git_path_buf, PATH, ".", "git")) |git| {
const lockfile = this.package_manager.lockfile;
const git_path = std.mem.span(git);

const git_repo = this.request.git_clone.repository;

const repo_name = lockfile.str(git_repo.repo);

var url_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const url = if (this.request.git_clone.version.tag == .github) git_repo.getGitHubURL(lockfile, &url_buf) else git_repo.getURLForClone(lockfile, &url_buf);

var temp_dir_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const temp_dir = std.os.getFdPath(this.package_manager.getTemporaryDirectory().dir.fd, &temp_dir_path_buf) catch unreachable;

var destination_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
std.mem.copy(u8, &destination_buf, temp_dir);
destination_buf[temp_dir.len] = '/';
std.mem.copy(u8, destination_buf[temp_dir.len + 1 ..], repo_name);
const destination = destination_buf[0 .. temp_dir.len + repo_name.len + 1];

const args = [_]string{
git_path,
"clone",
"-q",
url,
destination,
};

var process = std.ChildProcess.init(&args, allocator);
process.stdout_behavior = .Close;
process.stderr_behavior = .Close;
process.stdin_behavior = .Close;

const term = process.spawnAndWait() catch {
this.log.addErrorFmt(null, logger.Loc.Empty, allocator, "Failed to spawn git process to clone git dependency \"{s}\"", .{repo_name}) catch unreachable;
this.status = .fail;
this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
};
switch (term) {
else => {},
}

// get package.json bytes, send pointer back to main thread
var package_json_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
std.mem.copy(u8, &package_json_path_buf, destination);
std.mem.copy(u8, package_json_path_buf[destination.len..], "/package.json");

const package_json_path = package_json_path_buf[0 .. destination.len + "/package.json".len];

const package_json_file = std.fs.openFileAbsolute(package_json_path, .{}) catch {
this.status = .fail;
this.log.addErrorFmt(null, logger.Loc.Empty, allocator, "Failed to find package.json for git dependency \"{s}\"", .{repo_name}) catch unreachable;
this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
};

const package_json_file_stat = package_json_file.stat() catch unreachable;
const package_json_file_size = package_json_file_stat.size;

const package_json_source = allocator.alloc(u8, package_json_file_size) catch unreachable;
_ = package_json_file.preadAll(package_json_source, 0) catch unreachable;

if (package_json_file_size < "{\"name\":\"\",\"version\":\"\"}".len + repo_name.len + "0.0.0".len) {
// package.json smaller than minimum possible
this.status = .fail;
this.log.addErrorFmt(null, logger.Loc.Empty, allocator, "Invalid package.json for git dependency \"{s}\"", .{repo_name}) catch unreachable;
this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
}

this.status = .success;
this.data = .{ .package_json = package_json_source };
this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
}

this.status = .fail;
this.log.addErrorFmt(null, logger.Loc.Empty, allocator, "Failed to find git executable", .{}) catch unreachable;
this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
},
.package_manifest => {
var allocator = bun.default_allocator;
const package_manifest = Npm.Registry.getPackageMetadata(
@@ -595,10 +695,11 @@ const Task = struct {
}
}

pub const Tag = enum(u2) {
pub const Tag = enum(u3) {
package_manifest = 1,
extract = 2,
binlink = 3,
git_clone = 4,
// install = 3,
};

@@ -612,6 +713,7 @@ const Task = struct {
package_manifest: Npm.PackageManifest,
extract: string,
binlink: bool,
package_json: string,
};

pub const Request = union {
@@ -626,12 +728,18 @@ const Task = struct {
tarball: ExtractTarball,
},
binlink: Bin.Linker,
git_clone: struct {
repository: Repository,
version: Dependency.Version,
dependency_id: u32,
},
// install: PackageInstall,
};
};

const PackageInstall = struct {
cache_dir: std.fs.IterableDir,
git_cache_dir: std.fs.IterableDir,
destination_dir: std.fs.IterableDir,
cache_dir_subpath: stringZ = "",
destination_dir_subpath: stringZ = "",
@@ -688,6 +796,7 @@ const PackageInstall = struct {

this.package_install = PackageInstall{
.cache_dir = undefined,
.git_cache_dir = undefined,
.cache_dir_subpath = undefined,
.progress = ctx.progress,

@@ -1436,6 +1545,7 @@ const Waker = AsyncIO.Waker;
// 2.
pub const PackageManager = struct {
cache_directory_: ?std.fs.IterableDir = null,
git_cache_directory_: ?std.fs.IterableDir = null,
temp_dir_: ?std.fs.IterableDir = null,
root_dir: *Fs.FileSystem.DirEntry,
env_loader: *DotEnv.Loader,
@@ -1760,6 +1870,17 @@ pub const PackageManager = struct {
};
}

pub noinline fn getGitCacheDirectory(this: *PackageManager) std.fs.IterableDir {
return this.git_cache_directory_ orelse brk: {
const cache_dir = this.getCacheDirectory();
this.git_cache_directory_ = cache_dir.dir.openIterableDir("../git", .{}) catch {
this.git_cache_directory_ = cache_dir.dir.makeOpenPathIterable("../git", .{}) catch unreachable;
break :brk this.git_cache_directory_.?;
};
break :brk this.git_cache_directory_.?;
};
}

pub inline fn getTemporaryDirectory(this: *PackageManager) std.fs.IterableDir {
return this.temp_dir_ orelse brk: {
this.temp_dir_ = this.ensureTemporaryDirectory();
@@ -2022,8 +2143,8 @@ pub const PackageManager = struct {
}

pub fn resolveFromDiskCache(this: *PackageManager, package_name: []const u8, version: Dependency.Version) ?PackageID {
if (version.tag != .npm) {
// only npm supported right now
if (version.tag != .npm and version.tag != .github and version.tag != .git) {
// only npm, git, and github supported right now
// tags are more ambiguous
return null;
}
@@ -2047,32 +2168,38 @@ pub const PackageManager = struct {
Semver.Version.sortGt,
);
for (installed_versions.items) |installed_version| {
if (version.value.npm.satisfies(installed_version)) {
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
var npm_package_path = this.pathForCachedNPMPath(&buf, package_name, installed_version) catch |err| {
Output.debug("error getting path for cached npm path: {s}", .{std.mem.span(@errorName(err))});
return null;
};
const dependency = Dependency.Version{
.tag = .npm,
.value = .{
.npm = Semver.Query.Group.from(installed_version),
},
};
switch (FolderResolution.getOrPut(.{ .cache_folder = npm_package_path }, dependency, ".", this)) {
.new_package_id => |id| {
this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id], false);
return id;
},
.package_id => |id| {
this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id], false);
return id;
},
.err => |err| {
Output.debug("error getting or putting folder resolution: {s}", .{std.mem.span(@errorName(err))});
return null;
},
}
switch (version.tag) {
.npm => {
if (version.value.npm.satisfies(installed_version)) {
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
var npm_package_path = this.pathForCachedNPMPath(&buf, package_name, installed_version) catch |err| {
Output.debug("error getting path for cached npm path: {s}", .{std.mem.span(@errorName(err))});
return null;
};
const dependency = Dependency.Version{
.tag = .npm,
.value = .{
.npm = Semver.Query.Group.from(installed_version),
},
};
switch (FolderResolution.getOrPut(.{ .cache_folder = npm_package_path }, dependency, ".", this)) {
.new_package_id => |id| {
this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id], false);
return id;
},
.package_id => |id| {
this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id], false);
return id;
},
.err => |err| {
Output.debug("error getting or putting folder resolution: {s}", .{std.mem.span(@errorName(err))});
return null;
},
}
}
},
// TODO: handle git and github
else => {},
}
}
@@ -2347,6 +2474,49 @@ pub const PackageManager = struct {
);
},

.github => {
var cache_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const cache_path = try version.value.github.getCachePathForGitHub(this, &cache_path_buf);

const res = FolderResolution.getOrPut(.{ .git_cache_folder = cache_path }, version, cache_path, this);

switch (res) {
.err => |err| return err,
.package_id => |package_id| {
successFn(this, dependency_id, package_id);
return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id) };
},
.new_package_id => |package_id| {
successFn(this, dependency_id, package_id);
return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id), .is_first_time = true };
},
}

return null;
},

.git => {
var cache_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;

const cache_path = try version.value.git.getCachePath(this, &cache_path_buf);

const res = FolderResolution.getOrPut(.{ .git_cache_folder = cache_path }, version, cache_path, this);

switch (res) {
.err => |err| return err,
.package_id => |package_id| {
successFn(this, dependency_id, package_id);
return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id) };
},
.new_package_id => |package_id| {
successFn(this, dependency_id, package_id);
return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id), .is_first_time = true };
},
}

return null;
},

.folder => {
// relative to cwd
const res = FolderResolution.getOrPut(.{ .relative = void{} }, version, version.value.folder.slice(this.lockfile.buffers.string_bytes.items), this);
@@ -2408,6 +2578,32 @@ pub const PackageManager = struct {
return &task.threadpool_task;
}

fn enqueueCloneGitPackage(
this: *PackageManager,
task_id: u64,
repository: Repository,
dependency_id: u32,
dep_version: Dependency.Version,
) *ThreadPool.Task {
var task = this.allocator.create(Task) catch unreachable;
task.* = Task{
.package_manager = &PackageManager.instance,
.log = logger.Log.init(this.allocator),
.tag = Task.Tag.git_clone,
.request = .{
.git_clone = .{
.repository = repository,
.version = dep_version,
.dependency_id = dependency_id,
},
},
.id = task_id,
.data = undefined,
};

return &task.threadpool_task;
}

fn enqueueExtractNPMPackage(
this: *PackageManager,
tarball: ExtractTarball,
@@ -2543,13 +2739,60 @@ pub const PackageManager = struct {
if (!this.isRootDependency(id))
if (!dependency.behavior.isEnabled(switch (dependency.version.tag) {
.folder => this.options.remote_package_features,
.dist_tag, .npm => this.options.remote_package_features,
.dist_tag, .npm, .git, .github => this.options.remote_package_features,
else => Features{},
}))
return;
}

switch (dependency.version.tag) {
.github, .git => {
var resolve_result = this.getOrPutResolvedPackage(
name_hash,
name,
version,
dependency.behavior,
id,
resolution,
successFn,
) catch |err| brk: {
if (err == error.MissingPackageJSON) {
break :brk @as(?ResolvedPackageResult, null);
}

return err;
};

if (resolve_result == null) {
const lockfile = this.lockfile;

const repo = if (version.tag == .github) version.value.github else version.value.git;

const task_id = Task.Id.forGitHubPackage(lockfile.str(repo.repo), lockfile.str(repo.owner));
const network_id = try this.network_dedupe_map.getOrPutContext(this.allocator, task_id, .{});
if (!network_id.found_existing) {
var batch = ThreadPool.Batch{};
batch.push(ThreadPool.Batch.from(this.enqueueCloneGitPackage(
task_id,
repo,
id,
dependency.version,
)));

const count = batch.len;
this.pending_tasks += @truncate(u32, count);
this.total_tasks += @truncate(u32, count);
this.thread_pool.schedule(batch);
}
var task_queue = try this.task_queue.getOrPutContext(this.allocator, task_id, .{});
if (!task_queue.found_existing) {
task_queue.value_ptr.* = TaskCallbackList{};
}

const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency";
try task_queue.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id));
}
},
.folder, .npm, .dist_tag => {
retry_from_manifests_ptr: while (true) {
var resolve_result_ = this.getOrPutResolvedPackage(
@@ -3293,6 +3536,7 @@ pub const PackageManager = struct {
batch.push(ThreadPool.Batch.from(manager.enqueueExtractNPMPackage(extract, task)));
},
.binlink => {},
.git_clone => {},
}
}
@@ -3363,6 +3607,70 @@ pub const PackageManager = struct {
}
}
},
.git_clone => {
if (task.status == .fail) {
continue;
}

const allocator = manager.allocator;
const package_json = task.data.package_json;
defer allocator.free(package_json);
const git_repo = task.request.git_clone.repository;
var package_name = manager.lockfile.str(git_repo.repo);

package_name = manager.lockfile.str(task.request.git_clone.repository.repo);

const temp_dir = manager.getTemporaryDirectory().dir;
var temp_dir_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
_ = try std.os.getFdPath(temp_dir.fd, &temp_dir_buf);
const git_cache_dir = manager.getGitCacheDirectory().dir;
git_cache_dir.deleteTree(package_name) catch unreachable;

const version = task.request.git_clone.version;
var destination_name_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const destination_name = if (version.tag == .github) try git_repo.getCacheDirectoryForGitHub(manager, &destination_name_buf) else try git_repo.getCacheDirectory(manager, &destination_name_buf);
std.fs.rename(temp_dir, package_name, git_cache_dir, destination_name) catch unreachable;
temp_dir.deleteTree(package_name) catch unreachable;

var cache_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const cache_path = try git_repo.getCachePathForGitHub(manager, &cache_path_buf);
const res = FolderResolution.getOrPutWithPackageJSONBytes(
.{ .git_cache_folder = cache_path },
task.request.git_clone.version,
cache_path,
manager,
package_json,
);

const dependency_id = task.request.git_clone.dependency_id;
const pkg_id = brk: {
switch (res) {
.err => |err| return err,
.package_id => |package_id| {
manager.assignResolution(dependency_id, package_id);
break :brk package_id;
},
.new_package_id => |package_id| {
manager.assignResolution(dependency_id, package_id);
break :brk package_id;
},
}
};

const dependencies = manager.lockfile.packages.items(.dependencies)[pkg_id];

var dependency_list_entry = manager.task_queue.getEntry(task.id).?;
var dependency_list = dependency_list_entry.value_ptr.*;
dependency_list_entry.value_ptr.* = .{};

const end = dependencies.off + dependencies.len;
var i = dependencies.off;
while (i < end) : (i += 1) {
dependency_list.append(allocator, @unionInit(TaskCallbackContext, "dependency", i)) catch unreachable;
}

try manager.processDependencyList(dependency_list, ExtractCompletionContext, extract_ctx, callbacks);
},
.extract => {
if (task.status == .fail) {
const err = task.err orelse error.TarballFailedToExtract;
@@ -5041,8 +5349,8 @@ pub const PackageManager = struct {
if (unscoped_name.len > i + 1) request.version_buf = unscoped_name[i + 1 ..];
}

if (strings.hasPrefix("http://", request.name) or
strings.hasPrefix("https://", request.name))
if (strings.hasPrefixComptime(request.name, "http://") or
strings.hasPrefixComptime(request.name, "https://"))
{
if (Output.isEmojiEnabled()) {
Output.prettyErrorln("<r>😢 <red>error<r><d>:<r> bun {s} http://url is not implemented yet.", .{
@@ -5068,8 +5376,8 @@ pub const PackageManager = struct {
request.version_buf = std.mem.trim(u8, request.version_buf, "\n\r\t");

// https://github.com/npm/npm-package-arg/blob/fbaf2fd0b72a0f38e7c24260fd4504f4724c9466/npa.js#L330
if (strings.hasPrefix("https://", request.version_buf) or
strings.hasPrefix("http://", request.version_buf))
if (strings.hasPrefixComptime(request.version_buf, "https://") or
strings.hasPrefixComptime(request.version_buf, "http://"))
{
if (Output.isEmojiEnabled()) {
Output.prettyErrorln("<r>😢 <red>error<r><d>:<r> bun {s} http://url is not implemented yet.", .{
@@ -5636,6 +5944,7 @@ pub const PackageManager = struct {
var installer = PackageInstall{
.progress = this.progress,
.cache_dir = undefined,
.git_cache_dir = undefined,
.cache_dir_subpath = undefined,
.destination_dir = this.node_modules_folder,
.destination_dir_subpath = destination_dir_subpath,
@@ -5665,6 +5974,22 @@ pub const PackageManager = struct {
installer.cache_dir = .{ .dir = std.fs.cwd() };
}
},
.git => {
const repo: Repository = resolution.value.git;
const temp = repo.getCacheDirectory(this.manager, &cached_package_folder_name_buf) catch unreachable;
cached_package_folder_name_buf[temp.len] = 0;
const cache_dir_subpath: stringZ = std.meta.assumeSentinel(cached_package_folder_name_buf[0..temp.len], 0);
installer.cache_dir_subpath = cache_dir_subpath;
installer.cache_dir = .{ .dir = this.manager.getGitCacheDirectory().dir };
},
.github => {
const repo: Repository = resolution.value.github;
const temp = repo.getCacheDirectoryForGitHub(this.manager, &cached_package_folder_name_buf) catch unreachable;
cached_package_folder_name_buf[temp.len] = 0;
const cache_dir_subpath: stringZ = std.meta.assumeSentinel(cached_package_folder_name_buf[0..temp.len], 0);
installer.cache_dir_subpath = cache_dir_subpath;
installer.cache_dir = .{ .dir = this.manager.getGitCacheDirectory().dir };
},
.symlink => {
const directory = this.manager.globalLinkDir() catch |err| {
if (comptime log_level != .silent) {
@@ -6370,6 +6695,7 @@ pub const PackageManager = struct {
const changes = @truncate(PackageID, mapping.len);

_ = manager.getCacheDirectory();
_ = manager.getGitCacheDirectory();
_ = manager.getTemporaryDirectory();
var counter_i: PackageID = 0;
while (counter_i < changes) : (counter_i += 1) {
@@ -6424,6 +6750,7 @@ pub const PackageManager = struct {

if (root.dependencies.len > 0) {
_ = manager.getCacheDirectory();
_ = manager.getGitCacheDirectory();
_ = manager.getTemporaryDirectory();
}
manager.enqueueDependencyList(
@@ -6440,6 +6767,7 @@ pub const PackageManager = struct {
if (manager.pending_tasks > 0) {
if (root.dependencies.len > 0) {
_ = manager.getCacheDirectory();
_ = manager.getGitCacheDirectory();
_ = manager.getTemporaryDirectory();
}

@@ -6649,13 +6977,12 @@ pub const PackageManager = struct {

if (install_summary.success > 0) {
// it's confusing when it shows 3 packages and says it installed 1
Output.pretty("\n <green>{d}<r> packages<r> installed ", .{@max(
install_summary.success,
@truncate(
u32,
manager.package_json_updates.len,
),
)});
const count = @max(install_summary.success, @truncate(u32, manager.package_json_updates.len));
if (count == 1) {
Output.pretty("\n <green>1<r> package<r> installed ", .{});
} else {
Output.pretty("\n <green>{d}<r> packages<r> installed ", .{count});
}
Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp());
printed_timestamp = true;
Output.pretty("<r>\n", .{});
@@ -6670,7 +6997,11 @@ pub const PackageManager = struct {
}
}

Output.pretty("\n <r><b>{d}<r> packages removed ", .{manager.summary.remove});
if (manager.summary.remove == 1) {
Output.pretty("\n <r><b>1<r> package removed ", .{});
} else {
Output.pretty("\n <r><b>{d}<r> packages removed ", .{manager.summary.remove});
}
Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp());
printed_timestamp = true;
Output.pretty("<r>\n", .{});
@@ -6679,17 +7010,33 @@ pub const PackageManager = struct {

const count = @truncate(PackageID, manager.lockfile.packages.len);
if (count != install_summary.skipped) {
Output.pretty("Checked <green>{d} installs<r> across {d} packages <d>(no changes)<r> ", .{
install_summary.skipped,
count,
});
if (install_summary.skipped == 1 and count == 1) {
Output.pretty("Checked <green>1 install<r> across 1 package <d>(no changes)<r> ", .{});
} else if (install_summary.skipped == 1) {
Output.pretty("Checked <green>1 install<r> across {d} packages <d>(no changes)<r> ", .{
count,
});
} else if (count == 1) {
Output.pretty("Checked <green>{d} installs<r> across 1 package <d>(no changes)<r> ", .{
install_summary.skipped,
});
} else {
Output.pretty("Checked <green>{d} installs<r> across {d} packages <d>(no changes)<r> ", .{
install_summary.skipped,
count,
});
}
Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp());
printed_timestamp = true;
Output.pretty("<r>\n", .{});
} else {
Output.pretty("<r> <green>Done<r>! Checked {d} packages<r> <d>(no changes)<r> ", .{
install_summary.skipped,
});
if (install_summary.skipped == 1) {
Output.pretty("<r> <green>Done<r>! Checked 1 package<r> <d>(no changes)<r> ", .{});
} else {
Output.pretty("<r> <green>Done<r>! Checked {d} packages<r> <d>(no changes)<r> ", .{
install_summary.skipped,
});
}
Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp());
printed_timestamp = true;
Output.pretty("<r>\n", .{});
@@ -6697,7 +7044,11 @@ pub const PackageManager = struct {
}

if (install_summary.fail > 0) {
Output.prettyln("<r>Failed to install <red><b>{d}<r> packages\n", .{install_summary.fail});
if (install_summary.fail == 1) {
Output.prettyln("<r>Failed to install <red><b>1<r> package\n", .{});
} else {
Output.prettyln("<r>Failed to install <red><b>{d}<r> packages\n", .{install_summary.fail});
}
Output.flush();
}
}
@@ -1,11 +1,16 @@
const PackageManager = @import("./install.zig").PackageManager;
const Lockfile = @import("./lockfile.zig");
const Semver = @import("./semver.zig");
const ExternalString = Semver.ExternalString;
const String = Semver.String;
const SlicedString = Semver.SlicedString;
const std = @import("std");
const GitSHA = String;
const bun = @import("../bun.zig");
const string = @import("../string_types.zig").string;
const strings = @import("../bun.zig").strings;
const Environment = @import("../env.zig");
const Group = Semver.Query.Group;

pub const Repository = extern struct {
owner: String = String{},
@@ -46,6 +51,174 @@ pub const Repository = extern struct {
return try formatter.format(layout, opts, writer);
}

pub fn getGitHubURL(this: Repository, lockfile: *Lockfile, buf: *[bun.MAX_PATH_BYTES]u8) []u8 {
const github = "https://github.com/";
const owner = lockfile.str(this.owner);
const repo = lockfile.str(this.repo);
const committish = lockfile.str(this.committish);

var i: usize = 0;
std.mem.copy(u8, buf[i..], github);
i += github.len;

std.mem.copy(u8, buf[i..], owner);
i += owner.len;
buf[i] = '/';
i += 1;
std.mem.copy(u8, buf[i..], repo);
i += repo.len;
if (committish.len > 0) {
buf[i] = '#';
i += 1;
std.mem.copy(u8, buf[i..], committish);
i += committish.len;
}

return buf[0..i];
}

pub fn getURL(this: Repository, lockfile: *Lockfile, buf: *[bun.MAX_PATH_BYTES]u8) []u8 {
const owner = lockfile.str(this.owner);
const repo = lockfile.str(this.repo);
const committish = lockfile.str(this.committish);

var i: usize = 0;
std.mem.copy(u8, buf[i..], owner);
i += owner.len;
buf[i] = '/';
i += 1;
std.mem.copy(u8, buf[i..], repo);
i += repo.len;
if (committish.len > 0) {
buf[i] = '#';
i += 1;
std.mem.copy(u8, buf[i..], committish);
i += committish.len;
}

return buf[0..i];
}

pub fn getURLForClone(this: Repository, lockfile: *Lockfile, buf: *[bun.MAX_PATH_BYTES]u8) []u8 {
var url = this.getURL(lockfile, buf);

// replace ':' with '/' if it exists to make a valid url
if (strings.lastIndexOfChar(url, ':')) |j| {
if (url[j + 1] != '/') {
url[j] = '/';
}
}

return url;
}

pub fn getCacheDirectoryForGitHub(this: Repository, manager: *PackageManager, buf: *[bun.MAX_PATH_BYTES]u8) ![]u8 {
var url_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const url = this.getGitHubURL(manager.lockfile, &url_buf);

const url_hash = std.hash.Wyhash.hash(0, url);
const hex_fmt = bun.fmt.hexIntLower(url_hash);

const repo = manager.lockfile.str(this.repo);

return try std.fmt.bufPrint(buf, "{s}-{any}", .{ repo[0..@min(16, repo.len)], hex_fmt });
}

pub fn getCacheDirectory(this: Repository, manager: *PackageManager, buf: *[bun.MAX_PATH_BYTES]u8) ![]u8 {
var url_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const url = this.getURL(manager.lockfile, &url_buf);

const url_hash = std.hash.Wyhash.hash(0, url);
const hex_fmt = bun.fmt.hexIntLower(url_hash);

const repo = manager.lockfile.str(this.repo);

return try std.fmt.bufPrint(buf, "{s}-{any}", .{ repo[0..@min(16, repo.len)], hex_fmt });
}

pub fn getCachePathForGitHub(this: Repository, manager: *PackageManager, buf: *[bun.MAX_PATH_BYTES]u8) ![]u8 {
var url_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const url = this.getGitHubURL(manager.lockfile, &url_buf);

var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const path = try std.os.getFdPath(manager.getGitCacheDirectory().dir.fd, &path_buf);

const url_hash = std.hash.Wyhash.hash(0, url);
const hex_fmt = bun.fmt.hexIntLower(url_hash);

const repo = manager.lockfile.str(this.repo);

return try std.fmt.bufPrint(buf, "{s}/{s}-{any}", .{ path, repo[0..@min(16, repo.len)], hex_fmt });
}

pub fn getCachePath(this: Repository, manager: *PackageManager, buf: *[bun.MAX_PATH_BYTES]u8) ![]u8 {
var url_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const url = this.getURL(manager.lockfile, &url_buf);

var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const path = try std.os.getFdPath(manager.getGitCacheDirectory().dir.fd, &path_buf);

const url_hash = std.hash.Wyhash.hash(0, url);
const hex_fmt = bun.fmt.hexIntLower(url_hash);

const repo = manager.lockfile.str(this.repo);

return try std.fmt.bufPrint(buf, "{s}/{s}-{any}", .{ path, repo[0..@min(16, repo.len)], hex_fmt });
}

pub fn parse(input: *const SlicedString) !Repository {
var repo = Repository{};
const slice = input.slice;

// ignore "git+"
const i: usize = if (strings.indexOfChar(slice, '+')) |j| j + 1 else 0;
if (strings.indexOfChar(slice[i..], ':')) |_j| {
var j = i + _j + 1;
if (!strings.hasPrefixComptime(slice[j..], "//")) return error.InvalidGitURL;
j += 2;
if (strings.indexOfAny(slice[j..], ":/")) |k| {
j += k + 1;
if (strings.indexOfChar(slice[j..], '/')) |l| {
j += l;
repo.owner = String.init(input.buf, slice[i..j]);
} else return error.InvalidGitURL;
} else return error.InvalidGitURL;

if (strings.indexOfChar(slice[j..], '#')) |_k| {
var k = _k + j;
if (strings.endsWithComptime(slice[j + 1 .. k], ".git")) {
repo.repo = String.init(input.buf, slice[j + 1 .. k - ".git".len]);
} else {
repo.repo = String.init(input.buf, slice[j + 1 .. k]);
}
repo.committish = String.init(input.buf, slice[k + 1 ..]);
} else {
const end = if (strings.endsWithComptime(slice[j + 1 ..], ".git")) slice.len - ".git".len else slice.len;
repo.repo = String.init(input.buf, slice[j + 1 .. end]);
}
} else return error.InvalidGitURL;

return repo;
}

pub fn parseGitHub(input: *const SlicedString) !Repository {
var repo = Repository{};
// ignore "github:"
const i: usize = if (strings.indexOfChar(input.slice, ':')) |j| j + 1 else 0;
if (strings.indexOfChar(input.slice, '/')) |j| {
repo.owner = String.init(input.buf, input.slice[i..j]);
if (strings.indexOfChar(input.slice[j + 1 ..], '#')) |k| {
repo.repo = String.init(input.buf, input.slice[j + 1 .. k]);
repo.committish = String.init(input.buf, input.slice[k + 1 ..]);
} else {
repo.repo = String.init(input.buf, input.slice[j + 1 ..]);
}
} else {
return error.InvalidGitURL;
}
return repo;
}

pub const Formatter = struct {
label: []const u8 = "",
buf: []const u8,
@@ -26,8 +26,7 @@ pub const Resolution = extern struct {
return switch (lhs.tag) {
.npm => lhs.value.npm.order(rhs.value.npm, lhs_buf, rhs_buf),
.local_tarball => lhs.value.local_tarball.order(&rhs.value.local_tarball, lhs_buf, rhs_buf),
.git_ssh => lhs.value.git_ssh.order(&rhs.value.git_ssh, lhs_buf, rhs_buf),
.git_http => lhs.value.git_http.order(&rhs.value.git_http, lhs_buf, rhs_buf),
.git => lhs.value.git.order(&rhs.value.git, lhs_buf, rhs_buf),
.folder => lhs.value.folder.order(&rhs.value.folder, lhs_buf, rhs_buf),
.remote_tarball => lhs.value.remote_tarball.order(&rhs.value.remote_tarball, lhs_buf, rhs_buf),
.workspace => lhs.value.workspace.order(&rhs.value.workspace, lhs_buf, rhs_buf),
@@ -43,8 +42,7 @@ pub const Resolution = extern struct {
switch (this.tag) {
.npm => this.value.npm.count(buf, Builder, builder),
.local_tarball => builder.count(this.value.local_tarball.slice(buf)),
.git_ssh => builder.count(this.value.git_ssh.slice(buf)),
.git_http => builder.count(this.value.git_http.slice(buf)),
.git => this.value.git.count(buf, Builder, builder),
.folder => builder.count(this.value.folder.slice(buf)),
.remote_tarball => builder.count(this.value.remote_tarball.slice(buf)),
.workspace => builder.count(this.value.workspace.slice(buf)),
@@ -66,11 +64,8 @@ pub const Resolution = extern struct {
.local_tarball => Resolution.Value{
.local_tarball = builder.append(String, this.value.local_tarball.slice(buf)),
},
.git_ssh => Resolution.Value{
.git_ssh = builder.append(String, this.value.git_ssh.slice(buf)),
},
.git_http => Resolution.Value{
.git_http = builder.append(String, this.value.git_http.slice(buf)),
.git => Resolution.Value{
.git = this.value.git.clone(buf, Builder, builder),
},
.folder => Resolution.Value{
.folder = builder.append(String, this.value.folder.slice(buf)),
@@ -123,13 +118,8 @@ pub const Resolution = extern struct {
lhs_string_buf,
rhs_string_buf,
),
.git_ssh => lhs.value.git_ssh.eql(
rhs.value.git_ssh,
lhs_string_buf,
rhs_string_buf,
),
.git_http => lhs.value.git_http.eql(
rhs.value.git_http,
.git => lhs.value.git.eql(
rhs.value.git,
lhs_string_buf,
rhs_string_buf,
),
@@ -183,8 +173,7 @@ pub const Resolution = extern struct {
switch (formatter.resolution.tag) {
.npm => try writer.writeAll(formatter.resolution.value.npm.url.slice(formatter.buf)),
.local_tarball => try writer.writeAll(formatter.resolution.value.local_tarball.slice(formatter.buf)),
.git_ssh => try std.fmt.format(writer, "git+ssh://{s}", .{formatter.resolution.value.git_ssh.slice(formatter.buf)}),
.git_http => try std.fmt.format(writer, "https://{s}", .{formatter.resolution.value.git_http.slice(formatter.buf)}),
.git => try formatter.resolution.value.git.formatAs("git", formatter.buf, layout, opts, writer),
.folder => try writer.writeAll(formatter.resolution.value.folder.slice(formatter.buf)),
.remote_tarball => try writer.writeAll(formatter.resolution.value.remote_tarball.slice(formatter.buf)),
.github => try formatter.resolution.value.github.formatAs("github", formatter.buf, layout, opts, writer),
@@ -205,8 +194,7 @@ pub const Resolution = extern struct {
switch (formatter.resolution.tag) {
.npm => try formatter.resolution.value.npm.version.fmt(formatter.buf).format(layout, opts, writer),
.local_tarball => try writer.writeAll(formatter.resolution.value.local_tarball.slice(formatter.buf)),
.git_ssh => try std.fmt.format(writer, "git+ssh://{s}", .{formatter.resolution.value.git_ssh.slice(formatter.buf)}),
.git_http => try std.fmt.format(writer, "https://{s}", .{formatter.resolution.value.git_http.slice(formatter.buf)}),
.git => try formatter.resolution.value.git.formatAs("git", formatter.buf, layout, opts, writer),
.folder => try writer.writeAll(formatter.resolution.value.folder.slice(formatter.buf)),
.remote_tarball => try writer.writeAll(formatter.resolution.value.remote_tarball.slice(formatter.buf)),
.github => try formatter.resolution.value.github.formatAs("github", formatter.buf, layout, opts, writer),
@@ -228,8 +216,7 @@ pub const Resolution = extern struct {
/// File path to a tarball relative to the package root
local_tarball: String,

git_ssh: String,
git_http: String,
git: Repository,

folder: String,

@@ -258,8 +245,7 @@ pub const Resolution = extern struct {
github = 16,
gitlab = 24,

git_ssh = 32,
git_http = 33,
git = 33,

symlink = 64,
@@ -11,6 +11,7 @@ const Features = @import("../install.zig").Features;
const IdentityContext = @import("../../identity_context.zig").IdentityContext;
const strings = @import("bun").strings;
const Resolution = @import("../resolution.zig").Resolution;
const Repository = @import("../repository.zig").Repository;
const String = @import("../semver.zig").String;
const Semver = @import("../semver.zig");
const bun = @import("bun");
@@ -70,11 +71,38 @@ pub const FolderResolution = union(Tag) {
pub fn count(_: @This(), comptime Builder: type, _: Builder, _: JSAst.Expr) void {}
};

pub const GitCacheFolderResolver = struct {
repository: Repository,
version_tag: Dependency.Version.Tag,

pub fn resolve(this: @This(), comptime Builder: type, _: Builder, _: JSAst.Expr) !Resolution {
return Resolution{
.tag = if (this.version_tag == .git) Resolution.Tag.git else Resolution.Tag.github,
.value = .{
.git = this.repository,
},
};
}

pub fn count(_: @This(), comptime Builder: type, _: Builder, _: JSAst.Expr) void {}
};

pub fn normalizePackageJSONPath(global_or_relative: GlobalOrRelative, joined: *[bun.MAX_PATH_BYTES]u8, non_normalized_path: string) [2]string {
var abs: string = "";
var rel: string = "";
// We consider it valid if there is a package.json in the folder
const normalized = std.mem.trimRight(u8, normalize(non_normalized_path), std.fs.path.sep_str);
const temp_normalized = std.mem.trimRight(u8, normalize(non_normalized_path), std.fs.path.sep_str);
var normalized_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
std.mem.copy(u8, &normalized_buf, temp_normalized);
var normalized = normalized_buf[0..temp_normalized.len];

if (global_or_relative == .git_cache_folder and normalized[0] != '/') {
var temp_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
temp_path_buf[0] = '/';
std.mem.copy(u8, temp_path_buf[1..], normalized);
normalized.len += 1;
std.mem.copy(u8, normalized, temp_path_buf[0..normalized.len]);
}

if (strings.startsWithChar(normalized, '.')) {
var tempcat: [bun.MAX_PATH_BYTES]u8 = undefined;
@@ -117,6 +145,39 @@ pub const FolderResolution = union(Tag) {
return .{ abs, rel };
}

pub fn readPackageJSONFromBytes(
manager: *PackageManager,
abs: []const u8,
json_bytes: string,
version: Dependency.Version,
comptime features: Features,
comptime ResolverType: type,
resolver: ResolverType,
) !Lockfile.Package {
var package = Lockfile.Package{};
var body = Npm.Registry.BodyPool.get(manager.allocator);
defer Npm.Registry.BodyPool.release(body);

const source = logger.Source.initPathString(abs, json_bytes);

try Lockfile.Package.parse(
manager.lockfile,
&package,
manager.allocator,
manager.log,
source,
ResolverType,
resolver,
features,
);

if (manager.lockfile.getPackageID(package.name_hash, version, package.resolution)) |existing_id| {
return manager.lockfile.packages.get(existing_id);
}

return manager.lockfile.appendPackage(package) catch unreachable;
}

pub fn readPackageJSONFromDisk(
manager: *PackageManager,
joinedZ: [:0]const u8,
@@ -162,8 +223,76 @@ pub const FolderResolution = union(Tag) {
global: []const u8,
relative: void,
cache_folder: []const u8,
git_cache_folder: []const u8,
};

pub fn getOrPutWithPackageJSONBytes(global_or_relative: GlobalOrRelative, version: Dependency.Version, non_normalized_path: string, manager: *PackageManager, json_bytes: string) FolderResolution {
var joined: [bun.MAX_PATH_BYTES]u8 = undefined;
const paths = normalizePackageJSONPath(global_or_relative, &joined, non_normalized_path);
const abs = paths[0];
const rel = paths[1];

var entry = manager.folders.getOrPut(manager.allocator, hash(abs)) catch unreachable;
if (entry.found_existing) {
if (global_or_relative != .git_cache_folder or entry.value_ptr.*.err != error.MissingPackageJSON) {
return entry.value_ptr.*;
}
}

const package: Lockfile.Package = switch (global_or_relative) {
.global => readPackageJSONFromBytes(
manager,
abs,
json_bytes,
version,
Features.link,
SymlinkResolver,
SymlinkResolver{ .folder_path = non_normalized_path },
),
.relative => readPackageJSONFromBytes(
manager,
abs,
json_bytes,
version,
Features.folder,
Resolver,
Resolver{ .folder_path = rel },
),
.cache_folder => readPackageJSONFromBytes(
manager,
abs,
json_bytes,
version,
Features.npm,
CacheFolderResolver,
CacheFolderResolver{ .version = version.value.npm.toVersion() },
),
.git_cache_folder => readPackageJSONFromBytes(
manager,
abs,
json_bytes,
version,
Features.git,
GitCacheFolderResolver,
GitCacheFolderResolver{
.repository = if (version.tag == .git) version.value.git else version.value.github,
.version_tag = version.tag,
},
),
} catch |err| {
if (err == error.FileNotFound) {
entry.value_ptr.* = .{ .err = error.MissingPackageJSON };
} else {
entry.value_ptr.* = .{ .err = err };
}

return entry.value_ptr.*;
};

entry.value_ptr.* = .{ .package_id = package.meta.id };
return FolderResolution{ .new_package_id = package.meta.id };
}

pub fn getOrPut(global_or_relative: GlobalOrRelative, version: Dependency.Version, non_normalized_path: string, manager: *PackageManager) FolderResolution {
var joined: [bun.MAX_PATH_BYTES]u8 = undefined;
const paths = normalizePackageJSONPath(global_or_relative, &joined, non_normalized_path);
@@ -171,7 +300,11 @@ pub const FolderResolution = union(Tag) {
const rel = paths[1];

var entry = manager.folders.getOrPut(manager.allocator, hash(abs)) catch unreachable;
if (entry.found_existing) return entry.value_ptr.*;
if (entry.found_existing) {
if (global_or_relative != .git_cache_folder or entry.value_ptr.*.err != error.MissingPackageJSON) {
return entry.value_ptr.*;
}
}

joined[abs.len] = 0;
var joinedZ: [:0]u8 = joined[0..abs.len :0];
@@ -203,6 +336,18 @@ pub const FolderResolution = union(Tag) {
CacheFolderResolver,
CacheFolderResolver{ .version = version.value.npm.toVersion() },
),
.git_cache_folder => readPackageJSONFromDisk(
manager,
joinedZ,
abs,
version,
Features.git,
GitCacheFolderResolver,
GitCacheFolderResolver{
.repository = if (version.tag == .git) version.value.git else version.value.github,
.version_tag = version.tag,
},
),
} catch |err| {
if (err == error.FileNotFound) {
entry.value_ptr.* = .{ .err = error.MissingPackageJSON };