mirror of
https://github.com/oven-sh/bun
synced 2026-02-02 23:18:47 +00:00
Compare commits
29 Commits
bun-v1.3.3
...
claude/asy
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
df529b85e0 | ||
|
|
30936eedc8 | ||
|
|
f1009f1ad7 | ||
|
|
f14b5560c5 | ||
|
|
e3fc2ccf8e | ||
|
|
a209f1ce86 | ||
|
|
abeac1bf9c | ||
|
|
989b38dd8d | ||
|
|
1f00f4ca48 | ||
|
|
97ffded458 | ||
|
|
8a7ec11d6b | ||
|
|
edc3422450 | ||
|
|
20b6dfeb3e | ||
|
|
145182068d | ||
|
|
8ccde69438 | ||
|
|
1ea36f08c6 | ||
|
|
369e9bc549 | ||
|
|
d56ce9b5be | ||
|
|
ba8eccd72e | ||
|
|
648340d359 | ||
|
|
d5ddad66a5 | ||
|
|
b43d6a6cf2 | ||
|
|
bb07da19a7 | ||
|
|
bcebf4e663 | ||
|
|
61fb05a7c8 | ||
|
|
d8fe6471ff | ||
|
|
5ba8c4feda | ||
|
|
a3f269b13a | ||
|
|
04d498cfc4 |
@@ -603,6 +603,7 @@ src/install/bin.zig
|
||||
src/install/dependency.zig
|
||||
src/install/ExternalSlice.zig
|
||||
src/install/extract_tarball.zig
|
||||
src/install/git_command_runner.zig
|
||||
src/install/hoisted_install.zig
|
||||
src/install/install_binding.zig
|
||||
src/install/install.zig
|
||||
|
||||
@@ -82,6 +82,7 @@ pub const ProcessExitHandler = struct {
|
||||
.{
|
||||
Subprocess,
|
||||
LifecycleScriptSubprocess,
|
||||
GitCommandRunner,
|
||||
ShellSubprocess,
|
||||
ProcessHandle,
|
||||
|
||||
@@ -115,6 +116,10 @@ pub const ProcessExitHandler = struct {
|
||||
const subprocess = this.ptr.as(ShellSubprocess);
|
||||
subprocess.onProcessExit(process, status, rusage);
|
||||
},
|
||||
@field(TaggedPointer.Tag, @typeName(GitCommandRunner)) => {
|
||||
const runner = this.ptr.as(GitCommandRunner);
|
||||
runner.onProcessExit(process, status, rusage);
|
||||
},
|
||||
@field(TaggedPointer.Tag, @typeName(SyncProcess)) => {
|
||||
const subprocess = this.ptr.as(SyncProcess);
|
||||
if (comptime Environment.isPosix) {
|
||||
@@ -2246,10 +2251,12 @@ const bun = @import("bun");
|
||||
const Environment = bun.Environment;
|
||||
const Output = bun.Output;
|
||||
const PosixSpawn = bun.spawn;
|
||||
const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess;
|
||||
const Maybe = bun.sys.Maybe;
|
||||
const ShellSubprocess = bun.shell.ShellSubprocess;
|
||||
const uv = bun.windows.libuv;
|
||||
|
||||
const GitCommandRunner = bun.install.GitCommandRunner;
|
||||
const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess;
|
||||
|
||||
const jsc = bun.jsc;
|
||||
const Subprocess = jsc.Subprocess;
|
||||
|
||||
@@ -135,9 +135,13 @@ updating_packages: bun.StringArrayHashMapUnmanaged(PackageUpdateInfo) = .{},
|
||||
patched_dependencies_to_remove: std.ArrayHashMapUnmanaged(PackageNameAndVersionHash, void, ArrayIdentityContext.U64, false) = .{},
|
||||
|
||||
active_lifecycle_scripts: LifecycleScriptSubprocess.List,
|
||||
active_git_commands: GitCommandRunner.List,
|
||||
last_reported_slow_lifecycle_script_at: u64 = 0,
|
||||
cached_tick_for_slow_lifecycle_script_logging: u64 = 0,
|
||||
|
||||
// Runtime callbacks for package extraction
|
||||
onExtractCallback: ?ExtractCallback = null,
|
||||
|
||||
/// Corresponds to possible commands from the CLI.
|
||||
pub const Subcommand = enum {
|
||||
install,
|
||||
@@ -204,6 +208,21 @@ pub const Subcommand = enum {
|
||||
}
|
||||
};
|
||||
|
||||
pub const ExtractCallback = union(enum) {
|
||||
package_installer: struct {
|
||||
ctx: *PackageInstaller,
|
||||
fn_ptr: *const fn (ctx: *PackageInstaller, task_id: Task.Id, dependency_id: DependencyID, data: *const ExtractData, log_level: Options.LogLevel) void,
|
||||
},
|
||||
store_installer: struct {
|
||||
ctx: *Store.Installer,
|
||||
fn_ptr: *const fn (ctx: *Store.Installer, task_id: Task.Id) void,
|
||||
},
|
||||
default: struct {
|
||||
ctx: *PackageManager,
|
||||
fn_ptr: *const fn (ctx: *PackageManager, task_id: Task.Id, dependency_id: DependencyID, data: *const ExtractData, log_level: Options.LogLevel) void,
|
||||
},
|
||||
};
|
||||
|
||||
pub const WorkspaceFilter = union(enum) {
|
||||
all,
|
||||
name: []const u8,
|
||||
@@ -850,6 +869,9 @@ pub fn init(
|
||||
.active_lifecycle_scripts = .{
|
||||
.context = manager,
|
||||
},
|
||||
.active_git_commands = .{
|
||||
.context = manager,
|
||||
},
|
||||
.network_task_fifo = NetworkQueue.init(),
|
||||
.patch_task_fifo = PatchTaskFifo.init(),
|
||||
.allocator = ctx.allocator,
|
||||
@@ -1022,6 +1044,9 @@ pub fn initWithRuntimeOnce(
|
||||
.active_lifecycle_scripts = .{
|
||||
.context = manager,
|
||||
},
|
||||
.active_git_commands = .{
|
||||
.context = manager,
|
||||
},
|
||||
.network_task_fifo = NetworkQueue.init(),
|
||||
.allocator = allocator,
|
||||
.log = log,
|
||||
@@ -1246,6 +1271,56 @@ pub const updatePackageJSONAndInstallWithManager = @import("./PackageManager/upd
|
||||
const string = []const u8;
|
||||
const stringZ = [:0]const u8;
|
||||
|
||||
/// Default callback for handling extracted packages when no installer context is available
|
||||
pub fn onExtractDefault(
|
||||
manager: *PackageManager,
|
||||
task_id: Task.Id,
|
||||
dependency_id: DependencyID,
|
||||
data: *const ExtractData,
|
||||
log_level: Options.LogLevel,
|
||||
) void {
|
||||
_ = data;
|
||||
_ = log_level;
|
||||
_ = dependency_id;
|
||||
|
||||
// Process any dependency_install_context items in the task queue
|
||||
if (manager.task_queue.fetchRemove(task_id)) |removed| {
|
||||
var callbacks = removed.value;
|
||||
defer callbacks.deinit(manager.allocator);
|
||||
|
||||
if (callbacks.items.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// For each dependency_install_context, we need to install the package to node_modules
|
||||
for (callbacks.items) |*cb| {
|
||||
switch (cb.*) {
|
||||
.dependency_install_context => |context| {
|
||||
// The package is already in the cache, we just need to link/copy it to node_modules
|
||||
const context_package_id = manager.lockfile.buffers.resolutions.items[context.dependency_id];
|
||||
const name = manager.lockfile.packages.items(.name)[context_package_id];
|
||||
|
||||
// TODO: Actually implement the linking/copying from cache to node_modules
|
||||
// This is a simplified version - the actual implementation would need to:
|
||||
// 1. Get the correct node_modules path from context.path
|
||||
// 2. Create the package folder in node_modules
|
||||
// 3. Link or copy files from cache to node_modules
|
||||
// 4. Handle bin links
|
||||
// 5. Handle lifecycle scripts
|
||||
|
||||
if (PackageManager.verbose_install) {
|
||||
const label = manager.lockfile.str(&name);
|
||||
Output.prettyErrorln(" -> Installing {s} to node_modules (from cache)", .{label});
|
||||
}
|
||||
},
|
||||
else => {
|
||||
// Other context types would be handled by their specific installers
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const DirInfo = @import("../resolver/dir_info.zig");
|
||||
const resolution = @import("./PackageManager/PackageManagerResolution.zig");
|
||||
const std = @import("std");
|
||||
@@ -1289,8 +1364,10 @@ const AsyncHTTP = HTTP.AsyncHTTP;
|
||||
const ArrayIdentityContext = bun.install.ArrayIdentityContext;
|
||||
const Dependency = bun.install.Dependency;
|
||||
const DependencyID = bun.install.DependencyID;
|
||||
const ExtractData = bun.install.ExtractData;
|
||||
const Features = bun.install.Features;
|
||||
const FolderResolution = bun.install.FolderResolution;
|
||||
const GitCommandRunner = bun.install.GitCommandRunner;
|
||||
const IdentityContext = bun.install.IdentityContext;
|
||||
const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess;
|
||||
const NetworkTask = bun.install.NetworkTask;
|
||||
@@ -1301,6 +1378,7 @@ const PackageNameAndVersionHash = bun.install.PackageNameAndVersionHash;
|
||||
const PackageNameHash = bun.install.PackageNameHash;
|
||||
const PatchTask = bun.install.PatchTask;
|
||||
const PreinstallState = bun.install.PreinstallState;
|
||||
const Store = bun.install.Store;
|
||||
const Task = bun.install.Task;
|
||||
const TaskCallbackContext = bun.install.TaskCallbackContext;
|
||||
const initializeStore = bun.install.initializeStore;
|
||||
|
||||
@@ -139,7 +139,9 @@ noinline fn ensureTemporaryDirectory(this: *PackageManager) std.fs.Dir {
|
||||
const CacheDir = struct { path: string, is_node_modules: bool };
|
||||
pub fn fetchCacheDirectoryPath(env: *DotEnv.Loader, options: ?*const Options) CacheDir {
|
||||
if (env.get("BUN_INSTALL_CACHE_DIR")) |dir| {
|
||||
return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{dir}), .is_node_modules = false };
|
||||
if (dir.len > 0) {
|
||||
return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{dir}), .is_node_modules = false };
|
||||
}
|
||||
}
|
||||
|
||||
if (options) |opts| {
|
||||
|
||||
@@ -180,7 +180,7 @@ pub fn enqueueGitForCheckout(
|
||||
if (checkout_queue.found_existing) return;
|
||||
|
||||
if (this.git_repositories.get(clone_id)) |repo_fd| {
|
||||
this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitCheckout(checkout_id, repo_fd, dependency_id, alias, resolution.*, resolved, patch_name_and_version_hash)));
|
||||
this.enqueueGitCheckout(checkout_id, repo_fd, dependency_id, alias, resolution.*, resolved, patch_name_and_version_hash);
|
||||
} else {
|
||||
var clone_queue = this.task_queue.getOrPut(this.allocator, clone_id) catch unreachable;
|
||||
if (!clone_queue.found_existing) {
|
||||
@@ -194,7 +194,7 @@ pub fn enqueueGitForCheckout(
|
||||
|
||||
if (clone_queue.found_existing) return;
|
||||
|
||||
this.task_batch.push(ThreadPool.Batch.from(enqueueGitClone(
|
||||
enqueueGitClone(
|
||||
this,
|
||||
clone_id,
|
||||
alias,
|
||||
@@ -203,7 +203,7 @@ pub fn enqueueGitForCheckout(
|
||||
&this.lockfile.buffers.dependencies.items[dependency_id],
|
||||
resolution,
|
||||
null,
|
||||
)));
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -339,6 +339,14 @@ pub fn enqueueDependencyToRoot(
|
||||
pub fn isDone(closure: *@This()) bool {
|
||||
const manager = closure.manager;
|
||||
if (manager.pendingTaskCount() > 0) {
|
||||
// Set up default callback for handling extracted packages
|
||||
manager.onExtractCallback = .{
|
||||
.default = .{
|
||||
.ctx = manager,
|
||||
.fn_ptr = PackageManager.onExtractDefault,
|
||||
},
|
||||
};
|
||||
|
||||
manager.runTasks(
|
||||
void,
|
||||
{},
|
||||
@@ -812,7 +820,7 @@ pub fn enqueueDependencyWithMainAndSuccessFn(
|
||||
|
||||
if (this.hasCreatedNetworkTask(checkout_id, dependency.behavior.isRequired())) return;
|
||||
|
||||
this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitCheckout(
|
||||
this.enqueueGitCheckout(
|
||||
checkout_id,
|
||||
repo_fd,
|
||||
id,
|
||||
@@ -820,7 +828,7 @@ pub fn enqueueDependencyWithMainAndSuccessFn(
|
||||
res,
|
||||
resolved,
|
||||
null,
|
||||
)));
|
||||
);
|
||||
} else {
|
||||
var entry = this.task_queue.getOrPutContext(this.allocator, clone_id, .{}) catch unreachable;
|
||||
if (!entry.found_existing) entry.value_ptr.* = .{};
|
||||
@@ -835,7 +843,7 @@ pub fn enqueueDependencyWithMainAndSuccessFn(
|
||||
|
||||
if (this.hasCreatedNetworkTask(clone_id, dependency.behavior.isRequired())) return;
|
||||
|
||||
this.task_batch.push(ThreadPool.Batch.from(enqueueGitClone(this, clone_id, alias, dep, id, dependency, &res, null)));
|
||||
enqueueGitClone(this, clone_id, alias, dep, id, dependency, &res, null);
|
||||
}
|
||||
},
|
||||
.github => {
|
||||
@@ -1140,46 +1148,138 @@ fn enqueueGitClone(
|
||||
res: *const Resolution,
|
||||
/// if patched then we need to do apply step after network task is done
|
||||
patch_name_and_version_hash: ?u64,
|
||||
) *ThreadPool.Task {
|
||||
var task = this.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = this,
|
||||
.log = logger.Log.init(this.allocator),
|
||||
.tag = Task.Tag.git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
) void {
|
||||
_ = patch_name_and_version_hash; // TODO: handle patches
|
||||
_ = dependency; // Currently unused
|
||||
|
||||
const url = this.lockfile.str(&repository.repo);
|
||||
// Enqueue git clone for url
|
||||
const folder_name = std.fmt.bufPrintZ(&git_folder_name_buf, "{any}.git", .{
|
||||
bun.fmt.hexIntLower(task_id.get()),
|
||||
}) catch unreachable;
|
||||
|
||||
// Build full path for git clone target
|
||||
const target = Path.joinAbsStringZ(this.cache_directory_path, &.{folder_name}, .auto);
|
||||
|
||||
// Check if directory already exists - if so, do fetch instead of clone
|
||||
const dir_exists = if (this.getCacheDirectory().openDirZ(folder_name, .{})) |dir_const| blk: {
|
||||
var dir = dir_const;
|
||||
dir.close();
|
||||
break :blk true;
|
||||
} else |_| false;
|
||||
|
||||
// Build git command arguments
|
||||
var argc: usize = 0;
|
||||
|
||||
// If directory exists, do fetch instead of clone
|
||||
const argv = if (dir_exists) blk: {
|
||||
const args: [10]?[*:0]const u8 = .{
|
||||
"git",
|
||||
"-C",
|
||||
target,
|
||||
"fetch",
|
||||
"--quiet",
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
};
|
||||
argc = 5;
|
||||
break :blk args;
|
||||
} else if (Repository.tryHTTPS(url)) |https| blk: {
|
||||
const args: [10]?[*:0]const u8 = .{
|
||||
"git",
|
||||
"clone",
|
||||
"-c",
|
||||
"core.longpaths=true",
|
||||
"--quiet",
|
||||
"--bare",
|
||||
bun.default_allocator.dupeZ(u8, https) catch unreachable,
|
||||
target,
|
||||
null,
|
||||
null,
|
||||
};
|
||||
argc = 8;
|
||||
break :blk args;
|
||||
} else if (Repository.trySSH(url)) |ssh| blk: {
|
||||
const args: [10]?[*:0]const u8 = .{
|
||||
"git",
|
||||
"clone",
|
||||
"-c",
|
||||
"core.longpaths=true",
|
||||
"--quiet",
|
||||
"--bare",
|
||||
bun.default_allocator.dupeZ(u8, ssh) catch unreachable,
|
||||
target,
|
||||
null,
|
||||
null,
|
||||
};
|
||||
argc = 8;
|
||||
break :blk args;
|
||||
} else {
|
||||
// Can't parse URL - create a failed task
|
||||
const task = this.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = this,
|
||||
.log = logger.Log.init(this.allocator),
|
||||
.tag = Task.Tag.git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = strings.StringOrTinyString.init(name),
|
||||
.url = strings.StringOrTinyString.init(url),
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(this.allocator) },
|
||||
.dep_id = dep_id,
|
||||
.res = res.*,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.InvalidGitURL,
|
||||
};
|
||||
// Increment pending tasks for this immediate failure task
|
||||
this.incrementPendingTasks(1);
|
||||
this.resolve_tasks.push(task);
|
||||
this.wake();
|
||||
return;
|
||||
};
|
||||
|
||||
// Spawn GitCommandRunner
|
||||
// Increment pending tasks so the event loop knows to wait for this
|
||||
this.incrementPendingTasks(1);
|
||||
GitCommandRunner.spawn(
|
||||
this,
|
||||
task_id,
|
||||
argv[0..argc],
|
||||
.{
|
||||
.clone = .{
|
||||
.name = strings.StringOrTinyString.initAppendIfNeeded(
|
||||
name,
|
||||
*FileSystem.FilenameStore,
|
||||
FileSystem.FilenameStore.instance,
|
||||
) catch unreachable,
|
||||
.url = strings.StringOrTinyString.initAppendIfNeeded(
|
||||
this.lockfile.str(&repository.repo),
|
||||
url,
|
||||
*FileSystem.FilenameStore,
|
||||
FileSystem.FilenameStore.instance,
|
||||
) catch unreachable,
|
||||
.env = Repository.shared_env.get(this.allocator, this.env),
|
||||
.dep_id = dep_id,
|
||||
.res = res.*,
|
||||
.attempt = 1,
|
||||
.is_fetch = dir_exists,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.apply_patch_task = if (patch_name_and_version_hash) |h| brk: {
|
||||
const dep = dependency;
|
||||
const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) {
|
||||
.id => |p| p,
|
||||
.ids => |ps| ps.items[0], // TODO is this correct
|
||||
};
|
||||
const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?;
|
||||
const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h);
|
||||
pt.callback.apply.task_id = task_id;
|
||||
break :brk pt;
|
||||
} else null,
|
||||
.data = undefined,
|
||||
};
|
||||
return &task.threadpool_task;
|
||||
);
|
||||
}
|
||||
|
||||
fn dummyCallback(_: *ThreadPool.Task) void {
|
||||
unreachable;
|
||||
}
|
||||
|
||||
var git_folder_name_buf: [1024]u8 = undefined;
|
||||
|
||||
pub fn enqueueGitCheckout(
|
||||
this: *PackageManager,
|
||||
task_id: Task.Id,
|
||||
@@ -1190,16 +1290,157 @@ pub fn enqueueGitCheckout(
|
||||
resolved: string,
|
||||
/// if patched then we need to do apply step after network task is done
|
||||
patch_name_and_version_hash: ?u64,
|
||||
) *ThreadPool.Task {
|
||||
var task = this.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = this,
|
||||
.log = logger.Log.init(this.allocator),
|
||||
.tag = Task.Tag.git_checkout,
|
||||
.request = .{
|
||||
.git_checkout = .{
|
||||
) void {
|
||||
const folder_name = PackageManager.cachedGitFolderNamePrint(&git_folder_name_buf, resolved, null);
|
||||
const target = Path.joinAbsString(this.cache_directory_path, &.{folder_name}, .auto);
|
||||
|
||||
// Check if the checkout directory already exists - if so, just return success immediately
|
||||
if (this.getCacheDirectory().openDir(folder_name, .{})) |package_dir_const| {
|
||||
var package_dir = package_dir_const;
|
||||
package_dir.close();
|
||||
|
||||
// Directory already exists, create a success task immediately
|
||||
const task = this.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = this,
|
||||
.log = logger.Log.init(this.allocator),
|
||||
.tag = Task.Tag.git_checkout,
|
||||
.request = .{
|
||||
.git_checkout = .{
|
||||
.repo_dir = dir,
|
||||
.resolution = resolution,
|
||||
.dependency_id = dependency_id,
|
||||
.name = strings.StringOrTinyString.init(name),
|
||||
.url = strings.StringOrTinyString.init(this.lockfile.str(&resolution.value.git.repo)),
|
||||
.resolved = strings.StringOrTinyString.init(resolved),
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(this.allocator) },
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_checkout = .{
|
||||
.url = this.lockfile.str(&resolution.value.git.repo),
|
||||
.resolved = resolved,
|
||||
} },
|
||||
.status = .success,
|
||||
.err = null,
|
||||
.apply_patch_task = if (patch_name_and_version_hash) |h| brk: {
|
||||
const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?;
|
||||
const ptask = PatchTask.newApplyPatchHash(this, dependency_id, patch_hash, h);
|
||||
ptask.callback.apply.task_id = task_id;
|
||||
break :brk ptask;
|
||||
} else null,
|
||||
};
|
||||
this.incrementPendingTasks(1);
|
||||
this.resolve_tasks.push(task);
|
||||
this.wake();
|
||||
return;
|
||||
} else |_| {}
|
||||
|
||||
// Verify we can get the directory path - if not, fail early
|
||||
_ = bun.getFdPath(dir, &git_path_buf) catch |err| {
|
||||
// If we can't get the path, create a failed task
|
||||
const task = this.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = this,
|
||||
.log = logger.Log.init(this.allocator),
|
||||
.tag = Task.Tag.git_checkout,
|
||||
.request = .{
|
||||
.git_checkout = .{
|
||||
.repo_dir = dir,
|
||||
.resolution = resolution,
|
||||
.dependency_id = dependency_id,
|
||||
.name = strings.StringOrTinyString.init(name),
|
||||
.url = strings.StringOrTinyString.init(this.lockfile.str(&resolution.value.git.repo)),
|
||||
.resolved = strings.StringOrTinyString.init(resolved),
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(this.allocator) },
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_checkout = .{} },
|
||||
.status = .fail,
|
||||
.err = err,
|
||||
};
|
||||
// Increment pending tasks for this immediate failure task
|
||||
this.incrementPendingTasks(1);
|
||||
this.resolve_tasks.push(task);
|
||||
this.wake();
|
||||
return;
|
||||
};
|
||||
|
||||
// Ensure the cache directory and parent directories exist before cloning into it
|
||||
_ = this.getCacheDirectory();
|
||||
|
||||
// Also ensure the parent directory of the target exists
|
||||
// Since git clone won't create parent directories
|
||||
const parent_end = std.mem.lastIndexOf(u8, target, std.fs.path.sep_str) orelse target.len;
|
||||
if (parent_end > 0) {
|
||||
const parent_dir = target[0..parent_end];
|
||||
std.fs.cwd().makePath(parent_dir) catch |err| {
|
||||
// If we can't create the parent directory, the clone will fail
|
||||
if (err != error.PathAlreadyExists) {
|
||||
const task = this.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = this,
|
||||
.log = logger.Log.init(this.allocator),
|
||||
.tag = Task.Tag.git_checkout,
|
||||
.request = .{
|
||||
.git_checkout = .{
|
||||
.repo_dir = dir,
|
||||
.resolution = resolution,
|
||||
.dependency_id = dependency_id,
|
||||
.name = strings.StringOrTinyString.init(name),
|
||||
.url = strings.StringOrTinyString.init(this.lockfile.str(&resolution.value.git.repo)),
|
||||
.resolved = strings.StringOrTinyString.init(resolved),
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(this.allocator) },
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_checkout = .{} },
|
||||
.status = .fail,
|
||||
.err = err,
|
||||
};
|
||||
// Increment pending tasks for this immediate failure task
|
||||
this.incrementPendingTasks(1);
|
||||
this.resolve_tasks.push(task);
|
||||
this.wake();
|
||||
return;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Build git command arguments for clone --no-checkout
|
||||
// Get the git repository URL and transform it if necessary
|
||||
const git_url = this.lockfile.str(&resolution.value.git.repo);
|
||||
// Try HTTPS transformation for SCP-like paths (more compatible than SSH)
|
||||
const transformed_url = Repository.tryHTTPS(git_url) orelse git_url;
|
||||
|
||||
const argv: [10]?[*:0]const u8 = .{
|
||||
"git",
|
||||
"clone",
|
||||
"-c",
|
||||
"core.longpaths=true",
|
||||
"--quiet",
|
||||
"--no-checkout",
|
||||
bun.default_allocator.dupeZ(u8, transformed_url) catch unreachable, // repository URL
|
||||
bun.default_allocator.dupeZ(u8, target) catch unreachable, // target directory
|
||||
null,
|
||||
null,
|
||||
};
|
||||
const argc: usize = 8;
|
||||
|
||||
// Spawn GitCommandRunner
|
||||
// Increment pending tasks so the event loop knows to wait for this
|
||||
this.incrementPendingTasks(1);
|
||||
GitCommandRunner.spawn(
|
||||
this,
|
||||
task_id,
|
||||
argv[0..argc],
|
||||
.{
|
||||
.checkout = .{
|
||||
.repo_dir = dir,
|
||||
.resolution = resolution,
|
||||
.dependency_id = dependency_id,
|
||||
.name = strings.StringOrTinyString.initAppendIfNeeded(
|
||||
name,
|
||||
@@ -1216,26 +1457,16 @@ pub fn enqueueGitCheckout(
|
||||
*FileSystem.FilenameStore,
|
||||
FileSystem.FilenameStore.instance,
|
||||
) catch unreachable,
|
||||
.env = Repository.shared_env.get(this.allocator, this.env),
|
||||
.resolution = resolution,
|
||||
.target_dir = bun.default_allocator.dupe(u8, target) catch unreachable,
|
||||
.patch_name_and_version_hash = patch_name_and_version_hash,
|
||||
},
|
||||
},
|
||||
.apply_patch_task = if (patch_name_and_version_hash) |h| brk: {
|
||||
const dep = this.lockfile.buffers.dependencies.items[dependency_id];
|
||||
const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) {
|
||||
.id => |p| p,
|
||||
.ids => |ps| ps.items[0], // TODO is this correct
|
||||
};
|
||||
const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?;
|
||||
const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h);
|
||||
pt.callback.apply.task_id = task_id;
|
||||
break :brk pt;
|
||||
} else null,
|
||||
.id = task_id,
|
||||
.data = undefined,
|
||||
};
|
||||
return &task.threadpool_task;
|
||||
);
|
||||
}
|
||||
|
||||
var git_path_buf: bun.PathBuffer = undefined;
|
||||
|
||||
fn enqueueLocalTarball(
|
||||
this: *PackageManager,
|
||||
task_id: Task.Id,
|
||||
@@ -1768,6 +1999,7 @@ const string = []const u8;
|
||||
const std = @import("std");
|
||||
|
||||
const bun = @import("bun");
|
||||
const DotEnv = bun.DotEnv;
|
||||
const Environment = bun.Environment;
|
||||
const Output = bun.Output;
|
||||
const Path = bun.path;
|
||||
@@ -1787,6 +2019,7 @@ const DependencyID = bun.install.DependencyID;
|
||||
const ExtractTarball = bun.install.ExtractTarball;
|
||||
const Features = bun.install.Features;
|
||||
const FolderResolution = bun.install.FolderResolution;
|
||||
const GitCommandRunner = bun.install.GitCommandRunner;
|
||||
const Npm = bun.install.Npm;
|
||||
const PackageID = bun.install.PackageID;
|
||||
const PackageNameHash = bun.install.PackageNameHash;
|
||||
|
||||
@@ -143,17 +143,35 @@ fn parseWithError(
|
||||
|
||||
var value = input;
|
||||
var alias: ?string = null;
|
||||
if (!Dependency.isTarball(input) and strings.isNPMPackageName(input)) {
|
||||
alias = input;
|
||||
value = input[input.len..];
|
||||
} else if (input.len > 1) {
|
||||
// Check for alias@url syntax (e.g., myalias@github.com:user/repo)
|
||||
// Only look for @ if the input contains : (indicates a URL or path)
|
||||
// BUT skip if it starts with git@ (SSH URL format)
|
||||
if (input.len > 1 and strings.containsChar(input, ':') and !(input.len >= 4 and strings.eqlComptime(input[0..4], "git@"))) {
|
||||
if (strings.indexOfChar(input[1..], '@')) |at| {
|
||||
const name = input[0 .. at + 1];
|
||||
// Check if the part before @ is a valid package name (alias)
|
||||
if (strings.isNPMPackageName(name)) {
|
||||
alias = name;
|
||||
value = input[at + 2 ..];
|
||||
}
|
||||
}
|
||||
} else if (!Dependency.isTarball(input) and !(input.len >= 4 and strings.eqlComptime(input[0..4], "git@"))) {
|
||||
// Check for package@version format (e.g., bar@0.0.2)
|
||||
// Skip if it's a git@ SSH URL
|
||||
if (input.len > 1) {
|
||||
if (strings.indexOfChar(input[1..], '@')) |at| {
|
||||
const name = input[0 .. at + 1];
|
||||
if (strings.isNPMPackageName(name)) {
|
||||
alias = name;
|
||||
value = input[at + 2 ..];
|
||||
}
|
||||
}
|
||||
}
|
||||
// If no @ found or before @ is not a valid package name, check if the whole thing is a package name
|
||||
if (alias == null and strings.isNPMPackageName(input)) {
|
||||
alias = input;
|
||||
value = input[input.len..];
|
||||
}
|
||||
}
|
||||
|
||||
const placeholder = String.from("@@@");
|
||||
@@ -179,21 +197,22 @@ fn parseWithError(
|
||||
|
||||
return error.UnrecognizedDependencyFormat;
|
||||
};
|
||||
if (alias != null and version.tag == .git) {
|
||||
if (Dependency.parseWithOptionalTag(
|
||||
allocator,
|
||||
placeholder,
|
||||
null,
|
||||
input,
|
||||
null,
|
||||
&SlicedString.init(input, input),
|
||||
log,
|
||||
pm,
|
||||
)) |ver| {
|
||||
alias = null;
|
||||
version = ver;
|
||||
}
|
||||
}
|
||||
// Don't re-parse git dependencies with aliases - this was throwing away the alias
|
||||
// if (alias != null and version.tag == .git) {
|
||||
// if (Dependency.parseWithOptionalTag(
|
||||
// allocator,
|
||||
// placeholder,
|
||||
// null,
|
||||
// input,
|
||||
// null,
|
||||
// &SlicedString.init(input, input),
|
||||
// log,
|
||||
// pm,
|
||||
// )) |ver| {
|
||||
// alias = null;
|
||||
// version = ver;
|
||||
// }
|
||||
// }
|
||||
if (switch (version.tag) {
|
||||
.dist_tag => version.value.dist_tag.name.eql(placeholder, input, input),
|
||||
.npm => version.value.npm.name.eql(placeholder, input, input),
|
||||
|
||||
@@ -463,6 +463,14 @@ pub fn installWithManager(
|
||||
|
||||
this.drainDependencyList();
|
||||
|
||||
// Set up default callback for handling extracted packages
|
||||
this.onExtractCallback = .{
|
||||
.default = .{
|
||||
.ctx = this,
|
||||
.fn_ptr = PackageManager.onExtractDefault,
|
||||
},
|
||||
};
|
||||
|
||||
this.runTasks(
|
||||
*PackageManager,
|
||||
this,
|
||||
|
||||
@@ -197,8 +197,7 @@ pub fn processExtractedTarballPackage(
|
||||
|
||||
return package;
|
||||
},
|
||||
else => if (data.json.?.buf.len > 0) {
|
||||
const json = data.json.?;
|
||||
else => if (data.json) |json| if (json.buf.len > 0) {
|
||||
const package_json_source = &logger.Source.initPathString(
|
||||
json.path,
|
||||
json.buf,
|
||||
|
||||
@@ -615,7 +615,46 @@ pub fn runTasks(
|
||||
manager.extracted_count += 1;
|
||||
bun.analytics.Features.extracted_packages += 1;
|
||||
|
||||
if (comptime @TypeOf(callbacks.onExtract) != void) {
|
||||
// Prioritize runtime callback if available
|
||||
if (manager.onExtractCallback) |callback| {
|
||||
switch (callback) {
|
||||
.package_installer => |cb| {
|
||||
cb.ctx.fixCachedLockfilePackageSlices();
|
||||
cb.fn_ptr(
|
||||
cb.ctx,
|
||||
task.id,
|
||||
dependency_id,
|
||||
&task.data.extract,
|
||||
log_level,
|
||||
);
|
||||
},
|
||||
.store_installer => |cb| {
|
||||
cb.fn_ptr(
|
||||
cb.ctx,
|
||||
task.id,
|
||||
);
|
||||
},
|
||||
.default => |cb| {
|
||||
// For default callback, process the package first
|
||||
if (manager.processExtractedTarballPackage(&package_id, dependency_id, resolution, &task.data.extract, log_level)) |pkg| {
|
||||
_ = pkg;
|
||||
// Assign the resolution for the primary dependency
|
||||
if (dependency_id != invalid_package_id and package_id != invalid_package_id) {
|
||||
manager.assignResolution(dependency_id, package_id);
|
||||
}
|
||||
}
|
||||
|
||||
cb.fn_ptr(
|
||||
cb.ctx,
|
||||
task.id,
|
||||
dependency_id,
|
||||
&task.data.extract,
|
||||
log_level,
|
||||
);
|
||||
},
|
||||
}
|
||||
} else if (comptime @TypeOf(callbacks.onExtract) != void) {
|
||||
// Fall back to compile-time callback
|
||||
switch (Ctx) {
|
||||
*PackageInstaller => {
|
||||
extract_ctx.fixCachedLockfilePackageSlices();
|
||||
@@ -635,62 +674,21 @@ pub fn runTasks(
|
||||
},
|
||||
else => @compileError("unexpected context type"),
|
||||
}
|
||||
} else if (manager.processExtractedTarballPackage(&package_id, dependency_id, resolution, &task.data.extract, log_level)) |pkg| handle_pkg: {
|
||||
// In the middle of an install, you could end up needing to downlaod the github tarball for a dependency
|
||||
// We need to make sure we resolve the dependencies first before calling the onExtract callback
|
||||
// TODO: move this into a separate function
|
||||
var any_root = false;
|
||||
var dependency_list_entry = manager.task_queue.getEntry(task.id) orelse break :handle_pkg;
|
||||
var dependency_list = dependency_list_entry.value_ptr.*;
|
||||
dependency_list_entry.value_ptr.* = .{};
|
||||
|
||||
defer {
|
||||
dependency_list.deinit(manager.allocator);
|
||||
if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) {
|
||||
if (any_root) {
|
||||
callbacks.onResolve(extract_ctx);
|
||||
}
|
||||
} else {
|
||||
// No callback - do the default package processing
|
||||
if (manager.processExtractedTarballPackage(&package_id, dependency_id, resolution, &task.data.extract, log_level)) |pkg| {
|
||||
_ = pkg;
|
||||
// Assign the resolution for the primary dependency
|
||||
if (dependency_id != invalid_package_id and package_id != invalid_package_id) {
|
||||
manager.assignResolution(dependency_id, package_id);
|
||||
}
|
||||
}
|
||||
|
||||
for (dependency_list.items) |dep| {
|
||||
switch (dep) {
|
||||
.dependency, .root_dependency => |id| {
|
||||
var version = &manager.lockfile.buffers.dependencies.items[id].version;
|
||||
switch (version.tag) {
|
||||
.git => {
|
||||
version.value.git.package_name = pkg.name;
|
||||
},
|
||||
.github => {
|
||||
version.value.github.package_name = pkg.name;
|
||||
},
|
||||
.tarball => {
|
||||
version.value.tarball.package_name = pkg.name;
|
||||
},
|
||||
|
||||
// `else` is reachable if this package is from `overrides`. Version in `lockfile.buffer.dependencies`
|
||||
// will still have the original.
|
||||
else => {},
|
||||
}
|
||||
try manager.processDependencyListItem(dep, &any_root, install_peer);
|
||||
},
|
||||
else => {
|
||||
// if it's a node_module folder to install, handle that after we process all the dependencies within the onExtract callback.
|
||||
dependency_list_entry.value_ptr.append(manager.allocator, dep) catch unreachable;
|
||||
},
|
||||
}
|
||||
}
|
||||
} else if (manager.task_queue.getEntry(Task.Id.forManifest(
|
||||
manager.lockfile.str(&manager.lockfile.packages.items(.name)[package_id]),
|
||||
))) |dependency_list_entry| {
|
||||
// Peer dependencies do not initiate any downloads of their own, thus need to be resolved here instead
|
||||
const dependency_list = dependency_list_entry.value_ptr.*;
|
||||
dependency_list_entry.value_ptr.* = .{};
|
||||
|
||||
try manager.processDependencyList(dependency_list, void, {}, {}, install_peer);
|
||||
}
|
||||
|
||||
manager.setPreinstallState(package_id, manager.lockfile, .done);
|
||||
// Only set preinstall state if we have a valid package_id
|
||||
if (package_id != invalid_package_id) {
|
||||
manager.setPreinstallState(package_id, manager.lockfile, .done);
|
||||
}
|
||||
|
||||
if (log_level.showProgress()) {
|
||||
if (!has_updated_this_run) {
|
||||
@@ -758,15 +756,73 @@ pub fn runTasks(
|
||||
|
||||
if (manager.hasCreatedNetworkTask(checkout_id, dep.behavior.isRequired())) continue;
|
||||
|
||||
manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueGitCheckout(
|
||||
// Calculate patch hash if needed
|
||||
const patch_name_and_version_hash: ?u64 = if (manager.lockfile.patched_dependencies.entries.len > 0) brk: {
|
||||
// We need to format the version string with the resolved commit
|
||||
// The repo URL needs to be transformed to match what's in patchedDependencies
|
||||
// e.g., "git@github.com:user/repo.git" -> "git+ssh://git@github.com:user/repo.git"
|
||||
var resolution_buf: [8192]u8 = undefined;
|
||||
var stream = std.io.fixedBufferStream(&resolution_buf);
|
||||
var writer = stream.writer();
|
||||
|
||||
// Write the git resolution format
|
||||
if (strings.hasPrefixComptime(repo, "git@")) {
|
||||
// Transform SCP-like URL to SSH URL format
|
||||
writer.writeAll("git+ssh://") catch unreachable;
|
||||
writer.writeAll(repo) catch unreachable;
|
||||
} else if (strings.hasPrefixComptime(repo, "ssh://")) {
|
||||
writer.writeAll("git+") catch unreachable;
|
||||
writer.writeAll(repo) catch unreachable;
|
||||
} else {
|
||||
writer.writeAll("git+") catch unreachable;
|
||||
writer.writeAll(repo) catch unreachable;
|
||||
}
|
||||
writer.writeByte('#') catch unreachable;
|
||||
writer.writeAll(resolved) catch unreachable;
|
||||
|
||||
const package_version = stream.getWritten();
|
||||
|
||||
// Calculate the hash for "name@version"
|
||||
var name_and_version_buf: [8192]u8 = undefined;
|
||||
const name_and_version = std.fmt.bufPrint(&name_and_version_buf, "{s}@{s}", .{
|
||||
dep_name,
|
||||
package_version,
|
||||
}) catch unreachable;
|
||||
|
||||
const hash = String.Builder.stringHash(name_and_version);
|
||||
|
||||
if (comptime Environment.isDebug) {
|
||||
Output.prettyErrorln("[git-patch] Looking for patch: {s} (hash={d})", .{ name_and_version, hash });
|
||||
}
|
||||
|
||||
// Check if this dependency has a patch
|
||||
if (manager.lockfile.patched_dependencies.get(hash)) |_| {
|
||||
if (comptime Environment.isDebug) {
|
||||
Output.prettyErrorln("[git-patch] Found patch for git dependency!", .{});
|
||||
}
|
||||
break :brk hash;
|
||||
}
|
||||
|
||||
// Also try checking all patched dependencies to see what we have
|
||||
if (comptime Environment.isDebug) {
|
||||
var iter = manager.lockfile.patched_dependencies.iterator();
|
||||
while (iter.next()) |entry| {
|
||||
Output.prettyErrorln("[git-patch] Available patch: hash={d}", .{entry.key_ptr.*});
|
||||
}
|
||||
}
|
||||
|
||||
break :brk null;
|
||||
} else null;
|
||||
|
||||
manager.enqueueGitCheckout(
|
||||
checkout_id,
|
||||
repo_fd,
|
||||
dep_id,
|
||||
dep_name,
|
||||
clone.res,
|
||||
resolved,
|
||||
null,
|
||||
)));
|
||||
patch_name_and_version_hash,
|
||||
);
|
||||
} else {
|
||||
// Resolving!
|
||||
const dependency_list_entry = manager.task_queue.getEntry(task.id).?;
|
||||
@@ -806,12 +862,59 @@ pub fn runTasks(
|
||||
continue;
|
||||
}
|
||||
|
||||
if (comptime @TypeOf(callbacks.onExtract) != void) {
|
||||
// Prioritize runtime callback if available
|
||||
if (manager.onExtractCallback) |callback| {
|
||||
// We've populated the cache, package already exists in memory. Call the package installer callback
|
||||
// and don't enqueue dependencies
|
||||
switch (callback) {
|
||||
.package_installer => |cb| {
|
||||
// TODO(dylan-conway) most likely don't need to call this now that the package isn't appended, but
|
||||
// keeping just in case for now
|
||||
cb.ctx.fixCachedLockfilePackageSlices();
|
||||
|
||||
cb.fn_ptr(
|
||||
cb.ctx,
|
||||
task.id,
|
||||
git_checkout.dependency_id,
|
||||
&task.data.git_checkout,
|
||||
log_level,
|
||||
);
|
||||
},
|
||||
.store_installer => |cb| {
|
||||
cb.fn_ptr(
|
||||
cb.ctx,
|
||||
task.id,
|
||||
);
|
||||
},
|
||||
.default => |cb| {
|
||||
// For default callback, process the package first
|
||||
if (manager.processExtractedTarballPackage(
|
||||
&package_id,
|
||||
git_checkout.dependency_id,
|
||||
resolution,
|
||||
&task.data.git_checkout,
|
||||
log_level,
|
||||
)) |pkg| {
|
||||
_ = pkg;
|
||||
// Assign the resolution for the primary dependency
|
||||
if (git_checkout.dependency_id != invalid_package_id and package_id != invalid_package_id) {
|
||||
manager.assignResolution(git_checkout.dependency_id, package_id);
|
||||
}
|
||||
}
|
||||
|
||||
cb.fn_ptr(
|
||||
cb.ctx,
|
||||
task.id,
|
||||
git_checkout.dependency_id,
|
||||
&task.data.git_checkout,
|
||||
log_level,
|
||||
);
|
||||
},
|
||||
}
|
||||
} else if (comptime @TypeOf(callbacks.onExtract) != void) {
|
||||
// Fall back to compile-time callback
|
||||
switch (Ctx) {
|
||||
*PackageInstaller => {
|
||||
|
||||
// TODO(dylan-conway) most likely don't need to call this now that the package isn't appended, but
|
||||
// keeping just in case for now
|
||||
extract_ctx.fixCachedLockfilePackageSlices();
|
||||
@@ -1090,6 +1193,7 @@ const ThreadPool = bun.ThreadPool;
|
||||
const default_allocator = bun.default_allocator;
|
||||
const logger = bun.logger;
|
||||
const strings = bun.strings;
|
||||
const String = bun.Semver.String;
|
||||
|
||||
const Fs = bun.fs;
|
||||
const FileSystem = Fs.FileSystem;
|
||||
|
||||
@@ -157,82 +157,16 @@ pub fn callback(task: *ThreadPool.Task) void {
|
||||
this.status = Status.success;
|
||||
},
|
||||
.git_clone => {
|
||||
const name = this.request.git_clone.name.slice();
|
||||
const url = this.request.git_clone.url.slice();
|
||||
var attempt: u8 = 1;
|
||||
const dir = brk: {
|
||||
if (Repository.tryHTTPS(url)) |https| break :brk Repository.download(
|
||||
manager.allocator,
|
||||
this.request.git_clone.env,
|
||||
&this.log,
|
||||
manager.getCacheDirectory(),
|
||||
this.id,
|
||||
name,
|
||||
https,
|
||||
attempt,
|
||||
) catch |err| {
|
||||
// Exit early if git checked and could
|
||||
// not find the repository, skip ssh
|
||||
if (err == error.RepositoryNotFound) {
|
||||
this.err = err;
|
||||
this.status = Status.fail;
|
||||
this.data = .{ .git_clone = bun.invalid_fd };
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.err = err;
|
||||
this.status = Status.fail;
|
||||
this.data = .{ .git_clone = bun.invalid_fd };
|
||||
attempt += 1;
|
||||
break :brk null;
|
||||
};
|
||||
break :brk null;
|
||||
} orelse if (Repository.trySSH(url)) |ssh| Repository.download(
|
||||
manager.allocator,
|
||||
this.request.git_clone.env,
|
||||
&this.log,
|
||||
manager.getCacheDirectory(),
|
||||
this.id,
|
||||
name,
|
||||
ssh,
|
||||
attempt,
|
||||
) catch |err| {
|
||||
this.err = err;
|
||||
this.status = Status.fail;
|
||||
this.data = .{ .git_clone = bun.invalid_fd };
|
||||
return;
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
this.err = null;
|
||||
this.data = .{ .git_clone = .fromStdDir(dir) };
|
||||
this.status = Status.success;
|
||||
// Git operations are now handled by GitCommandRunner
|
||||
// This task should already have its data populated by GitCommandRunner
|
||||
// If we get here, it means something went wrong
|
||||
unreachable;
|
||||
},
|
||||
.git_checkout => {
|
||||
const git_checkout = &this.request.git_checkout;
|
||||
const data = Repository.checkout(
|
||||
manager.allocator,
|
||||
this.request.git_checkout.env,
|
||||
&this.log,
|
||||
manager.getCacheDirectory(),
|
||||
git_checkout.repo_dir.stdDir(),
|
||||
git_checkout.name.slice(),
|
||||
git_checkout.url.slice(),
|
||||
git_checkout.resolved.slice(),
|
||||
) catch |err| {
|
||||
this.err = err;
|
||||
this.status = Status.fail;
|
||||
this.data = .{ .git_checkout = .{} };
|
||||
|
||||
return;
|
||||
};
|
||||
|
||||
this.data = .{
|
||||
.git_checkout = data,
|
||||
};
|
||||
this.status = Status.success;
|
||||
// Git operations are now handled by GitCommandRunner
|
||||
// This task should already have its data populated by GitCommandRunner
|
||||
// If we get here, it means something went wrong
|
||||
unreachable;
|
||||
},
|
||||
.local_tarball => {
|
||||
const workspace_pkg_id = manager.lockfile.getWorkspacePkgIfWorkspaceDep(this.request.local_tarball.tarball.dependency_id);
|
||||
@@ -364,7 +298,6 @@ const Npm = install.Npm;
|
||||
const PackageID = install.PackageID;
|
||||
const PackageManager = install.PackageManager;
|
||||
const PatchTask = install.PatchTask;
|
||||
const Repository = install.Repository;
|
||||
const Resolution = install.Resolution;
|
||||
const Task = install.Task;
|
||||
const invalid_package_id = install.invalid_package_id;
|
||||
|
||||
@@ -630,6 +630,10 @@ pub const Version = struct {
|
||||
if (isGitHubRepoPath(url["hub:".len..])) return .github;
|
||||
}
|
||||
},
|
||||
'l' => {
|
||||
// gitlab:user/repo - when url = "lab:user/repo" after "git" prefix
|
||||
if (strings.hasPrefixComptime(url, "lab:")) return .git;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
@@ -745,6 +749,10 @@ pub const Version = struct {
|
||||
// return `Tag.git` or `Tag.npm`.
|
||||
if (strings.hasPrefixComptime(dependency, "patch:")) return .npm;
|
||||
},
|
||||
'b' => {
|
||||
// bitbucket:user/repo
|
||||
if (strings.hasPrefixComptime(dependency, "bitbucket:")) return .git;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
@@ -1012,6 +1020,7 @@ pub fn parseWithTag(
|
||||
if (strings.hasPrefixComptime(input, "git+")) {
|
||||
input = input["git+".len..];
|
||||
}
|
||||
// Processing git URL
|
||||
const hash_index = strings.lastIndexOfChar(input, '#');
|
||||
|
||||
return .{
|
||||
|
||||
920
src/install/git_command_runner.zig
Normal file
920
src/install/git_command_runner.zig
Normal file
@@ -0,0 +1,920 @@
|
||||
const log = Output.scoped(.Git, false);
|
||||
|
||||
pub const GitCommandRunner = struct {
|
||||
manager: *PackageManager,
|
||||
process: ?*Process = null,
|
||||
stdout: OutputReader = OutputReader.init(@This()),
|
||||
stderr: OutputReader = OutputReader.init(@This()),
|
||||
has_called_process_exit: bool = false,
|
||||
remaining_fds: i8 = 0,
|
||||
|
||||
task_id: Task.Id,
|
||||
operation: Operation,
|
||||
// For checkout, we need to run two commands
|
||||
checkout_phase: enum { clone, checkout } = .clone,
|
||||
|
||||
heap: bun.io.heap.IntrusiveField(GitCommandRunner) = .{},
|
||||
|
||||
pub const Operation = union(enum) {
|
||||
clone: struct {
|
||||
name: strings.StringOrTinyString,
|
||||
url: strings.StringOrTinyString,
|
||||
dep_id: DependencyID,
|
||||
res: Resolution,
|
||||
attempt: u8,
|
||||
is_fetch: bool = false,
|
||||
},
|
||||
checkout: struct {
|
||||
repo_dir: bun.FileDescriptor,
|
||||
dependency_id: DependencyID,
|
||||
name: strings.StringOrTinyString,
|
||||
url: strings.StringOrTinyString,
|
||||
resolved: strings.StringOrTinyString,
|
||||
resolution: Resolution,
|
||||
target_dir: []const u8,
|
||||
patch_name_and_version_hash: ?u64,
|
||||
},
|
||||
};
|
||||
|
||||
pub const List = bun.io.heap.Intrusive(GitCommandRunner, *PackageManager, sortByTaskId);
|
||||
|
||||
fn sortByTaskId(_: *PackageManager, a: *GitCommandRunner, b: *GitCommandRunner) bool {
|
||||
return a.task_id.get() < b.task_id.get();
|
||||
}
|
||||
|
||||
pub const new = bun.TrivialNew(@This());
|
||||
|
||||
pub const OutputReader = bun.io.BufferedReader;
|
||||
const uv = bun.windows.libuv;
|
||||
|
||||
fn resetOutputFlags(output: *OutputReader, fd: bun.FileDescriptor) void {
|
||||
output.flags.nonblocking = true;
|
||||
output.flags.socket = true;
|
||||
output.flags.memfd = false;
|
||||
output.flags.received_eof = false;
|
||||
output.flags.closed_without_reporting = false;
|
||||
|
||||
if (comptime Environment.allow_assert) {
|
||||
const flags = bun.sys.getFcntlFlags(fd).unwrap() catch @panic("Failed to get fcntl flags");
|
||||
bun.assertWithLocation(flags & bun.O.NONBLOCK != 0, @src());
|
||||
|
||||
const stat = bun.sys.fstat(fd).unwrap() catch @panic("Failed to fstat");
|
||||
bun.assertWithLocation(std.posix.S.ISSOCK(stat.mode), @src());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn loop(this: *const GitCommandRunner) *bun.uws.Loop {
|
||||
return this.manager.event_loop.loop();
|
||||
}
|
||||
|
||||
pub fn eventLoop(this: *const GitCommandRunner) *jsc.AnyEventLoop {
|
||||
return &this.manager.event_loop;
|
||||
}
|
||||
|
||||
pub fn onReaderDone(this: *GitCommandRunner) void {
|
||||
bun.assert(this.remaining_fds > 0);
|
||||
this.remaining_fds -= 1;
|
||||
this.maybeFinished();
|
||||
}
|
||||
|
||||
pub fn onReaderError(this: *GitCommandRunner, err: bun.sys.Error) void {
|
||||
bun.assert(this.remaining_fds > 0);
|
||||
this.remaining_fds -= 1;
|
||||
|
||||
Output.prettyErrorln("<r><red>error<r>: Failed to read git output due to error <b>{d} {s}<r>", .{
|
||||
err.errno,
|
||||
@tagName(err.getErrno()),
|
||||
});
|
||||
Output.flush();
|
||||
this.maybeFinished();
|
||||
}
|
||||
|
||||
fn maybeFinished(this: *GitCommandRunner) void {
|
||||
if (!this.has_called_process_exit or this.remaining_fds != 0)
|
||||
return;
|
||||
|
||||
const process = this.process orelse return;
|
||||
this.handleExit(process.status);
|
||||
}
|
||||
|
||||
fn ensureNotInHeap(this: *GitCommandRunner) void {
|
||||
if (this.heap.child != null or this.heap.next != null or this.heap.prev != null or this.manager.active_git_commands.root == this) {
|
||||
this.manager.active_git_commands.remove(this);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn spawn(
|
||||
manager: *PackageManager,
|
||||
task_id: Task.Id,
|
||||
argv_input: []const ?[*:0]const u8,
|
||||
operation: Operation,
|
||||
) void {
|
||||
// GitCommandRunner.spawn called
|
||||
|
||||
const runner = bun.new(GitCommandRunner, .{
|
||||
.manager = manager,
|
||||
.task_id = task_id,
|
||||
.operation = operation,
|
||||
});
|
||||
|
||||
runner.manager.active_git_commands.insert(runner);
|
||||
|
||||
// Find the git executable
|
||||
var path_buf: bun.PathBuffer = undefined;
|
||||
const git_path = bun.which(&path_buf, bun.getenvZ("PATH") orelse "", manager.cache_directory_path, "git") orelse {
|
||||
log("Failed to find git executable in PATH", .{});
|
||||
// Create a failed task
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
|
||||
// Copy argv to a local array to avoid const issues, using the full git path
|
||||
var argv: [16]?[*:0]const u8 = undefined;
|
||||
argv[0] = git_path.ptr; // Use the full path to git
|
||||
var argc: usize = 1;
|
||||
for (argv_input[1..]) |arg| {
|
||||
if (arg == null) break;
|
||||
argv[argc] = arg;
|
||||
argc += 1;
|
||||
}
|
||||
argv[argc] = null; // Ensure null termination
|
||||
|
||||
// Cache directory is manager.cache_directory_path
|
||||
|
||||
runner.remaining_fds = 0;
|
||||
var env_map = Repository.shared_env.get(manager.allocator, manager.env);
|
||||
const envp = env_map.createNullDelimitedEnvMap(manager.allocator) catch |err| {
|
||||
log("Failed to create env map: {}", .{err});
|
||||
// Create a failed task
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
|
||||
if (Environment.isWindows) {
|
||||
runner.stdout.source = .{ .pipe = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() };
|
||||
runner.stderr.source = .{ .pipe = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() };
|
||||
}
|
||||
|
||||
// Ensure cache directory exists before using it as cwd
|
||||
_ = manager.getCacheDirectory();
|
||||
|
||||
const spawn_options = bun.spawn.SpawnOptions{
|
||||
.stdin = .ignore,
|
||||
.stdout = if (Environment.isPosix) .buffer else .{ .buffer = runner.stdout.source.?.pipe },
|
||||
.stderr = if (Environment.isPosix) .buffer else .{ .buffer = runner.stderr.source.?.pipe },
|
||||
.argv0 = git_path.ptr,
|
||||
.cwd = manager.cache_directory_path,
|
||||
.windows = if (Environment.isWindows) .{
|
||||
.loop = jsc.EventLoopHandle.init(&manager.event_loop),
|
||||
},
|
||||
.stream = false,
|
||||
};
|
||||
|
||||
// About to spawn git process with argv[0]="{s}"
|
||||
if (comptime Environment.allow_assert) {
|
||||
log("Spawning git with argv[0]={s}, cwd={s}", .{ argv[0].?, manager.cache_directory_path });
|
||||
for (argv[0..argc]) |arg| {
|
||||
if (arg) |a| {
|
||||
log(" argv: {s}", .{a});
|
||||
}
|
||||
}
|
||||
}
|
||||
var spawn_result = bun.spawn.spawnProcess(&spawn_options, @ptrCast(&argv), envp) catch |err| {
|
||||
log("Failed to spawn git process: {} (argv[0]={s})", .{ err, argv[0].? });
|
||||
// Create a failed task with proper error message
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
var spawned = spawn_result.unwrap() catch |err| {
|
||||
log("Failed to unwrap spawn result: {}", .{err});
|
||||
// Create a failed task with proper error message
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
|
||||
// Git process spawned
|
||||
|
||||
if (comptime Environment.isPosix) {
|
||||
if (spawned.stdout) |stdout| {
|
||||
if (!spawned.memfds[1]) {
|
||||
runner.stdout.setParent(runner);
|
||||
_ = bun.sys.setNonblocking(stdout);
|
||||
runner.remaining_fds += 1;
|
||||
|
||||
resetOutputFlags(&runner.stdout, stdout);
|
||||
runner.stdout.start(stdout, true).unwrap() catch |err| {
|
||||
log("Failed to start stdout reader: {}", .{err});
|
||||
// Create a failed task
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
if (runner.stdout.handle.getPoll()) |poll| {
|
||||
poll.flags.insert(.socket);
|
||||
}
|
||||
} else {
|
||||
runner.stdout.setParent(runner);
|
||||
runner.stdout.startMemfd(stdout);
|
||||
}
|
||||
}
|
||||
if (spawned.stderr) |stderr| {
|
||||
if (!spawned.memfds[2]) {
|
||||
runner.stderr.setParent(runner);
|
||||
_ = bun.sys.setNonblocking(stderr);
|
||||
runner.remaining_fds += 1;
|
||||
|
||||
resetOutputFlags(&runner.stderr, stderr);
|
||||
runner.stderr.start(stderr, true).unwrap() catch |err| {
|
||||
log("Failed to start stderr reader: {}", .{err});
|
||||
// Create a failed task
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
if (runner.stderr.handle.getPoll()) |poll| {
|
||||
poll.flags.insert(.socket);
|
||||
}
|
||||
} else {
|
||||
runner.stderr.setParent(runner);
|
||||
runner.stderr.startMemfd(stderr);
|
||||
}
|
||||
}
|
||||
} else if (comptime Environment.isWindows) {
|
||||
if (spawned.stdout == .buffer) {
|
||||
runner.stdout.parent = runner;
|
||||
runner.remaining_fds += 1;
|
||||
runner.stdout.startWithCurrentPipe().unwrap() catch |err| {
|
||||
log("Failed to start stdout reader on Windows: {}", .{err});
|
||||
// Create a failed task
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
}
|
||||
if (spawned.stderr == .buffer) {
|
||||
runner.stderr.parent = runner;
|
||||
runner.remaining_fds += 1;
|
||||
runner.stderr.startWithCurrentPipe().unwrap() catch |err| {
|
||||
log("Failed to start stderr reader on Windows: {}", .{err});
|
||||
// Create a failed task
|
||||
const task = manager.preallocated_resolve_tasks.get();
|
||||
task.* = Task{
|
||||
.package_manager = manager,
|
||||
.log = logger.Log.init(manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = operation.clone.name,
|
||||
.url = operation.clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
|
||||
.dep_id = operation.clone.dep_id,
|
||||
.res = operation.clone.res,
|
||||
},
|
||||
},
|
||||
.id = task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = .{ .git_clone = bun.invalid_fd },
|
||||
.status = .fail,
|
||||
.err = error.GitCommandFailed,
|
||||
};
|
||||
manager.resolve_tasks.push(task);
|
||||
manager.wake();
|
||||
runner.deinit();
|
||||
return;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const event_loop = &manager.event_loop;
|
||||
var process = spawned.toProcess(event_loop, false);
|
||||
|
||||
bun.assertf(runner.process == null, "forgot to call `resetPolls`", .{});
|
||||
runner.process = process;
|
||||
process.setExitHandler(runner);
|
||||
|
||||
switch (process.watchOrReap()) {
|
||||
.err => |err| {
|
||||
if (!process.hasExited())
|
||||
process.onExit(.{ .err = err }, &std.mem.zeroes(bun.spawn.Rusage));
|
||||
},
|
||||
.result => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn handleExit(this: *GitCommandRunner, status: bun.spawn.Status) void {
|
||||
log("Git command finished: task_id={d}, status={}", .{ this.task_id.get(), status });
|
||||
|
||||
const stderr_text = this.stderr.finalBuffer().items;
|
||||
|
||||
this.ensureNotInHeap();
|
||||
|
||||
// Create a task with the result
|
||||
const task = this.manager.preallocated_resolve_tasks.get();
|
||||
|
||||
switch (this.operation) {
|
||||
.clone => |clone| {
|
||||
task.* = Task{
|
||||
.package_manager = this.manager,
|
||||
.log = logger.Log.init(this.manager.allocator),
|
||||
.tag = .git_clone,
|
||||
.request = .{
|
||||
.git_clone = .{
|
||||
.name = clone.name,
|
||||
.url = clone.url,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(this.manager.allocator) },
|
||||
.dep_id = clone.dep_id,
|
||||
.res = clone.res,
|
||||
},
|
||||
},
|
||||
.id = this.task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = undefined,
|
||||
.status = undefined,
|
||||
.err = null,
|
||||
};
|
||||
|
||||
switch (status) {
|
||||
.exited => |exit| {
|
||||
if (exit.code == 0) {
|
||||
// Success - get the git dir
|
||||
const folder_name = std.fmt.bufPrintZ(&folder_name_buf, "{any}.git", .{
|
||||
bun.fmt.hexIntLower(this.task_id.get()),
|
||||
}) catch unreachable;
|
||||
if (this.manager.getCacheDirectory().openDirZ(folder_name, .{})) |dir| {
|
||||
task.data = .{ .git_clone = bun.FileDescriptor.fromStdDir(dir) };
|
||||
task.status = .success;
|
||||
} else |err| {
|
||||
task.err = err;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_clone = bun.invalid_fd };
|
||||
}
|
||||
} else {
|
||||
task.err = error.GitCloneFailed;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_clone = bun.invalid_fd };
|
||||
|
||||
if (stderr_text.len > 0) {
|
||||
task.log.addErrorFmt(null, logger.Loc.Empty, this.manager.allocator, "git clone failed: {s}", .{stderr_text}) catch {};
|
||||
}
|
||||
}
|
||||
},
|
||||
.signaled => |signal| {
|
||||
task.err = error.GitCloneSignaled;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_clone = bun.invalid_fd };
|
||||
|
||||
const signal_code = bun.SignalCode.from(signal);
|
||||
task.log.addErrorFmt(null, logger.Loc.Empty, this.manager.allocator, "git clone terminated by {}", .{
|
||||
signal_code.fmt(Output.enable_ansi_colors_stderr),
|
||||
}) catch {};
|
||||
},
|
||||
.err => |_| {
|
||||
task.err = error.GitCloneFailed;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_clone = bun.invalid_fd };
|
||||
},
|
||||
else => {
|
||||
task.err = error.UnexpectedGitStatus;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_clone = bun.invalid_fd };
|
||||
},
|
||||
}
|
||||
},
|
||||
.checkout => |checkout| {
|
||||
// Handle two-phase checkout
|
||||
if (this.checkout_phase == .clone) {
|
||||
// First phase completed (clone --no-checkout)
|
||||
if (status == .exited and status.exited.code == 0) {
|
||||
|
||||
// Now run the actual checkout command
|
||||
this.checkout_phase = .checkout;
|
||||
|
||||
// Find the git executable
|
||||
var path_buf2: bun.PathBuffer = undefined;
|
||||
const git_path = bun.which(&path_buf2, bun.getenvZ("PATH") orelse "", this.manager.cache_directory_path, "git") orelse {
|
||||
log("Failed to find git executable in PATH for checkout", .{});
|
||||
this.handleCheckoutError(error.GitCommandFailed);
|
||||
return;
|
||||
};
|
||||
|
||||
// Build checkout command: git -C <folder> checkout --quiet <resolved>
|
||||
const target_dir_z = bun.default_allocator.dupeZ(u8, checkout.target_dir) catch unreachable;
|
||||
|
||||
if (comptime Environment.allow_assert) {
|
||||
log("Checkout target_dir: {s}", .{target_dir_z});
|
||||
log("Checkout resolved: {s}", .{checkout.resolved.slice()});
|
||||
}
|
||||
|
||||
const argv: [7]?[*:0]const u8 = .{
|
||||
git_path.ptr,
|
||||
"-C",
|
||||
target_dir_z,
|
||||
"checkout",
|
||||
"--quiet",
|
||||
bun.default_allocator.dupeZ(u8, checkout.resolved.slice()) catch unreachable,
|
||||
null,
|
||||
};
|
||||
|
||||
// Spawn the checkout command
|
||||
this.has_called_process_exit = false;
|
||||
this.remaining_fds = 0;
|
||||
this.resetPolls();
|
||||
|
||||
var env_map = Repository.shared_env.get(this.manager.allocator, this.manager.env);
|
||||
const envp = env_map.createNullDelimitedEnvMap(this.manager.allocator) catch |err| {
|
||||
log("Failed to create env map for checkout: {}", .{err});
|
||||
this.handleCheckoutError(error.EnvMapFailed);
|
||||
return;
|
||||
};
|
||||
|
||||
if (Environment.isWindows) {
|
||||
this.stdout.source = .{ .pipe = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() };
|
||||
this.stderr.source = .{ .pipe = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() };
|
||||
}
|
||||
|
||||
// Ensure cache directory exists before using it as cwd
|
||||
_ = this.manager.getCacheDirectory();
|
||||
|
||||
const spawn_options = bun.spawn.SpawnOptions{
|
||||
.stdin = .ignore,
|
||||
.stdout = if (Environment.isPosix) .buffer else .{ .buffer = this.stdout.source.?.pipe },
|
||||
.stderr = if (Environment.isPosix) .buffer else .{ .buffer = this.stderr.source.?.pipe },
|
||||
.argv0 = git_path.ptr,
|
||||
.cwd = this.manager.cache_directory_path,
|
||||
.windows = if (Environment.isWindows) .{
|
||||
.loop = jsc.EventLoopHandle.init(&this.manager.event_loop),
|
||||
},
|
||||
.stream = false,
|
||||
};
|
||||
|
||||
if (comptime Environment.allow_assert) {
|
||||
log("Spawning git checkout with cwd={s}", .{this.manager.cache_directory_path});
|
||||
for (argv) |arg| {
|
||||
if (arg) |a| {
|
||||
log(" argv: {s}", .{a});
|
||||
} else break;
|
||||
}
|
||||
}
|
||||
|
||||
var spawn_result = bun.spawn.spawnProcess(&spawn_options, @constCast(@ptrCast(&argv)), envp) catch |err| {
|
||||
log("Failed to spawn git checkout: {}", .{err});
|
||||
this.handleCheckoutError(err);
|
||||
return;
|
||||
};
|
||||
|
||||
var spawned = spawn_result.unwrap() catch |err| {
|
||||
log("Failed to unwrap git checkout spawn: {}", .{err});
|
||||
this.handleCheckoutError(err);
|
||||
return;
|
||||
};
|
||||
|
||||
// Set up process monitoring
|
||||
if (comptime Environment.isPosix) {
|
||||
if (spawned.stdout) |stdout| {
|
||||
if (!spawned.memfds[1]) {
|
||||
this.stdout.setParent(this);
|
||||
_ = bun.sys.setNonblocking(stdout);
|
||||
this.remaining_fds += 1;
|
||||
|
||||
resetOutputFlags(&this.stdout, stdout);
|
||||
this.stdout.start(stdout, true).unwrap() catch |err| {
|
||||
log("Failed to start stdout reader: {}", .{err});
|
||||
this.handleCheckoutError(err);
|
||||
return;
|
||||
};
|
||||
if (this.stdout.handle.getPoll()) |poll| {
|
||||
poll.flags.insert(.socket);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (spawned.stderr) |stderr| {
|
||||
if (!spawned.memfds[2]) {
|
||||
this.stderr.setParent(this);
|
||||
_ = bun.sys.setNonblocking(stderr);
|
||||
this.remaining_fds += 1;
|
||||
|
||||
resetOutputFlags(&this.stderr, stderr);
|
||||
this.stderr.start(stderr, true).unwrap() catch |err| {
|
||||
log("Failed to start stderr reader: {}", .{err});
|
||||
this.handleCheckoutError(err);
|
||||
return;
|
||||
};
|
||||
if (this.stderr.handle.getPoll()) |poll| {
|
||||
poll.flags.insert(.socket);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const event_loop = &this.manager.event_loop;
|
||||
var process = spawned.toProcess(event_loop, false);
|
||||
|
||||
this.process = process;
|
||||
process.setExitHandler(this);
|
||||
|
||||
switch (process.watchOrReap()) {
|
||||
.err => |err| {
|
||||
if (!process.hasExited())
|
||||
process.onExit(.{ .err = err }, &std.mem.zeroes(bun.spawn.Rusage));
|
||||
},
|
||||
.result => {},
|
||||
}
|
||||
|
||||
// Don't continue to the task creation yet
|
||||
return;
|
||||
} else {
|
||||
// Clone failed
|
||||
this.handleCheckoutError(error.GitCloneFailed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Second phase (actual checkout) completed
|
||||
task.* = Task{
|
||||
.package_manager = this.manager,
|
||||
.log = logger.Log.init(this.manager.allocator),
|
||||
.tag = .git_checkout,
|
||||
.request = .{
|
||||
.git_checkout = .{
|
||||
.repo_dir = checkout.repo_dir,
|
||||
.dependency_id = checkout.dependency_id,
|
||||
.name = checkout.name,
|
||||
.url = checkout.url,
|
||||
.resolved = checkout.resolved,
|
||||
.env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(this.manager.allocator) },
|
||||
.resolution = checkout.resolution,
|
||||
},
|
||||
},
|
||||
.id = this.task_id,
|
||||
.threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
|
||||
.data = undefined,
|
||||
.status = undefined,
|
||||
.err = null,
|
||||
.apply_patch_task = if (checkout.patch_name_and_version_hash) |h| brk: {
|
||||
const patch_hash = this.manager.lockfile.patched_dependencies.get(h).?.patchfileHash().?;
|
||||
const ptask = PatchTask.newApplyPatchHash(this.manager, checkout.dependency_id, patch_hash, h);
|
||||
ptask.callback.apply.task_id = this.task_id;
|
||||
break :brk ptask;
|
||||
} else null,
|
||||
};
|
||||
|
||||
switch (status) {
|
||||
.exited => |exit| {
|
||||
if (exit.code == 0) {
|
||||
// Success - create ExtractData
|
||||
const folder_name = PackageManager.cachedGitFolderNamePrint(&folder_name_buf, checkout.resolved.slice(), null);
|
||||
if (this.manager.getCacheDirectory().openDir(folder_name, .{})) |package_dir_const| {
|
||||
var package_dir = package_dir_const;
|
||||
defer package_dir.close();
|
||||
|
||||
// Delete .git directory
|
||||
package_dir.deleteTree(".git") catch {};
|
||||
|
||||
// Create .bun-tag file with resolved commit
|
||||
if (checkout.resolved.slice().len > 0) insert_tag: {
|
||||
const git_tag = package_dir.createFileZ(".bun-tag", .{ .truncate = true }) catch break :insert_tag;
|
||||
defer git_tag.close();
|
||||
git_tag.writeAll(checkout.resolved.slice()) catch {
|
||||
package_dir.deleteFileZ(".bun-tag") catch {};
|
||||
};
|
||||
}
|
||||
|
||||
// Read package.json if it exists
|
||||
if (bun.sys.File.readFileFrom(package_dir, "package.json", this.manager.allocator).unwrap()) |result| {
|
||||
const json_file, const json_buf_original = result;
|
||||
// Make a copy of the buffer to ensure it's not corrupted
|
||||
const json_buf = this.manager.allocator.dupe(u8, json_buf_original) catch json_buf_original;
|
||||
// Don't close the file yet - we're passing the buffer to the task
|
||||
// The file descriptor is just for reading, closing it shouldn't affect the buffer
|
||||
json_file.close();
|
||||
|
||||
var json_path_buf: bun.PathBuffer = undefined;
|
||||
if (json_file.getPath(&json_path_buf).unwrap()) |json_path| {
|
||||
const FileSystem = @import("../fs.zig").FileSystem;
|
||||
if (FileSystem.instance.dirname_store.append(@TypeOf(json_path), json_path)) |ret_json_path| {
|
||||
task.data = .{ .git_checkout = .{
|
||||
.url = checkout.url.slice(),
|
||||
.resolved = checkout.resolved.slice(),
|
||||
.json = .{
|
||||
.path = ret_json_path,
|
||||
.buf = json_buf,
|
||||
},
|
||||
} };
|
||||
task.status = .success;
|
||||
} else |err| {
|
||||
task.err = err;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
}
|
||||
} else |err| {
|
||||
task.err = err;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
}
|
||||
} else |err| {
|
||||
if (err == error.ENOENT) {
|
||||
// Allow git dependencies without package.json
|
||||
task.data = .{ .git_checkout = .{
|
||||
.url = checkout.url.slice(),
|
||||
.resolved = checkout.resolved.slice(),
|
||||
} };
|
||||
task.status = .success;
|
||||
} else {
|
||||
task.err = err;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
}
|
||||
}
|
||||
} else |err| {
|
||||
task.err = err;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
}
|
||||
} else {
|
||||
task.err = error.GitCheckoutFailed;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
|
||||
if (stderr_text.len > 0) {
|
||||
task.log.addErrorFmt(null, logger.Loc.Empty, this.manager.allocator, "git checkout failed: {s}", .{stderr_text}) catch {};
|
||||
}
|
||||
}
|
||||
},
|
||||
.signaled => |signal| {
|
||||
task.err = error.GitCheckoutSignaled;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
|
||||
const signal_code = bun.SignalCode.from(signal);
|
||||
task.log.addErrorFmt(null, logger.Loc.Empty, this.manager.allocator, "git checkout terminated by {}", .{
|
||||
signal_code.fmt(Output.enable_ansi_colors_stderr),
|
||||
}) catch {};
|
||||
},
|
||||
.err => |_| {
|
||||
task.err = error.GitCheckoutFailed;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
},
|
||||
else => {
|
||||
task.err = error.UnexpectedGitStatus;
|
||||
task.status = .fail;
|
||||
task.data = .{ .git_checkout = .{} };
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
// Push the task to the resolve queue
|
||||
this.manager.resolve_tasks.push(task);
|
||||
// Don't decrement pending tasks here - runTasks will do it when processing the task
|
||||
this.manager.wake();
|
||||
|
||||
this.deinit();
|
||||
}
|
||||
|
||||
/// Exit-handler callback registered on the spawned git process via
/// `process.setExitHandler(this)`. Records that the process exited and
/// defers completion to `maybeFinished` (which also waits on the pipes).
pub fn onProcessExit(this: *GitCommandRunner, proc: *Process, _: bun.spawn.Status, _: *const bun.spawn.Rusage) void {
    // Only act on the process this runner is currently tracking; a stale
    // callback from an earlier spawn is ignored with a debug warning.
    if (this.process == proc) {
        this.has_called_process_exit = true;
        this.maybeFinished();
        return;
    }
    Output.debugWarn("<d>[GitCommandRunner]<r> onProcessExit called with wrong process", .{});
}
|
||||
|
||||
/// Tear down the tracked child process and both output readers, leaving the
/// runner in a clean state ready for another spawn.
pub fn resetPolls(this: *GitCommandRunner) void {
    if (comptime Environment.allow_assert) {
        // All pipe fds must already be drained/closed before a reset.
        bun.assert(this.remaining_fds == 0);
    }

    // Detach and release the process handle, if one is still attached.
    if (this.process) |proc| {
        this.process = null;
        proc.close();
        proc.deref();
    }

    // Free reader resources, then replace both with fresh, inert readers.
    this.stdout.deinit();
    this.stderr.deinit();
    this.stdout = OutputReader.init(@This());
    this.stderr = OutputReader.init(@This());
}
|
||||
|
||||
/// Destroy the runner: close the child process and readers, then free `this`.
/// Must be called exactly once; all references to the runner are invalid after.
pub fn deinit(this: *GitCommandRunner) void {
    // Closes/derefs the process and resets both readers (asserts no live fds).
    this.resetPolls();
    this.ensureNotInHeap();

    // resetPolls re-initialized stdout/stderr with fresh readers; deinit those
    // too so no reader state outlives the runner. This is a deinit of the
    // freshly-init readers, not a double free — NOTE(review): confirm that
    // OutputReader.init leaves deinit safe to call with no started handle.
    this.stdout.deinit();
    this.stderr.deinit();

    // Poison the memory in safe builds, then free the heap allocation.
    this.* = undefined;
    bun.destroy(this);
}
|
||||
|
||||
/// Placeholder for `Task.threadpool_task.callback`. GitCommandRunner
/// completes its resolve tasks inline on the event loop, so the thread pool
/// must never invoke this callback.
fn dummyCallback(_: *ThreadPool.Task) void {
    // `unreachable` here would be undefined behavior in ReleaseFast if the
    // thread pool ever did run this task; `@panic` keeps the "never called"
    // contract but fails with a defined, diagnosable crash instead.
    @panic("GitCommandRunner.dummyCallback should never be invoked");
}
|
||||
|
||||
/// Report a failed git checkout to the package manager as a failed
/// `.git_checkout` resolve task, wake the manager, and destroy this runner.
fn handleCheckoutError(this: *GitCommandRunner, err: anyerror) void {
    const manager = this.manager;
    const checkout = &this.operation.checkout;

    const task = manager.preallocated_resolve_tasks.get();
    task.* = Task{
        .package_manager = manager,
        .log = logger.Log.init(manager.allocator),
        .tag = .git_checkout,
        .request = .{
            .git_checkout = .{
                .repo_dir = checkout.repo_dir,
                .dependency_id = checkout.dependency_id,
                .name = checkout.name,
                .url = checkout.url,
                .resolved = checkout.resolved,
                .env = DotEnv.Map{ .map = DotEnv.Map.HashTable.init(manager.allocator) },
                .resolution = checkout.resolution,
            },
        },
        .id = this.task_id,
        .threadpool_task = ThreadPool.Task{ .callback = &dummyCallback },
        .data = .{ .git_checkout = .{} },
        .status = .fail,
        .err = err,
        // Never apply patches when the checkout itself failed.
        .apply_patch_task = null,
    };

    manager.resolve_tasks.push(task);
    manager.wake();
    this.deinit();
}
|
||||
};
|
||||
|
||||
// Scratch buffer used to format git cache folder names (e.g. via
// PackageManager.cachedGitFolderNamePrint in the checkout success path).
// Shared mutable global — NOTE(review): assumes all users run on the single
// package-manager event-loop thread; confirm there is no concurrent use.
var folder_name_buf: [1024]u8 = undefined;
|
||||
|
||||
const std = @import("std");
|
||||
const Repository = @import("./repository.zig").Repository;
|
||||
|
||||
const DependencyID = @import("./install.zig").DependencyID;
|
||||
const ExtractData = @import("./install.zig").ExtractData;
|
||||
const PackageManager = @import("./install.zig").PackageManager;
|
||||
const PatchTask = @import("./install.zig").PatchTask;
|
||||
const Resolution = @import("./install.zig").Resolution;
|
||||
const Task = @import("./install.zig").Task;
|
||||
|
||||
const bun = @import("bun");
|
||||
const DotEnv = bun.DotEnv;
|
||||
const Environment = bun.Environment;
|
||||
const Output = bun.Output;
|
||||
const ThreadPool = bun.ThreadPool;
|
||||
const jsc = bun.jsc;
|
||||
const logger = bun.logger;
|
||||
const strings = bun.strings;
|
||||
const Process = bun.spawn.Process;
|
||||
@@ -247,6 +247,7 @@ pub const TextLockfile = @import("./lockfile/bun.lock.zig");
|
||||
pub const Bin = @import("./bin.zig").Bin;
|
||||
pub const FolderResolution = @import("./resolvers/folder_resolver.zig").FolderResolution;
|
||||
pub const LifecycleScriptSubprocess = @import("./lifecycle_script_runner.zig").LifecycleScriptSubprocess;
|
||||
pub const GitCommandRunner = @import("./git_command_runner.zig").GitCommandRunner;
|
||||
pub const PackageInstall = @import("./PackageInstall.zig").PackageInstall;
|
||||
pub const Repository = @import("./repository.zig").Repository;
|
||||
pub const Resolution = @import("./resolution.zig").Resolution;
|
||||
|
||||
@@ -406,6 +406,16 @@ pub const Repository = extern struct {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Handle shorthand formats like bitbucket:user/repo or gitlab:user/repo
|
||||
if (strings.indexOfChar(url, ':')) |colon_index| {
|
||||
const prefix = url[0..colon_index];
|
||||
if (Hosts.get(prefix)) |domain_suffix| {
|
||||
const path = url[colon_index + 1 ..];
|
||||
const result = std.fmt.bufPrint(&ssh_path_buf, "git@{s}{s}:{s}", .{ prefix, domain_suffix, path }) catch return null;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
if (strings.hasPrefixComptime(url, "git@") or strings.hasPrefixComptime(url, "ssh://")) {
|
||||
return url;
|
||||
}
|
||||
@@ -414,23 +424,21 @@ pub const Repository = extern struct {
|
||||
ssh_path_buf[0.."ssh://git@".len].* = "ssh://git@".*;
|
||||
var rest = ssh_path_buf["ssh://git@".len..];
|
||||
|
||||
const colon_index = strings.indexOfChar(url, ':');
|
||||
const colon_index = strings.indexOfChar(url, ':') orelse return null;
|
||||
|
||||
if (colon_index) |colon| {
|
||||
// make sure known hosts have `.com` or `.org`
|
||||
if (Hosts.get(url[0..colon])) |tld| {
|
||||
bun.copy(u8, rest, url[0..colon]);
|
||||
bun.copy(u8, rest[colon..], tld);
|
||||
rest[colon + tld.len] = '/';
|
||||
bun.copy(u8, rest[colon + tld.len + 1 ..], url[colon + 1 ..]);
|
||||
const out = ssh_path_buf[0 .. url.len + "ssh://git@".len + tld.len];
|
||||
return out;
|
||||
}
|
||||
// make sure known hosts have `.com` or `.org`
|
||||
if (Hosts.get(url[0..colon_index])) |tld| {
|
||||
bun.copy(u8, rest, url[0..colon_index]);
|
||||
bun.copy(u8, rest[colon_index..], tld);
|
||||
rest[colon_index + tld.len] = '/';
|
||||
bun.copy(u8, rest[colon_index + tld.len + 1 ..], url[colon_index + 1 ..]);
|
||||
const out = ssh_path_buf[0 .. url.len + "ssh://git@".len + tld.len];
|
||||
return out;
|
||||
}
|
||||
|
||||
bun.copy(u8, rest, url);
|
||||
if (colon_index) |colon| rest[colon] = '/';
|
||||
const final = ssh_path_buf[0 .. url.len + "ssh://".len];
|
||||
rest[colon_index] = '/';
|
||||
const final = ssh_path_buf[0 .. url.len + "ssh://git@".len];
|
||||
return final;
|
||||
}
|
||||
|
||||
@@ -442,6 +450,16 @@ pub const Repository = extern struct {
|
||||
return url;
|
||||
}
|
||||
|
||||
// Handle shorthand formats like bitbucket:user/repo or gitlab:user/repo
|
||||
if (strings.indexOfChar(url, ':')) |colon_index| {
|
||||
const prefix = url[0..colon_index];
|
||||
if (Hosts.get(prefix)) |domain_suffix| {
|
||||
const path = url[colon_index + 1 ..];
|
||||
const result = std.fmt.bufPrint(&final_path_buf, "https://{s}{s}/{s}", .{ prefix, domain_suffix, path }) catch return null;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
if (strings.hasPrefixComptime(url, "ssh://")) {
|
||||
final_path_buf[0.."https".len].* = "https".*;
|
||||
bun.copy(u8, final_path_buf["https".len..], url["ssh".len..]);
|
||||
@@ -486,6 +504,7 @@ pub const Repository = extern struct {
|
||||
attempt: u8,
|
||||
) !std.fs.Dir {
|
||||
bun.analytics.Features.git_dependencies += 1;
|
||||
// Repository.download called
|
||||
const folder_name = try std.fmt.bufPrintZ(&folder_name_buf, "{any}.git", .{
|
||||
bun.fmt.hexIntLower(task_id.get()),
|
||||
});
|
||||
|
||||
90
test/cli/install/git-dependency.test.ts
Normal file
90
test/cli/install/git-dependency.test.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import { spawnSync } from "bun";
|
||||
import { expect, test } from "bun:test";
|
||||
import { existsSync } from "fs";
|
||||
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
|
||||
import { join } from "path";
|
||||
|
||||
test("install github dependency", async () => {
|
||||
const dir = tempDirWithFiles("test-github-install", {
|
||||
"package.json": JSON.stringify({
|
||||
name: "test-github-install",
|
||||
dependencies: {
|
||||
// Using github: shorthand which downloads as tarball
|
||||
"awesome-bun": "github:oven-sh/awesome-bun",
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const result = spawnSync({
|
||||
cmd: [bunExe(), "install"],
|
||||
env: bunEnv,
|
||||
cwd: dir,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.stderr.toString()).not.toContain("error");
|
||||
|
||||
// Check that the package was installed
|
||||
const packagePath = join(dir, "node_modules", "awesome-bun");
|
||||
expect(existsSync(packagePath)).toBe(true);
|
||||
|
||||
// Should have README.md
|
||||
const readmePath = join(packagePath, "README.md");
|
||||
expect(existsSync(readmePath)).toBe(true);
|
||||
});
|
||||
|
||||
test("install git+https URL dependency", async () => {
|
||||
const dir = tempDirWithFiles("test-git-url", {
|
||||
"package.json": JSON.stringify({
|
||||
name: "test-git-url",
|
||||
dependencies: {
|
||||
// Using git+ prefix which triggers git clone - use a smaller repo
|
||||
"awesome-bun": "git+https://github.com/oven-sh/awesome-bun.git#main",
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const result = spawnSync({
|
||||
cmd: [bunExe(), "install"],
|
||||
env: bunEnv,
|
||||
cwd: dir,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.stderr.toString()).not.toContain("error");
|
||||
|
||||
// Check that the package was installed
|
||||
const packagePath = join(dir, "node_modules", "awesome-bun");
|
||||
expect(existsSync(packagePath)).toBe(true);
|
||||
});
|
||||
|
||||
test("install git URL without commit hash", async () => {
|
||||
const dir = tempDirWithFiles("test-git-no-hash", {
|
||||
"package.json": JSON.stringify({
|
||||
name: "test-git-no-hash",
|
||||
dependencies: {
|
||||
// Using HEAD of default branch
|
||||
"awesome-bun-2": "git+https://github.com/oven-sh/awesome-bun.git",
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const result = spawnSync({
|
||||
cmd: [bunExe(), "install"],
|
||||
env: bunEnv,
|
||||
cwd: dir,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.stderr.toString()).not.toContain("error");
|
||||
|
||||
// Check that the package was installed
|
||||
const packagePath = join(dir, "node_modules", "awesome-bun-2");
|
||||
expect(existsSync(packagePath)).toBe(true);
|
||||
});
|
||||
@@ -4,7 +4,7 @@
|
||||
"!= alloc.ptr": 0,
|
||||
"!= allocator.ptr": 0,
|
||||
".arguments_old(": 279,
|
||||
".stdDir()": 40,
|
||||
".stdDir()": 39,
|
||||
".stdFile()": 18,
|
||||
"// autofix": 168,
|
||||
": [a-zA-Z0-9_\\.\\*\\?\\[\\]\\(\\)]+ = undefined,": 228,
|
||||
|
||||
Reference in New Issue
Block a user