Mirror of https://github.com/oven-sh/bun, synced 2026-02-12 11:59:00 +00:00

Bugfixes to install (#1848)

@@ -419,7 +419,16 @@ pub const Version = struct {
             // newspeak/repo
             // npm:package@1.2.3
             'n' => {
-                if (strings.hasPrefixComptime(dependency, "npm:")) return .npm;
+                if (strings.hasPrefixComptime(dependency, "npm:") and dependency.len > "npm:".len) {
+                    const remain = dependency["npm:".len + @boolToInt(dependency["npm:".len] == '@') ..];
+                    for (remain) |c, i| {
+                        if (c == '@') {
+                            return infer(remain[i + 1 ..]);
+                        }
+                    }
+
+                    return .npm;
+                }
             },
             // v1.2.3
             // verilog.tar.gz

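The new branch above makes "npm:" aliases participate in tag inference: after skipping the "npm:" prefix and an optional leading scope "@", whatever follows the next "@" is fed back into infer, and a bare alias such as "npm:foo" still counts as an npm dependency. A minimal standalone sketch of that rule (not the repo's code; aliasSpec is a hypothetical helper, and plain std.mem calls stand in for bun's strings utilities):

    const std = @import("std");

    /// Return the part of an "npm:" alias that the tag would be inferred from.
    fn aliasSpec(dependency: []const u8) []const u8 {
        const prefix = "npm:";
        std.debug.assert(std.mem.startsWith(u8, dependency, prefix) and dependency.len > prefix.len);
        var remain = dependency[prefix.len..];
        // Skip a leading '@' so scoped names like "@scope/pkg" are not split at the scope.
        if (remain[0] == '@') remain = remain[1..];
        // Everything after the next '@' is the version/tag part of the alias.
        if (std.mem.indexOfScalar(u8, remain, '@')) |at| return remain[at + 1 ..];
        // No version/tag part: "npm:foo" stays a plain npm dependency.
        return remain;
    }

    test "alias spec extraction" {
        try std.testing.expectEqualStrings("1.2.3", aliasSpec("npm:foo@1.2.3"));
        try std.testing.expectEqualStrings("latest", aliasSpec("npm:@scope/foo@latest"));
        try std.testing.expectEqualStrings("foo", aliasSpec("npm:foo"));
    }

So "npm:foo@1.2.3" ends up classified by "1.2.3" (an npm version), while "npm:@scope/foo@latest" ends up classified by "latest" (a dist-tag).
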
@@ -459,11 +468,20 @@ pub const Version = struct {
         }
     };
 
+    const TagInfo = struct {
+        name: String,
+        tag: String,
+
+        fn eql(this: TagInfo, that: TagInfo, this_buf: []const u8, that_buf: []const u8) bool {
+            return this.name.eql(that.name, this_buf, that_buf) and this.tag.eql(that.tag);
+        }
+    };
+
     pub const Value = union {
         uninitialized: void,
 
         npm: NpmInfo,
-        dist_tag: String,
+        dist_tag: TagInfo,
         tarball: URI,
         folder: String,
 

@@ -537,7 +555,8 @@ pub fn parseWithTag(
     var input = dependency;
     const name = if (strings.hasPrefixComptime(input, "npm:")) sliced.sub(brk: {
         var str = input["npm:".len..];
-        var i: usize = 0;
+        var i: usize = @boolToInt(str.len > 0 and str[0] == '@');
+
         while (i < str.len) : (i += 1) {
             if (str[i] == '@') {
                 input = str[i + 1 ..];

@@ -576,9 +595,49 @@ pub fn parseWithTag(
            };
        },
        .dist_tag => {
+           var tag_to_use: String = sliced.value();
+
+           const actual = if (strings.hasPrefixComptime(dependency, "npm:") and dependency.len > "npm:".len)
+               // npm:@foo/bar@latest
+               sliced.sub(brk: {
+                   var i: usize = "npm:".len;
+
+                   // npm:@foo/bar@latest
+                   //     ^
+                   i += @boolToInt(dependency[i] == '@');
+
+                   while (i < dependency.len) : (i += 1) {
+                       // npm:@foo/bar@latest
+                       //             ^
+                       if (dependency[i] == '@') {
+                           break;
+                       }
+                   }
+
+                   tag_to_use = sliced.sub(dependency[i + 1 ..]).value();
+                   if (tag_to_use.isEmpty()) {
+                       tag_to_use = String.from("latest");
+                   }
+
+                   break :brk dependency["npm:".len..i];
+               }).value()
+           else
+               alias;
+
+           // name should never be empty
+           std.debug.assert(!actual.isEmpty());
+
+           // tag should never be empty
+           std.debug.assert(!tag_to_use.isEmpty());
+
            return Version{
                .literal = sliced.value(),
-               .value = .{ .dist_tag = sliced.value() },
+               .value = .{
+                   .dist_tag = .{
+                       .name = actual,
+                       .tag = tag_to_use,
+                   },
+               },
                .tag = .dist_tag,
            };
        },

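For aliased dist-tag dependencies such as "bar": "npm:@foo/bar@latest", the tag now has to be resolved against the real package name rather than the alias, which is why Value.dist_tag carries both a name and a tag. A rough sketch of the split performed above, using a hypothetical splitAliasedDistTag helper over plain slices (the real code goes through SlicedString/String and defaults the tag to "latest" the same way):

    const std = @import("std");

    const NameAndTag = struct { name: []const u8, tag: []const u8 };

    /// Split "npm:<name>[@<tag>]" into the real package name and its dist-tag,
    /// falling back to "latest" when the tag part is missing or empty.
    fn splitAliasedDistTag(dependency: []const u8) NameAndTag {
        const prefix = "npm:";
        std.debug.assert(std.mem.startsWith(u8, dependency, prefix) and dependency.len > prefix.len);

        var i: usize = prefix.len;
        // npm:@foo/bar@latest
        //     ^ scope separator, not the name/tag separator
        if (dependency[i] == '@') i += 1;
        while (i < dependency.len and dependency[i] != '@') i += 1;

        const name = dependency[prefix.len..i];
        const tag = if (i + 1 < dependency.len) dependency[i + 1 ..] else "latest";
        return .{ .name = name, .tag = tag };
    }

    test "aliased dist-tag split" {
        const scoped = splitAliasedDistTag("npm:@foo/bar@latest");
        try std.testing.expectEqualStrings("@foo/bar", scoped.name);
        try std.testing.expectEqualStrings("latest", scoped.tag);

        const bare = splitAliasedDistTag("npm:bar");
        try std.testing.expectEqualStrings("bar", bare.name);
        try std.testing.expectEqualStrings("latest", bare.tag);
    }
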
@@ -2092,13 +2092,16 @@ pub const PackageManager = struct {
                    package.resolution.value.npm.version,
                );
 
-               var network_task = (try this.generateNetworkTaskForTarball(task_id, manifest.str(find_result.package.tarball_url), package)).?;
+               if (try this.generateNetworkTaskForTarball(task_id, manifest.str(find_result.package.tarball_url), package)) |network_task| {
+                   return ResolvedPackageResult{
+                       .package = package,
+                       .is_first_time = true,
+                       .network_task = network_task,
+                   };
+               }
 
-               return ResolvedPackageResult{
-                   .package = package,
-                   .is_first_time = true,
-                   .network_task = network_task,
-               };
+               // if we are in the middle of extracting this package, we should wait for it to finish
+               return ResolvedPackageResult{ .package = package };
            },
            else => unreachable,
        }

@@ -2108,7 +2111,9 @@ pub const PackageManager = struct {
 
    pub fn generateNetworkTaskForTarball(this: *PackageManager, task_id: u64, url: string, package: Lockfile.Package) !?*NetworkTask {
        const dedupe_entry = try this.network_dedupe_map.getOrPut(this.allocator, task_id);
-       if (dedupe_entry.found_existing) return null;
+       if (dedupe_entry.found_existing) {
+           return null;
+       }
 
        var network_task = this.getNetworkTask();
 

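Returning an optional here (instead of unwrapping with ".?") means a tarball whose download has already been scheduled no longer crashes resolution; the caller above simply returns a result without a network task. The dedupe itself is the usual getOrPut pattern. A small sketch, assuming a plain hash map keyed by task id (the real network_dedupe_map may be a different map type):

    const std = @import("std");

    test "tarball network tasks are deduplicated by task id" {
        const allocator = std.testing.allocator;
        var network_dedupe_map = std.AutoHashMapUnmanaged(u64, void){};
        defer network_dedupe_map.deinit(allocator);

        const task_id: u64 = 0xdead_beef;

        // First request for this tarball: not seen before, so a task gets created.
        const first = try network_dedupe_map.getOrPut(allocator, task_id);
        try std.testing.expect(!first.found_existing);

        // Same task id again: found_existing is set, so the function above would
        // return null instead of scheduling a duplicate download.
        const second = try network_dedupe_map.getOrPut(allocator, task_id);
        try std.testing.expect(second.found_existing);
    }
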
@@ -2196,7 +2201,7 @@ pub const PackageManager = struct {
        // Resolve the version from the loaded NPM manifest
        const manifest = this.manifests.getPtr(name_hash) orelse return null; // manifest might still be downloading. This feels unreliable.
        const find_result: Npm.PackageManifest.FindResult = switch (version.tag) {
-           .dist_tag => manifest.findByDistTag(this.lockfile.str(version.value.dist_tag)),
+           .dist_tag => manifest.findByDistTag(this.lockfile.str(version.value.dist_tag.tag)),
            .npm => manifest.findBestVersion(version.value.npm.version),
            else => unreachable,
        } orelse return switch (version.tag) {

@@ -2417,10 +2422,11 @@ pub const PackageManager = struct {
        const alias = dependency.name;
        const name = switch (dependency.version.tag) {
            .npm => dependency.version.value.npm.name,
+           .dist_tag => dependency.version.value.dist_tag.name,
            else => alias,
        };
        const name_hash = switch (dependency.version.tag) {
-           .npm => Lockfile.stringHash(this.lockfile.str(name)),
+           .dist_tag, .npm => Lockfile.stringHash(this.lockfile.str(name)),
            else => dependency.name_hash,
        };
        const version = dependency.version;

@@ -2470,7 +2476,7 @@ pub const PackageManager = struct {
                "package \"{s}\" with tag \"{s}\" not found, but package exists",
                .{
                    this.lockfile.str(name),
-                   this.lockfile.str(version.value.dist_tag),
+                   this.lockfile.str(version.value.dist_tag.tag),
                },
            ) catch unreachable;
        }

@@ -1517,7 +1517,7 @@ pub fn getPackageID(
        std.debug.assert(id != invalid_package_id - 1);
    }
 
-   if (resolutions[id].eql(
+   if (id < resolutions.len and resolutions[id].eql(
        resolution,
        this.buffers.string_bytes.items,
        this.buffers.string_bytes.items,

@@ -1545,7 +1545,7 @@ pub fn getPackageID(
        std.debug.assert(id != invalid_package_id);
    }
 
-   if (id == invalid_package_id - 1) return null;
+   if (id >= resolutions.len) return null;
 
    if (resolutions[id].eql(resolution, this.buffers.string_bytes.items, this.buffers.string_bytes.items)) {
        return id;

@@ -2684,7 +2684,7 @@ pub const Package = extern struct {
                defer fallback.fixed_buffer_allocator.reset();
                const path = item.asString(allocator) orelse return error.InvalidPackageJSON;
 
-               var workspace_dir = std.fs.cwd().openDir(path, .{}) catch |err| {
+               var workspace_dir = std.fs.cwd().openIterableDir(path, .{}) catch |err| {
                    if (err == error.FileNotFound) {
                        log.addErrorFmt(
                            &source,

@@ -2713,8 +2713,14 @@ pub const Package = extern struct {
                };
                defer workspace_dir.close();
 
-               var workspace_file = workspace_dir.openFile("package.json", .{ .mode = .read_only }) catch |err| {
-                   log.addErrorFmt(&source, item.loc, allocator, "{s} opening package.json for workspace package \"{s}\" from \"{s}\"", .{ @errorName(err), path, std.os.getcwd(allocator.alloc(u8, bun.MAX_PATH_BYTES) catch unreachable) catch unreachable }) catch {};
+               var workspace_file = workspace_dir.dir.openFile("package.json", .{ .mode = .read_only }) catch |err| {
+                   log.addErrorFmt(
+                       &source,
+                       item.loc,
+                       allocator,
+                       "{s} opening package.json for workspace package \"{s}\" from \"{s}\"",
+                       .{ @errorName(err), path, std.os.getcwd(allocator.alloc(u8, bun.MAX_PATH_BYTES) catch unreachable) catch unreachable },
+                   ) catch {};
                    workspace_names[i] = "";
                    // report errors for multiple workspaces
                    continue;

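The switch to openIterableDir matches the Zig 0.10-era std.fs split between Dir and IterableDir: only an IterableDir can be iterated, and ordinary file opens go through its .dir field, hence the workspace_dir.dir.openFile(...) call above. A small sketch under that same std API (later Zig versions folded IterableDir back into Dir, so this is version-specific; the file name is hypothetical):

    const std = @import("std");

    test "open a file inside an iterable directory (Zig 0.10-era std.fs)" {
        // The wrapper is what gets iterated...
        var workspace_dir = try std.fs.cwd().openIterableDir(".", .{});
        defer workspace_dir.close();

        var it = workspace_dir.iterate();
        _ = try it.next();

        // ...while plain file opens use the underlying Dir.
        const file = workspace_dir.dir.openFile("package.json", .{ .mode = .read_only }) catch return;
        file.close();
    }
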
@@ -3398,6 +3404,11 @@ const Buffers = struct {
                        try alias_map.put(allocator, this.resolutions.items[i], dep.name);
                    }
                },
+               .dist_tag => {
+                   if (!dep.name.eql(dep.version.value.dist_tag.name, string_buf, string_buf)) {
+                       try alias_map.put(allocator, this.resolutions.items[i], dep.name);
+                   }
+               },
                else => {},
            }
        }

@@ -577,7 +577,13 @@ pub const PackageManifest = struct {
            timer = std.time.Timer.start() catch @panic("timer fail");
        }
        defer cache_file.close();
-       var bytes = try cache_file.readToEndAlloc(allocator, std.math.maxInt(u32));
+       var bytes = try cache_file.readToEndAllocOptions(
+           allocator,
+           std.math.maxInt(u32),
+           cache_file.getEndPos() catch null,
+           @alignOf(u8),
+           null,
+       );
        errdefer allocator.free(bytes);
        if (bytes.len < header_bytes.len) return null;
        const result = try readAll(bytes);

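readToEndAllocOptions lets the manifest-cache read pass the file size as a hint, so the buffer is allocated once up front instead of being grown repeatedly; the "catch null" keeps the call working when the size cannot be determined. A small sketch against the same Zig 0.10-era signature, with a hypothetical temporary file standing in for the manifest cache:

    const std = @import("std");

    test "read a cache file with a size hint" {
        var tmp = std.testing.tmpDir(.{});
        defer tmp.cleanup();
        try tmp.dir.writeFile("manifest.cache", "not a real manifest");

        const cache_file = try tmp.dir.openFile("manifest.cache", .{ .mode = .read_only });
        defer cache_file.close();

        // size hint = file size, natural u8 alignment, no sentinel.
        const bytes = try cache_file.readToEndAllocOptions(
            std.testing.allocator,
            std.math.maxInt(u32),
            cache_file.getEndPos() catch null,
            @alignOf(u8),
            null,
        );
        defer std.testing.allocator.free(bytes);

        try std.testing.expectEqualStrings("not a real manifest", bytes);
    }
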
@@ -19,6 +19,12 @@ pub const String = extern struct {
    /// 3. If the final bit is not set, then it's a string that is stored in an external buffer.
    bytes: [max_inline_len]u8 = [8]u8{ 0, 0, 0, 0, 0, 0, 0, 0 },
 
+   /// Create an inline string
+   pub fn from(inlinable_buffer: []const u8) String {
+       std.debug.assert(inlinable_buffer.len <= max_inline_len);
+       return String.init(inlinable_buffer, inlinable_buffer);
+   }
+
    pub const Tag = enum {
        small,
        big,

@@ -829,17 +835,13 @@ pub const Version = extern struct {
                .pre => {
                    result.tag.pre = sliced_string.sub(input[start..i]).external();
                    // a pre can contain multiple consecutive tags
-                   if (comptime Environment.isDebug) {
-                       std.debug.assert(!strings.startsWithChar(result.tag.pre.slice(sliced_string.buf), '-'));
-                   }
+                   // checking for "-" prefix is not enough, as --canary.67e7966.0 is a valid tag
                    state = State.none;
                },
                .build => {
                    // a build can contain multiple consecutive tags
                    result.tag.build = sliced_string.sub(input[start..i]).external();
-                   if (comptime Environment.isDebug) {
-                       std.debug.assert(!strings.startsWithChar(result.tag.build.slice(sliced_string.buf), '+'));
-                   }
+
                    state = State.none;
                },
            }

@@ -1789,6 +1791,15 @@ pub const Query = struct {
                    },
                }
            } else if (count == 0) {
+               // From a semver perspective, treat "--foo" the same as "-foo"
+               // example: foo/bar@1.2.3@--canary.24
+               //                        ^
+               if (token.tag == .none) {
+                   is_or = false;
+                   token.wildcard = .none;
+                   prev_token.tag = .none;
+                   continue;
+               }
                try list.andRange(token.toRange(parse_result.version));
            } else if (is_or) {
                try list.orRange(token.toRange(parse_result.version));
