feat(install): automatically migrate package-lock.json to bun.lockb (#6352)

* migrate package-lock.json to bun.lockb in memory when bun install finds no bun.lockb

* add a `bun pm migrate` subcommand

* add an install fixture and a snapshot test (run in a tmp dir)

* make the test easier to pass because bun install doesn't handle obscure lockfile edge cases

* prettier errors

* Add more safety checks to Integrity

* More careful lifetime handling

* Fix linux debugger issue

---------

Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Author: dave caruso
Date: 2023-10-11 02:27:07 -07:00
Committed by: GitHub
parent 6a17ebe669
commit 1bf28e0d77
53 changed files with 41622 additions and 252 deletions

.vscode/launch.json

@@ -306,14 +306,14 @@
"request": "launch",
"name": "bun install",
"program": "bun-debug",
"args": ["install"],
"args": ["install", "--frozen-lockfile"],
"cwd": "${fileDirname}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_Lockfile": "1"
}
},
{
"type": "lldb",
"request": "launch",

build.zig

@@ -145,7 +145,22 @@ pub fn build(b: *Build) !void {
};
}
const required_zig_version = "0.12.0-dev.163+6780a6bbf";
pub fn build_(b: *Build) !void {
if (!std.mem.eql(u8, @import("builtin").zig_version_string, required_zig_version)) {
const colors = std.io.getStdErr().supportsAnsiEscapeCodes();
std.debug.print(
"{s}WARNING:\nBun requires zig version '{s}', but found '{s}', build may fail...\nMake sure you installed the right version as per https://bun.sh/docs/project/development#install-zig\n{s}You can update to the right version using 'zigup {s}'\n\n",
.{
if (colors) "\x1b[1;33m" else "",
required_zig_version,
@import("builtin").zig_version_string,
if (colors) "\x1b[0m" else "",
required_zig_version,
},
);
}
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options

bun.lockb (binary file not shown)


@@ -1,12 +1,30 @@
#include "root.h"
#include "JavaScriptCore/VM.h"
// On Linux, JavaScriptCore uses signals to suspend/resume threads.
// When `.acquireAccess` is called, the signal might be raised, and LLDB
// may catch it. We want to avoid that. Ideally we would skip this code
// only while a debugger is attached, but detecting LLDB is hard, so we
// just disable it on Linux in debug builds.
#ifndef ACQUIRE_RELEASE_HEAP_ACCESS
#if OS(DARWIN)
#define ACQUIRE_RELEASE_HEAP_ACCESS 1
#else
#ifndef BUN_DEBUG
#define ACQUIRE_RELEASE_HEAP_ACCESS 1
#endif
#endif
#endif
extern "C" void bun_on_tick_before(JSC::VM* vm)
{
// Let the GC do some work while we are idle
#if ACQUIRE_RELEASE_HEAP_ACCESS
vm->heap.releaseAccess();
#endif
}
extern "C" void bun_on_tick_after(JSC::VM* vm)
{
#if ACQUIRE_RELEASE_HEAP_ACCESS
vm->heap.acquireAccess();
#endif
}


@@ -243,6 +243,32 @@ pub const PackageManagerCommand = struct {
}
}
Global.exit(0);
} else if (strings.eqlComptime(subcommand, "migrate")) {
if (!pm.options.enable.force_save_lockfile) try_load_bun: {
std.fs.cwd().accessZ("bun.lockb", .{ .mode = .read_only }) catch break :try_load_bun;
Output.prettyErrorln(
\\<r><red>error<r>: bun.lockb already exists
\\run with --force to overwrite
, .{});
Global.exit(1);
}
const load_lockfile = @import("../install/migration.zig").detectAndLoadOtherLockfile(
pm.lockfile,
ctx.allocator,
pm.log,
pm.options.lockfile_path,
);
if (load_lockfile == .not_found) {
Output.prettyErrorln(
\\<r><red>error<r>: could not find any other lockfile
, .{});
Global.exit(1);
}
handleLoadLockfileErrors(load_lockfile, pm);
const lockfile = load_lockfile.ok;
lockfile.saveToDisk(pm.options.lockfile_path);
Global.exit(0);
}
@@ -258,6 +284,7 @@ pub const PackageManagerCommand = struct {
\\ bun pm <b>hash-print<r> print the hash stored in the current lockfile
\\ bun pm <b>cache<r> print the path to the cache folder
\\ bun pm <b>cache rm<r> clear the cache
\\ bun pm <b>migrate<r> migrate another package manager's lockfile without installing anything
\\
\\Learn more about these at <magenta>https://bun.sh/docs/install/utilities<r>
\\
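Taken together with the help text above, the new subcommand can be exercised like this (a usage sketch; the `--force` spelling comes from the error message in the code):

    bun pm migrate           # convert another package manager's lockfile to bun.lockb without installing
    bun pm migrate --force   # overwrite an existing bun.lockb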

src/install/dependency.zig

@@ -49,7 +49,7 @@ version: Dependency.Version = .{},
/// - `peerDependencies`
/// Technically, having the same package name specified under multiple fields is invalid
/// But we don't want to allocate extra arrays for them. So we use a bitfield instead.
behavior: Behavior = .uninitialized,
behavior: Behavior = Behavior.uninitialized,
/// Sorting order for dependencies is:
/// 1. [ `peerDependencies`, `optionalDependencies`, `devDependencies`, `dependencies` ]
@@ -147,7 +147,7 @@ pub fn toDependency(
return Dependency{
.name = name,
.name_hash = @as(u64, @bitCast(this[8..16].*)),
.behavior = @as(Dependency.Behavior, @enumFromInt(this[16])),
.behavior = @bitCast(this[16]),
.version = Dependency.Version.toVersion(name, this[17..this.len].*, ctx),
};
}
@@ -156,7 +156,7 @@ pub fn toExternal(this: Dependency) External {
var bytes: External = undefined;
bytes[0..this.name.bytes.len].* = this.name.bytes;
bytes[8..16].* = @as([8]u8, @bitCast(this.name_hash));
bytes[16] = @intFromEnum(this.behavior);
bytes[16] = @bitCast(this.behavior);
bytes[17..bytes.len].* = this.version.toExternal();
return bytes;
}
@@ -221,12 +221,16 @@ pub inline fn isGitHubRepoPath(dependency: string) bool {
return hash_index != dependency.len - 1 and first_slash_index > 0 and first_slash_index != dependency.len - 1;
}
// Github allows for the following format of URL:
// https://github.com/<org>/<repo>/tarball/<ref>
// This is a legacy (but still supported) method of retrieving a tarball of an
// entire source tree at some git reference. (ref = branch, tag, etc. Note: branch
// can have arbitrary number of slashes)
/// Github allows for the following format of URL:
/// https://github.com/<org>/<repo>/tarball/<ref>
/// This is a legacy (but still supported) method of retrieving a tarball of an
/// entire source tree at some git reference. (ref = branch, tag, etc. Note: branch
/// can have arbitrary number of slashes)
///
/// This also checks for a github url that ends with ".tar.gz"
pub inline fn isGitHubTarballPath(dependency: string) bool {
if (isTarball(dependency)) return true;
var parts = strings.split(dependency, "/");
var n_parts: usize = 0;
@@ -248,7 +252,7 @@ pub inline fn isTarball(dependency: string) bool {
}
pub const Version = struct {
tag: Dependency.Version.Tag = .uninitialized,
tag: Tag = .uninitialized,
literal: String = .{},
value: Value = .{ .uninitialized = {} },
@@ -610,7 +614,7 @@ pub const Version = struct {
}
};
const NpmInfo = struct {
pub const NpmInfo = struct {
name: String,
version: Semver.Query.Group,
@@ -619,7 +623,7 @@ pub const Version = struct {
}
};
const TagInfo = struct {
pub const TagInfo = struct {
name: String,
tag: String,
@@ -628,7 +632,7 @@ pub const Version = struct {
}
};
const TarballInfo = struct {
pub const TarballInfo = struct {
uri: URI,
package_name: String = .{},
@@ -670,7 +674,8 @@ pub inline fn parse(
sliced: *const SlicedString,
log: ?*logger.Log,
) ?Version {
return parseWithOptionalTag(allocator, alias, dependency, null, sliced, log);
const dep = std.mem.trimLeft(u8, dependency, " \t\n\r");
return parseWithTag(allocator, alias, dep, Version.Tag.infer(dep), sliced, log);
}
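A minimal sketch of what the new trimming in parse() buys; the input string is hypothetical, but `Version.Tag.infer` is the same function the rewritten parse() calls:

    const std = @import("std");

    test "leading whitespace no longer changes how a version string parses" {
        // Migrated lockfiles can carry stray whitespace around specifiers.
        const trimmed = std.mem.trimLeft(u8, " \t^1.0.0", " \t\n\r");
        try std.testing.expectEqualStrings("^1.0.0", trimmed);
        // parse() then hands `trimmed` to Version.Tag.infer, so " ^1.0.0"
        // and "^1.0.0" now infer the same tag.
    }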
pub fn parseWithOptionalTag(
@@ -888,6 +893,12 @@ pub fn parseWithTag(
.literal = sliced.value(),
.value = .{ .tarball = .{ .uri = .{ .local = sliced.sub(dependency[7..]).value() } } },
};
} else if (strings.hasPrefixComptime(dependency, "file:")) {
return .{
.tag = .tarball,
.literal = sliced.value(),
.value = .{ .tarball = .{ .uri = .{ .local = sliced.sub(dependency[5..]).value() } } },
};
} else if (strings.contains(dependency, "://")) {
if (log_) |log| log.addErrorFmt(null, logger.Loc.Empty, allocator, "invalid or unsupported dependency \"{s}\"", .{dependency}) catch unreachable;
return null;
@@ -950,78 +961,83 @@ pub fn parseWithTag(
}
}
pub const Behavior = enum(u8) {
uninitialized = 0,
_,
pub const Behavior = packed struct(u8) {
pub const uninitialized: Behavior = .{};
pub const normal: u8 = 1 << 1;
pub const optional: u8 = 1 << 2;
pub const dev: u8 = 1 << 3;
pub const peer: u8 = 1 << 4;
pub const workspace: u8 = 1 << 5;
// These padding fields keep the bit layout compatible
// with older versions of lockfile v2.
_unused_1: u1 = 0,
normal: bool = false,
optional: bool = false,
dev: bool = false,
peer: bool = false,
workspace: bool = false,
_unused_2: u2 = 0,
pub const normal = Behavior{ .normal = true };
pub const optional = Behavior{ .optional = true };
pub const dev = Behavior{ .dev = true };
pub const peer = Behavior{ .peer = true };
pub const workspace = Behavior{ .workspace = true };
pub inline fn isNormal(this: Behavior) bool {
return (@intFromEnum(this) & Behavior.normal) != 0;
return this.normal;
}
pub inline fn isOptional(this: Behavior) bool {
return (@intFromEnum(this) & Behavior.optional) != 0 and !this.isPeer();
return this.optional and !this.isPeer();
}
pub inline fn isDev(this: Behavior) bool {
return (@intFromEnum(this) & Behavior.dev) != 0;
return this.dev;
}
pub inline fn isPeer(this: Behavior) bool {
return (@intFromEnum(this) & Behavior.peer) != 0;
return this.peer;
}
pub inline fn isWorkspace(this: Behavior) bool {
return (@intFromEnum(this) & Behavior.workspace) != 0;
return this.workspace;
}
pub inline fn setNormal(this: Behavior, value: bool) Behavior {
if (value) {
return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.normal));
} else {
return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.normal));
}
var b = this;
b.normal = value;
return b;
}
pub inline fn setOptional(this: Behavior, value: bool) Behavior {
if (value) {
return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.optional));
} else {
return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.optional));
}
var b = this;
b.optional = value;
return b;
}
pub inline fn setDev(this: Behavior, value: bool) Behavior {
if (value) {
return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.dev));
} else {
return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.dev));
}
var b = this;
b.dev = value;
return b;
}
pub inline fn setPeer(this: Behavior, value: bool) Behavior {
if (value) {
return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.peer));
} else {
return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.peer));
}
var b = this;
b.peer = value;
return b;
}
pub inline fn setWorkspace(this: Behavior, value: bool) Behavior {
if (value) {
return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.workspace));
} else {
return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.workspace));
}
var b = this;
b.workspace = value;
return b;
}
pub inline fn eq(lhs: Behavior, rhs: Behavior) bool {
return @as(u8, @bitCast(lhs)) == @as(u8, @bitCast(rhs));
}
pub inline fn cmp(lhs: Behavior, rhs: Behavior) std.math.Order {
if (@intFromEnum(lhs) == @intFromEnum(rhs)) {
if (eq(lhs, rhs)) {
return .eq;
}
@@ -1074,4 +1090,42 @@ pub const Behavior = enum(u8) {
(features.peer_dependencies and this.isPeer()) or
this.isWorkspace();
}
pub fn format(self: Behavior, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
const fields = std.meta.fields(Behavior);
var num_fields: u8 = 0;
inline for (fields) |f| {
if (f.type == bool and @field(self, f.name)) {
num_fields += 1;
}
}
switch (num_fields) {
0 => try writer.writeAll("Behavior.uninitialized"),
1 => {
inline for (fields) |f| {
if (f.type == bool and @field(self, f.name)) {
try writer.writeAll("Behavior." ++ f.name);
break;
}
}
},
else => {
try writer.writeAll("Behavior{");
inline for (fields) |f| {
if (f.type == bool and @field(self, f.name)) {
try writer.writeAll(" " ++ f.name);
}
}
try writer.writeAll(" }");
},
}
}
comptime {
std.debug.assert(@as(u8, @bitCast(Behavior.normal)) == (1 << 1));
std.debug.assert(@as(u8, @bitCast(Behavior.optional)) == (1 << 2));
std.debug.assert(@as(u8, @bitCast(Behavior.dev)) == (1 << 3));
std.debug.assert(@as(u8, @bitCast(Behavior.peer)) == (1 << 4));
std.debug.assert(@as(u8, @bitCast(Behavior.workspace)) == (1 << 5));
}
};
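Because the struct is `packed struct(u8)`, each bool lands on exactly the bit the old enum constants used, which is what the comptime asserts above pin down. A self-contained sketch of that guarantee (test harness illustrative; field layout copied from the diff):

    const std = @import("std");

    const Behavior = packed struct(u8) {
        _unused_1: u1 = 0, // bit 0: legacy padding, kept for lockfile-v2 compatibility
        normal: bool = false, // bit 1
        optional: bool = false, // bit 2
        dev: bool = false, // bit 3
        peer: bool = false, // bit 4
        workspace: bool = false, // bit 5
        _unused_2: u2 = 0, // bits 6-7
    };

    test "bool fields land on the legacy flag bits" {
        try std.testing.expectEqual(@as(u8, 1 << 3), @as(u8, @bitCast(Behavior{ .dev = true })));
        // Round-trip through a raw byte, as toDependency/toExternal do:
        const back: Behavior = @bitCast(@as(u8, 1 << 4));
        try std.testing.expect(back.peer);
    }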

src/install/install.zig

@@ -123,8 +123,8 @@ pub fn ExternalSlice(comptime Type: type) type {
pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type {
return extern struct {
const alignment = alignment_ orelse @alignOf(*Type);
const Slice = @This();
pub const alignment = alignment_ orelse @alignOf(*Type);
pub const Slice = @This();
pub const Child: type = Type;
@@ -170,7 +170,7 @@ pub const ExternalStringMap = extern struct {
value: ExternalStringList = .{},
};
pub const PackageNameHash = u64;
pub const PackageNameHash = u64; // Use String.Builder.stringHash to compute this
pub const Aligner = struct {
pub fn write(comptime Type: type, comptime Writer: type, writer: Writer, pos: usize) !usize {
@@ -2618,7 +2618,7 @@ pub const PackageManager = struct {
if (comptime Environment.allow_assert) {
std.debug.assert(dependency_id < buffers.resolutions.items.len);
std.debug.assert(package_id < this.lockfile.packages.len);
std.debug.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
// std.debug.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
}
buffers.resolutions.items[dependency_id] = package_id;
const string_buf = buffers.string_bytes.items;
@@ -4430,6 +4430,9 @@ pub const PackageManager = struct {
manager.setPreinstallState(package_id, manager.lockfile, .done);
if (comptime @TypeOf(callbacks.onExtract) != void) {
if (ExtractCompletionContext == *PackageInstaller) {
extract_ctx.fixCachedLockfilePackageSlices();
}
callbacks.onExtract(extract_ctx, dependency_id, task.data.extract, comptime log_level);
}
@@ -6844,6 +6847,7 @@ pub const PackageManager = struct {
folder_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined,
install_count: usize = 0,
successfully_installed: Bitset,
tree_iterator: *Lockfile.Tree.Iterator,
// For linking native binaries, we only want to link after we've installed the companion dependencies
// We don't want to introduce dependent callbacks like that for every single package
@@ -6855,6 +6859,16 @@ pub const PackageManager = struct {
node_modules_folder: std.fs.IterableDir,
};
/// Call when you mutate the length of `lockfile.packages`
pub fn fixCachedLockfilePackageSlices(this: *PackageInstaller) void {
var packages = this.lockfile.packages.slice();
this.metas = packages.items(.meta);
this.names = packages.items(.name);
this.bins = packages.items(.bin);
this.resolutions = packages.items(.resolution);
this.tree_iterator.reload(this.lockfile);
}
/// Install versions of a package which are waiting on a network request
pub fn installEnqueuedPackages(
this: *PackageInstaller,
@@ -7463,38 +7477,38 @@ pub const PackageManager = struct {
var summary = PackageInstall.Summary{};
{
var parts = lockfile.packages.slice();
var metas = parts.items(.meta);
var names = parts.items(.name);
var dependencies = lockfile.buffers.dependencies.items;
const resolutions_buffer: []const PackageID = lockfile.buffers.resolutions.items;
const resolution_lists: []const Lockfile.PackageIDSlice = parts.items(.resolutions);
var resolutions = parts.items(.resolution);
var iterator = Lockfile.Tree.Iterator.init(lockfile);
var installer = PackageInstaller{
.manager = this,
.options = &this.options,
.metas = metas,
.bins = parts.items(.bin),
.root_node_modules_folder = node_modules_folder,
.names = names,
.resolutions = resolutions,
.lockfile = lockfile,
.node = &install_node,
.node_modules_folder = node_modules_folder,
.progress = progress,
.skip_verify_installed_version_number = skip_verify_installed_version_number,
.skip_delete = skip_delete,
.summary = &summary,
.global_bin_dir = this.options.global_bin_dir,
.force_install = force_install,
.install_count = lockfile.buffers.hoisted_dependencies.items.len,
.successfully_installed = try Bitset.initEmpty(
this.allocator,
lockfile.packages.len,
),
var installer: PackageInstaller = brk: {
// These slices potentially get resized during iteration
// so we want to make sure they're not accessible to the rest of this function
// to make mistakes harder
var parts = lockfile.packages.slice();
break :brk PackageInstaller{
.manager = this,
.options = &this.options,
.metas = parts.items(.meta),
.bins = parts.items(.bin),
.root_node_modules_folder = node_modules_folder,
.names = parts.items(.name),
.resolutions = parts.items(.resolution),
.lockfile = lockfile,
.node = &install_node,
.node_modules_folder = node_modules_folder,
.progress = progress,
.skip_verify_installed_version_number = skip_verify_installed_version_number,
.skip_delete = skip_delete,
.summary = &summary,
.global_bin_dir = this.options.global_bin_dir,
.force_install = force_install,
.install_count = lockfile.buffers.hoisted_dependencies.items.len,
.successfully_installed = try Bitset.initEmpty(
this.allocator,
lockfile.packages.len,
),
.tree_iterator = &iterator,
};
};
while (iterator.nextNodeModulesFolder()) |node_modules| {
@@ -7587,87 +7601,95 @@ pub const PackageManager = struct {
if (!installer.options.do.install_packages) return error.InstallFailed;
summary.successfully_installed = installer.successfully_installed;
outer: for (installer.platform_binlinks.items) |deferred| {
const dependency_id = deferred.dependency_id;
const package_id = resolutions_buffer[dependency_id];
const folder = deferred.node_modules_folder;
{
var parts = lockfile.packages.slice();
var metas = parts.items(.meta);
var names = parts.items(.name);
var dependencies = lockfile.buffers.dependencies.items;
const resolutions_buffer: []const PackageID = lockfile.buffers.resolutions.items;
const resolution_lists: []const Lockfile.PackageIDSlice = parts.items(.resolutions);
outer: for (installer.platform_binlinks.items) |deferred| {
const dependency_id = deferred.dependency_id;
const package_id = resolutions_buffer[dependency_id];
const folder = deferred.node_modules_folder;
const package_resolutions: []const PackageID = resolution_lists[package_id].get(resolutions_buffer);
const original_bin: Bin = installer.bins[package_id];
const package_resolutions: []const PackageID = resolution_lists[package_id].get(resolutions_buffer);
const original_bin: Bin = installer.bins[package_id];
for (package_resolutions) |resolved_id| {
if (resolved_id >= names.len) continue;
const meta: Lockfile.Package.Meta = metas[resolved_id];
for (package_resolutions) |resolved_id| {
if (resolved_id >= names.len) continue;
const meta: Lockfile.Package.Meta = metas[resolved_id];
// This is specifically for platform-specific binaries
if (meta.os == .all and meta.arch == .all) continue;
// This is specifically for platform-specific binaries
if (meta.os == .all and meta.arch == .all) continue;
// Don't attempt to link incompatible binaries
if (meta.isDisabled()) continue;
// Don't attempt to link incompatible binaries
if (meta.isDisabled()) continue;
const name = lockfile.str(&dependencies[dependency_id].name);
const name = lockfile.str(&dependencies[dependency_id].name);
if (!installer.has_created_bin) {
if (!this.options.global) {
if (comptime Environment.isWindows) {
std.os.mkdiratW(node_modules_folder.dir.fd, bun.strings.w(".bin"), 0) catch {};
} else {
node_modules_folder.dir.makeDirZ(".bin") catch {};
}
}
if (comptime Environment.isPosix)
Bin.Linker.umask = C.umask(0);
installer.has_created_bin = true;
}
var bin_linker = Bin.Linker{
.bin = original_bin,
.package_installed_node_modules = bun.toFD(folder.dir.fd),
.root_node_modules_folder = bun.toFD(node_modules_folder.dir.fd),
.global_bin_path = this.options.bin_path,
.global_bin_dir = this.options.global_bin_dir.dir,
.package_name = strings.StringOrTinyString.init(name),
.string_buf = lockfile.buffers.string_bytes.items,
.extern_string_buf = lockfile.buffers.extern_strings.items,
};
bin_linker.link(this.options.global);
if (bin_linker.err) |err| {
if (comptime log_level != .silent) {
const fmt = "\n<r><red>error:<r> linking <b>{s}<r>: {s}\n";
const args = .{ name, @errorName(err) };
if (comptime log_level.showProgress()) {
switch (Output.enable_ansi_colors) {
inline else => |enable_ansi_colors| {
this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
},
if (!installer.has_created_bin) {
if (!this.options.global) {
if (comptime Environment.isWindows) {
std.os.mkdiratW(node_modules_folder.dir.fd, bun.strings.w(".bin"), 0) catch {};
} else {
node_modules_folder.dir.makeDirZ(".bin") catch {};
}
} else {
Output.prettyErrorln(fmt, args);
}
if (comptime Environment.isPosix)
Bin.Linker.umask = C.umask(0);
installer.has_created_bin = true;
}
if (this.options.enable.fail_early) Global.crash();
var bin_linker = Bin.Linker{
.bin = original_bin,
.package_installed_node_modules = bun.toFD(folder.dir.fd),
.root_node_modules_folder = bun.toFD(node_modules_folder.dir.fd),
.global_bin_path = this.options.bin_path,
.global_bin_dir = this.options.global_bin_dir.dir,
.package_name = strings.StringOrTinyString.init(name),
.string_buf = lockfile.buffers.string_bytes.items,
.extern_string_buf = lockfile.buffers.extern_strings.items,
};
bin_linker.link(this.options.global);
if (bin_linker.err) |err| {
if (comptime log_level != .silent) {
const fmt = "\n<r><red>error:<r> linking <b>{s}<r>: {s}\n";
const args = .{ name, @errorName(err) };
if (comptime log_level.showProgress()) {
switch (Output.enable_ansi_colors) {
inline else => |enable_ansi_colors| {
this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
},
}
} else {
Output.prettyErrorln(fmt, args);
}
}
if (this.options.enable.fail_early) Global.crash();
}
continue :outer;
}
continue :outer;
}
if (comptime log_level != .silent) {
const fmt = "\n<r><yellow>warn:<r> no compatible binaries found for <b>{s}<r>\n";
const args = .{lockfile.str(&names[package_id])};
if (comptime log_level != .silent) {
const fmt = "\n<r><yellow>warn:<r> no compatible binaries found for <b>{s}<r>\n";
const args = .{lockfile.str(&names[package_id])};
if (comptime log_level.showProgress()) {
switch (Output.enable_ansi_colors) {
inline else => |enable_ansi_colors| {
this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
},
if (comptime log_level.showProgress()) {
switch (Output.enable_ansi_colors) {
inline else => |enable_ansi_colors| {
this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
},
}
} else {
Output.prettyErrorln(fmt, args);
}
} else {
Output.prettyErrorln(fmt, args);
}
}
}
@@ -7726,15 +7748,17 @@ pub const PackageManager = struct {
)
else
.{ .not_found = {} };
var root = Lockfile.Package{};
var needs_new_lockfile = load_lockfile_result != .ok or (load_lockfile_result.ok.buffers.dependencies.items.len == 0 and manager.package_json_updates.len > 0);
var needs_new_lockfile = load_lockfile_result != .ok or
(load_lockfile_result.ok.buffers.dependencies.items.len == 0 and manager.package_json_updates.len > 0);
// this defaults to false
// but we force allowing updates to the lockfile when you do bun add
var had_any_diffs = false;
manager.progress = .{};
// Step 2. Parse the package.json file
//
var package_json_source = logger.Source.initPathString(package_json_cwd, package_json_contents);
switch (load_lockfile_result) {
@@ -7750,6 +7774,9 @@ pub const PackageManager = struct {
.read_file => Output.prettyError("<r><red>error<r> reading lockfile:<r> {s}\n<r>", .{
@errorName(cause.value),
}),
.migrating => Output.prettyError("<r><red>error<r> migrating lockfile:<r> {s}\n<r>", .{
@errorName(cause.value),
}),
}
if (manager.options.enable.fail_early) {

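A note on `fixCachedLockfilePackageSlices` above: `PackageInstaller` caches `items(...)` slices out of `lockfile.packages`, and lockfile migration or tarball extraction can grow that list mid-install. A self-contained sketch of the underlying hazard, using std's MultiArrayList and a hypothetical Package type:

    const std = @import("std");

    const Package = struct { id: u32 };

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        defer _ = gpa.deinit();
        const allocator = gpa.allocator();

        var packages = std.MultiArrayList(Package){};
        defer packages.deinit(allocator);
        try packages.append(allocator, .{ .id = 0 });

        const cached = packages.items(.id); // like the installer's cached slices
        try packages.append(allocator, .{ .id = 1 }); // may reallocate: `cached` is now stale
        const fresh = packages.items(.id); // re-fetch after mutation, as the fix does

        std.debug.print("stale len: {d}, fresh len: {d}\n", .{ cached.len, fresh.len });
    }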
src/install/integrity.zig

@@ -3,34 +3,27 @@ const strings = @import("../string_immutable.zig");
const Crypto = @import("../sha.zig").Hashers;
pub const Integrity = extern struct {
// this is zeroed like this to work around a comptime issue.
const empty_digest_buf: [Integrity.digest_buf_len]u8 = [_]u8{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
tag: Tag = Tag.unknown,
/// Possibly a [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) value initially
/// We transform it though.
value: [digest_buf_len]u8 = undefined,
value: [digest_buf_len]u8 = empty_digest_buf,
const Base64 = std.base64.standard_no_pad;
pub const digest_buf_len: usize = brk: {
const values = [_]usize{
std.crypto.hash.Sha1.digest_length,
std.crypto.hash.sha2.Sha512.digest_length,
std.crypto.hash.sha2.Sha256.digest_length,
std.crypto.hash.sha2.Sha384.digest_length,
};
var value: usize = 0;
for (values) |val| {
value = @max(val, value);
}
break :brk value;
};
pub const digest_buf_len: usize = @max(
std.crypto.hash.Sha1.digest_length,
std.crypto.hash.sha2.Sha512.digest_length,
std.crypto.hash.sha2.Sha256.digest_length,
std.crypto.hash.sha2.Sha384.digest_length,
);
pub fn parseSHASum(buf: []const u8) !Integrity {
if (buf.len == 0) {
return Integrity{
.tag = Tag.unknown,
.value = undefined,
};
}
@@ -40,8 +33,11 @@ pub const Integrity = extern struct {
var out_i: usize = 0;
var i: usize = 0;
{
@memset(&integrity.value, 0);
// initializer should zero it out
if (comptime @import("root").bun.Environment.allow_assert) {
for (integrity.value) |c| {
std.debug.assert(c == 0);
}
}
while (i < end) {
@@ -74,23 +70,20 @@ pub const Integrity = extern struct {
if (buf.len < "sha256-".len) {
return Integrity{
.tag = Tag.unknown,
.value = undefined,
};
}
var out: [digest_buf_len]u8 = undefined;
var out: [digest_buf_len]u8 = empty_digest_buf;
const tag = Tag.parse(buf);
if (tag == Tag.unknown) {
return Integrity{
.tag = Tag.unknown,
.value = undefined,
};
}
Base64.Decoder.decode(&out, std.mem.trimRight(u8, buf["sha256-".len..], "=")) catch {
return Integrity{
.tag = Tag.unknown,
.value = undefined,
};
};
@@ -203,4 +196,13 @@ pub const Integrity = extern struct {
unreachable;
}
comptime {
var integrity = Integrity{ .tag = Tag.sha1 };
for (integrity.value) |c| {
if (c != 0) {
@compileError("Integrity buffer is not zeroed");
}
}
}
};
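A reduced sketch of what the new comptime block enforces (types simplified; the real struct uses a Tag enum and a hand-written zero array to dodge the comptime issue its comment mentions): default construction must produce all-zero digest bytes, so equal integrity values also compare and serialize byte-for-byte equal.

    const std = @import("std");

    const digest_buf_len = 64;

    const Integrity = extern struct {
        tag: u8 = 0,
        value: [digest_buf_len]u8 = [_]u8{0} ** digest_buf_len,

        comptime {
            const default = Integrity{};
            for (default.value) |c| {
                if (c != 0) @compileError("Integrity buffer is not zeroed");
            }
        }
    };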

src/install/lockfile.zig

@@ -21,6 +21,7 @@ const json_parser = bun.JSON;
const JSPrinter = bun.js_printer;
const linker = @import("../linker.zig");
const migration = @import("./migration.zig");
const sync = @import("../sync.zig");
const Api = @import("../api/schema.zig").Api;
@@ -92,7 +93,7 @@ const assertNoUninitializedPadding = @import("./padding_checker.zig").assertNoUn
// Serialized data
/// The version of the lockfile format, intended to prevent data corruption for format changes.
format: FormatVersion = .v1,
format: FormatVersion = FormatVersion.current,
meta_hash: MetaHash = zero_hash,
@@ -159,7 +160,7 @@ pub fn isEmpty(this: *const Lockfile) bool {
return this.packages.len == 0 or this.packages.len == 1 or this.packages.get(0).resolutions.len == 0;
}
pub const LoadFromDiskResult = union(Tag) {
pub const LoadFromDiskResult = union(enum) {
not_found: void,
err: struct {
step: Step,
@@ -167,26 +168,30 @@ pub const LoadFromDiskResult = union(Tag) {
},
ok: *Lockfile,
pub const Step = enum { open_file, read_file, parse_file };
pub const Tag = enum {
not_found,
err,
ok,
};
pub const Step = enum { open_file, read_file, parse_file, migrating };
};
pub fn loadFromDisk(this: *Lockfile, allocator: Allocator, log: *logger.Log, filename: stringZ) LoadFromDiskResult {
if (comptime Environment.allow_assert) std.debug.assert(FileSystem.instance_loaded);
var file = std.io.getStdIn();
if (filename.len > 0)
file = std.fs.cwd().openFileZ(filename, .{ .mode = .read_only }) catch |err| {
var file = if (filename.len > 0)
std.fs.cwd().openFileZ(filename, .{ .mode = .read_only }) catch |err| {
return switch (err) {
error.FileNotFound, error.AccessDenied, error.BadPathName => LoadFromDiskResult{ .not_found = {} },
error.FileNotFound => {
// Attempt to load from "package-lock.json", "yarn.lock", etc.
return migration.detectAndLoadOtherLockfile(
this,
allocator,
log,
filename,
);
},
error.AccessDenied, error.BadPathName => LoadFromDiskResult{ .not_found = {} },
else => LoadFromDiskResult{ .err = .{ .step = .open_file, .value = err } },
};
};
}
else
std.io.getStdIn();
defer file.close();
var buf = file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| {
@@ -209,6 +214,10 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: Allocator, log: *log
return LoadFromDiskResult{ .err = .{ .step = .parse_file, .value = err } };
};
if (Environment.allow_assert) {
this.verifyData() catch @panic("lockfile data is corrupt");
}
return LoadFromDiskResult{ .ok = this };
}
@@ -289,6 +298,14 @@ pub const Tree = struct {
};
}
pub fn reload(this: *Iterator, lockfile: *const Lockfile) void {
this.trees = lockfile.buffers.trees.items;
this.dependency_ids = lockfile.buffers.hoisted_dependencies.items;
this.dependencies = lockfile.buffers.dependencies.items;
this.resolutions = lockfile.buffers.resolutions.items;
this.string_buf = lockfile.buffers.string_bytes.items;
}
pub fn nextNodeModulesFolder(this: *Iterator) ?NodeModulesFolder {
if (this.tree_id >= this.trees.len) return null;
@@ -997,6 +1014,9 @@ pub const Printer = struct {
.read_file => Output.prettyErrorln("<r><red>error<r> reading lockfile:<r> {s}", .{
@errorName(cause.value),
}),
.migrating => Output.prettyErrorln("<r><red>error<r> while migrating lockfile:<r> {s}", .{
@errorName(cause.value),
}),
}
if (log.errors > 0) {
switch (Output.enable_ansi_colors) {
@@ -1260,6 +1280,24 @@ pub const Printer = struct {
comptime Writer: type,
writer: Writer,
) !void {
// Internal debugging aid: print the lockfile as custom JSON.
// Limited to debug builds because we don't want people to rely on this format.
if (Environment.isDebug) {
if (std.os.getenv("JSON")) |_| {
try std.json.stringify(
this.lockfile,
.{
.whitespace = .indent_2,
.emit_null_optional_fields = true,
.emit_nonportable_numbers_as_strings = true,
},
writer,
);
try writer.writeAll("\n");
return;
}
}
try writer.writeAll(
\\# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
\\# yarn lockfile v1
@@ -1416,7 +1454,7 @@ pub const Printer = struct {
var behavior = Behavior.uninitialized;
var dependency_behavior_change_count: u8 = 0;
for (dependencies) |dep| {
if (dep.behavior != behavior) {
if (!dep.behavior.eq(behavior)) {
if (dep.behavior.isOptional()) {
try writer.writeAll(" optionalDependencies:\n");
if (comptime Environment.allow_assert) dependency_behavior_change_count += 1;
@@ -1458,20 +1496,18 @@ pub const Printer = struct {
pub fn verifyData(this: *Lockfile) !void {
std.debug.assert(this.format == Lockfile.FormatVersion.current);
{
var i: usize = 0;
while (i < this.packages.len) : (i += 1) {
const package: Lockfile.Package = this.packages.get(i);
std.debug.assert(this.str(&package.name).len == @as(usize, package.name.len()));
std.debug.assert(String.Builder.stringHash(this.str(&package.name)) == @as(usize, package.name_hash));
std.debug.assert(package.dependencies.get(this.buffers.dependencies.items).len == @as(usize, package.dependencies.len));
std.debug.assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.resolutions.len));
std.debug.assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.dependencies.len));
const dependencies = package.dependencies.get(this.buffers.dependencies.items);
for (dependencies) |dependency| {
std.debug.assert(this.str(&dependency.name).len == @as(usize, dependency.name.len()));
std.debug.assert(String.Builder.stringHash(this.str(&dependency.name)) == dependency.name_hash);
}
var i: usize = 0;
while (i < this.packages.len) : (i += 1) {
const package: Lockfile.Package = this.packages.get(i);
std.debug.assert(this.str(&package.name).len == @as(usize, package.name.len()));
std.debug.assert(String.Builder.stringHash(this.str(&package.name)) == @as(usize, package.name_hash));
std.debug.assert(package.dependencies.get(this.buffers.dependencies.items).len == @as(usize, package.dependencies.len));
std.debug.assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.resolutions.len));
std.debug.assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.dependencies.len));
const dependencies = package.dependencies.get(this.buffers.dependencies.items);
for (dependencies) |dependency| {
std.debug.assert(this.str(&dependency.name).len == @as(usize, dependency.name.len()));
std.debug.assert(String.Builder.stringHash(this.str(&dependency.name)) == dependency.name_hash);
}
}
}
@@ -1688,7 +1724,7 @@ pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Pack
fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) !Lockfile.Package {
defer {
if (comptime Environment.isDebug) {
if (comptime Environment.allow_assert) {
std.debug.assert(this.getPackageID(package_.name_hash, null, &package_.resolution) != null);
}
}
@@ -1850,12 +1886,13 @@ pub const PackageIndex = struct {
};
pub const FormatVersion = enum(u32) {
v0,
v0 = 0,
// bun v0.0.x - bun v0.1.6
v1,
v1 = 1,
// bun v0.1.7+
// This change added tarball URLs to npm-resolved packages
v2,
v2 = 2,
_,
pub const current = FormatVersion.v2;
};
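With explicit values and the trailing `_`, FormatVersion is a non-exhaustive enum: a lockfile written by a future bun decodes to an unnamed value instead of tripping safety checks, and can be rejected deliberately. A sketch (the isSupported helper is hypothetical):

    const FormatVersion = enum(u32) {
        v0 = 0,
        v1 = 1,
        v2 = 2,
        _,

        pub const current = FormatVersion.v2;
    };

    fn isSupported(raw: u32) bool {
        return switch (@as(FormatVersion, @enumFromInt(raw))) {
            .v0, .v1, .v2 => true,
            _ => false, // unknown future format: caller re-resolves instead of crashing
        };
    }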
@@ -1875,7 +1912,7 @@ pub const Package = extern struct {
name: String = .{},
name_hash: PackageNameHash = 0,
/// How a package has been resolved
/// How this package has been resolved
/// When .tag is uninitialized, that means the package is not resolved yet.
resolution: Resolution = .{},
@@ -1884,8 +1921,18 @@ pub const Package = extern struct {
/// if resolutions[i] is an invalid package ID, then dependencies[i] is not resolved
dependencies: DependencySlice = .{},
/// The resolved package IDs for the dependencies
resolutions: DependencyIDSlice = .{},
/// The resolved package IDs for this package's dependencies. Instead of storing this
/// on the `Dependency` struct within `.dependencies`, it is stored on the package itself
/// so we can access it faster.
///
/// Each index in this array corresponds to the same index in dependencies.
/// Each value in this array corresponds to the resolved package ID for that dependency.
///
/// So this is how you say "what package ID for lodash does this package actually resolve to?"
///
/// By default, the underlying buffer is filled with "invalid_id" to indicate this package ID
/// was not resolved
resolutions: PackageIDSlice = .{},
meta: Meta = .{},
bin: Bin = .{},
@@ -2023,11 +2070,11 @@ pub const Package = extern struct {
field: string,
behavior: Behavior,
pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.normal)) };
pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.dev)) };
pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.optional)) };
pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.peer)) };
pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @as(Behavior, @enumFromInt(Behavior.workspace)) };
pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = Behavior.normal };
pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = Behavior.dev };
pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = Behavior.optional };
pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = Behavior.peer };
pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = Behavior.workspace };
};
pub inline fn isDisabled(this: *const Lockfile.Package) bool {
@@ -3425,15 +3472,7 @@ pub const Package = extern struct {
return error.InvalidPackageJSON;
}
for (obj.properties.slice()) |item| {
const key = item.key.?.asString(allocator) orelse {
log.addErrorFmt(&source, item.key.?.loc, allocator,
\\{0s} expects a map of specifiers, e.g.
\\"{0s}": {{
\\ "bun": "latest"
\\}}
, .{group.prop}) catch {};
return error.InvalidPackageJSON;
};
const key = item.key.?.asString(allocator).?;
const value = item.value.?.asString(allocator) orelse {
log.addErrorFmt(&source, item.value.?.loc, allocator,
\\{0s} expects a map of specifiers, e.g.
@@ -3732,15 +3771,19 @@ pub const Package = extern struct {
string_builder.clamp();
}
pub const List = std.MultiArrayList(Lockfile.Package);
pub const List = bun.MultiArrayList(Lockfile.Package);
pub const Meta = extern struct {
// TODO: when we bump the lockfile version, we should reorder this to:
// id(32), arch(16), os(16), id(8), man_dir(8), integrity(72 align 8)
// should allow us to remove padding bytes
// TODO: remove origin. It doesn't do anything and can be inferred from the resolution
origin: Origin = Origin.npm,
_padding_origin: u8 = 0,
arch: Npm.Architecture = Npm.Architecture.all,
os: Npm.OperatingSystem = Npm.OperatingSystem.all,
_padding_os: u16 = 0,
id: PackageID = invalid_package_id,
@@ -3759,11 +3802,14 @@ pub const Package = extern struct {
}
pub fn clone(this: *const Meta, id: PackageID, buf: []const u8, comptime StringBuilderType: type, builder: StringBuilderType) Meta {
var new = this.*;
new.id = id;
new.man_dir = builder.append(String, this.man_dir.slice(buf));
return new;
return Meta{
.id = id,
.man_dir = builder.append(String, this.man_dir.slice(buf)),
.integrity = this.integrity,
.arch = this.arch,
.os = this.os,
.origin = this.origin,
};
}
};
@@ -3840,6 +3886,8 @@ pub const Package = extern struct {
inline for (FieldsEnum.fields) |field| {
const value = sliced.items(@field(Lockfile.Package.List.Field, field.name));
if (comptime Environment.allow_assert)
debug("save(\"{s}\") = {d} bytes", .{ field.name, std.mem.sliceAsBytes(value).len });
comptime assertNoUninitializedPadding(@TypeOf(value));
try writer.writeAll(std.mem.sliceAsBytes(value));
@@ -3926,11 +3974,14 @@ pub fn deinit(this: *Lockfile) void {
const Buffers = struct {
trees: Tree.List = .{},
hoisted_dependencies: DependencyIDList = .{},
/// This is the underlying buffer used for the `resolutions` external slices inside of `Package`
/// Should be the same length as `dependencies`
resolutions: PackageIDList = .{},
/// This is the underlying buffer used for the `dependencies` external slices inside of `Package`
dependencies: DependencyList = .{},
/// This is the underlying buffer used for any `Semver.ExternalString` instance in the lockfile
extern_strings: ExternalStringBuffer = .{},
// node_modules_folders: NodeModulesFolderList = NodeModulesFolderList{},
// node_modules_package_ids: PackageIDList = PackageIDList{},
/// This is where all non-inlinable `Semver.String`s are stored.
string_bytes: StringBuffer = .{},
pub fn deinit(this: *Buffers, allocator: Allocator) void {
@@ -4470,7 +4521,7 @@ pub fn hasMetaHashChanged(this: *Lockfile, print_name_version_string: bool) !boo
this.meta_hash = try this.generateMetaHash(print_name_version_string);
return !strings.eqlLong(&previous_meta_hash, &this.meta_hash, false);
}
fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash {
pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash {
if (this.packages.len <= 1)
return zero_hash;
@@ -4600,3 +4651,294 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve
return null;
}
pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep: Dependency, res: ?PackageID) !void {
const sb = this.buffers.string_bytes.items;
var buf: [2048]u8 = undefined;
try w.beginObject();
defer w.endObject() catch {};
try w.objectField("literal");
try w.write(dep.version.literal.slice(sb));
try w.objectField(@tagName(dep.version.tag));
switch (dep.version.tag) {
.uninitialized => try w.write(null),
.npm => {
try w.beginObject();
defer w.endObject() catch {};
const info: Dependency.Version.NpmInfo = dep.version.value.npm;
try w.objectField("name");
try w.write(info.name.slice(sb));
try w.objectField("version");
try w.write(try std.fmt.bufPrint(&buf, "{}", .{info.version}));
},
.dist_tag => {
try w.beginObject();
defer w.endObject() catch {};
const info: Dependency.Version.TagInfo = dep.version.value.dist_tag;
try w.objectField("name");
try w.write(info.name.slice(sb));
try w.objectField("tag");
try w.write(info.tag.slice(sb));
},
.tarball => {
try w.beginObject();
defer w.endObject() catch {};
const info: Dependency.Version.TarballInfo = dep.version.value.tarball;
try w.objectField(@tagName(info.uri));
try w.write(switch (info.uri) {
inline else => |s| s.slice(sb),
});
try w.objectField("package_name");
try w.write(info.package_name.slice(sb));
},
.folder => {
try w.write(dep.version.value.folder.slice(sb));
},
.symlink => {
try w.write(dep.version.value.symlink.slice(sb));
},
.workspace => {
try w.write(dep.version.value.workspace.slice(sb));
},
.git => {
try w.beginObject();
defer w.endObject() catch {};
const info: Repository = dep.version.value.git;
try w.objectField("owner");
try w.write(info.owner.slice(sb));
try w.objectField("repo");
try w.write(info.repo.slice(sb));
try w.objectField("committish");
try w.write(info.committish.slice(sb));
try w.objectField("resolved");
try w.write(info.resolved.slice(sb));
try w.objectField("package_name");
try w.write(info.package_name.slice(sb));
},
.github => {
try w.beginObject();
defer w.endObject() catch {};
const info: Repository = dep.version.value.github;
try w.objectField("owner");
try w.write(info.owner.slice(sb));
try w.objectField("repo");
try w.write(info.repo.slice(sb));
try w.objectField("committish");
try w.write(info.committish.slice(sb));
try w.objectField("resolved");
try w.write(info.resolved.slice(sb));
try w.objectField("package_name");
try w.write(info.package_name.slice(sb));
},
}
try w.objectField("resolved_id");
try w.write(if (res) |r| if (r == invalid_package_id) null else r else null);
const behavior = try std.fmt.bufPrint(&buf, "{}", .{dep.behavior});
try w.objectField("behavior");
try w.write(behavior);
}
pub fn jsonStringify(this: *const Lockfile, w: anytype) !void {
var buf: [2048]u8 = undefined;
const sb = this.buffers.string_bytes.items;
try w.beginObject();
defer w.endObject() catch {};
try w.objectField("format");
try w.write(@tagName(this.format));
try w.objectField("meta_hash");
try w.write(std.fmt.bytesToHex(this.meta_hash, .lower));
{
try w.objectField("package_index");
try w.beginObject();
defer w.endObject() catch {};
var iter = this.package_index.iterator();
while (iter.next()) |it| {
const entry: PackageIndex.Entry = it.value_ptr.*;
const first_id = switch (entry) {
.PackageID => |id| id,
.PackageIDMultiple => |ids| ids.items[0],
};
const name = this.packages.items(.name)[first_id].slice(sb);
try w.objectField(name);
switch (entry) {
.PackageID => |id| try w.write(id),
.PackageIDMultiple => |ids| {
try w.beginArray();
for (ids.items) |id| {
try w.write(id);
}
try w.endArray();
},
}
}
}
{
try w.objectField("packages");
try w.beginArray();
defer w.endArray() catch {};
for (0..this.packages.len) |i| {
const pkg: Package = this.packages.get(i);
try w.beginObject();
defer w.endObject() catch {};
try w.objectField("id");
try w.write(i);
try w.objectField("name");
try w.write(pkg.name.slice(sb));
try w.objectField("name_hash");
try w.write(pkg.name_hash);
try w.objectField("resolution");
if (pkg.resolution.tag == .uninitialized) {
try w.write(null);
} else {
const b = try std.fmt.bufPrint(&buf, "{s} {s}", .{ @tagName(pkg.resolution.tag), pkg.resolution.fmt(sb) });
try w.write(b);
}
try w.objectField("dependencies");
{
try w.beginObject();
defer w.endObject() catch {};
for (pkg.dependencies.get(this.buffers.dependencies.items), pkg.resolutions.get(this.buffers.resolutions.items)) |dep_, res| {
const dep: Dependency = dep_;
try w.objectField(dep.name.slice(sb));
try this.jsonStringifyDependency(w, dep, res);
}
}
if (@as(u16, @intFromEnum(pkg.meta.arch)) != Npm.Architecture.all_value) {
try w.objectField("arch");
try w.beginArray();
defer w.endArray() catch {};
for (Npm.Architecture.NameMap.kvs) |kv| {
if (pkg.meta.arch.has(kv.value)) {
try w.write(kv.key);
}
}
}
if (@as(u16, @intFromEnum(pkg.meta.os)) != Npm.OperatingSystem.all_value) {
try w.objectField("os");
try w.beginArray();
defer w.endArray() catch {};
for (Npm.OperatingSystem.NameMap.kvs) |kv| {
if (pkg.meta.os.has(kv.value)) {
try w.write(kv.key);
}
}
}
try w.objectField("integrity");
if (pkg.meta.integrity.tag != .unknown) {
try w.write(try std.fmt.bufPrint(&buf, "{}", .{pkg.meta.integrity}));
} else {
try w.write(null);
}
try w.objectField("man_dir");
try w.write(pkg.meta.man_dir.slice(sb));
try w.objectField("origin");
try w.write(@tagName(pkg.meta.origin));
try w.objectField("bin");
switch (pkg.bin.tag) {
.none => try w.write(null),
.file => {
try w.beginObject();
defer w.endObject() catch {};
try w.objectField("file");
try w.write(pkg.bin.value.file.slice(sb));
},
.named_file => {
try w.beginObject();
defer w.endObject() catch {};
try w.objectField("name");
try w.write(pkg.bin.value.named_file[0].slice(sb));
try w.objectField("file");
try w.write(pkg.bin.value.named_file[1].slice(sb));
},
.dir => {
try w.objectField("dir");
try w.write(pkg.bin.value.dir.slice(sb));
},
.map => {
try w.beginObject();
defer w.endObject() catch {};
const data: []const ExternalString = pkg.bin.value.map.get(this.buffers.extern_strings.items);
var bin_i: usize = 0;
while (bin_i < data.len) : (bin_i += 2) {
try w.objectField(data[bin_i].slice(sb));
try w.write(data[bin_i + 1].slice(sb));
}
},
}
{
try w.objectField("scripts");
try w.beginObject();
defer w.endObject() catch {};
inline for (comptime std.meta.fieldNames(Lockfile.Scripts)) |field_name| {
var script = @field(pkg.scripts, field_name).slice(sb);
if (script.len > 0) {
try w.objectField(field_name);
try w.write(script);
}
}
}
}
}
try w.objectField("workspace_paths");
{
try w.beginObject();
defer w.endObject() catch {};
for (this.workspace_paths.keys(), this.workspace_paths.values()) |k, v| {
try w.objectField(try std.fmt.bufPrint(&buf, "{d}", .{k}));
try w.write(v.slice(sb));
}
}
try w.objectField("workspace_versions");
{
try w.beginObject();
defer w.endObject() catch {};
for (this.workspace_versions.keys(), this.workspace_versions.values()) |k, v| {
try w.objectField(try std.fmt.bufPrint(&buf, "{d}", .{k}));
try w.write(try std.fmt.bufPrint(&buf, "{}", .{v.fmt(sb)}));
}
}
}
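The JSON debug printer above leans on std.json's hook: std.json.stringify routes any value whose type declares jsonStringify through that method, which is why Lockfile and jsonStringifyDependency are written against the WriteStream-style `w`. A toy sketch of the same hook (Point is illustrative; API as used in the diff):

    const std = @import("std");

    const Point = struct {
        x: i32,
        y: i32,

        pub fn jsonStringify(self: Point, w: anytype) !void {
            try w.beginObject();
            try w.objectField("x");
            try w.write(self.x);
            try w.objectField("y");
            try w.write(self.y);
            try w.endObject();
        }
    };

    pub fn main() !void {
        const stdout = std.io.getStdOut().writer();
        try std.json.stringify(Point{ .x = 1, .y = 2 }, .{ .whitespace = .indent_2 }, stdout);
    }

In debug builds, setting the JSON environment variable switches the yarn-lock printer above onto this JSON path.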

src/install/migration.zig (new file, 947 lines)

@@ -0,0 +1,947 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const logger = bun.logger;
const Install = @import("./install.zig");
const Resolution = @import("./resolution.zig").Resolution;
const Dependency = @import("./dependency.zig");
const VersionedURL = @import("./versioned_url.zig");
const Npm = @import("./npm.zig");
const Integrity = @import("./integrity.zig").Integrity;
const Bin = @import("./bin.zig").Bin;
const Semver = @import("./semver.zig");
const String = Semver.String;
const ExternalString = Semver.ExternalString;
const stringHash = String.Builder.stringHash;
const Lockfile = @import("./lockfile.zig");
const LoadFromDiskResult = Lockfile.LoadFromDiskResult;
const JSAst = bun.JSAst;
const Expr = JSAst.Expr;
const B = JSAst.B;
const E = JSAst.E;
const G = JSAst.G;
const S = JSAst.S;
const debug = Output.scoped(.migrate, false);
pub fn detectAndLoadOtherLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Log, bun_lockfile_path: stringZ) LoadFromDiskResult {
const dirname = bun_lockfile_path[0 .. strings.lastIndexOfChar(bun_lockfile_path, '/') orelse 0];
// check for package-lock.json, yarn.lock, etc...
// if it exists, do an in-memory migration
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
@memcpy(buf[0..dirname.len], dirname);
const cwd = std.fs.cwd();
npm: {
const npm_lockfile_name = "package-lock.json";
@memcpy(buf[dirname.len .. dirname.len + npm_lockfile_name.len], npm_lockfile_name);
buf[dirname.len + npm_lockfile_name.len] = 0;
const lockfile_path = buf[0 .. dirname.len + npm_lockfile_name.len :0];
var timer = std.time.Timer.start() catch unreachable;
const file = cwd.openFileZ(lockfile_path, .{ .mode = .read_only }) catch break :npm;
defer file.close();
var data = file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| {
return LoadFromDiskResult{ .err = .{ .step = .migrating, .value = err } };
};
const lockfile = migrateNPMLockfile(this, allocator, log, data, lockfile_path) catch |err| {
if (err == error.NPMLockfileVersionMismatch) {
Output.prettyErrorln(
\\<red><b>error<r><d>:<r> Please upgrade package-lock.json to lockfileVersion 3
\\
\\Run 'npm i --lockfile-version 3 --frozen-lockfile' to upgrade your lockfile without changing dependencies.
, .{});
Global.exit(1);
}
if (Environment.allow_assert) {
const maybe_trace = @errorReturnTrace();
Output.prettyErrorln("Error: {s}", .{@errorName(err)});
log.printForLogLevel(Output.errorWriter()) catch {};
if (maybe_trace) |trace| {
std.debug.dumpStackTrace(trace.*);
}
Output.prettyErrorln("Invalid NPM package-lock.json\nIn a release build, this would ignore and do a fresh install.\nAborting", .{});
Global.exit(1);
}
return LoadFromDiskResult{ .err = .{ .step = .migrating, .value = err } };
};
if (lockfile == .ok) {
Output.printElapsed(@as(f64, @floatFromInt(timer.read())) / std.time.ns_per_ms);
Output.prettyError(" ", .{});
Output.prettyErrorln("<d>migrated lockfile from <r><green>package-lock.json<r>", .{});
Output.flush();
}
return lockfile;
}
return LoadFromDiskResult{ .not_found = {} };
}
const IdMap = std.StringHashMapUnmanaged(IdMapValue);
const IdMapValue = struct {
/// index into the old package-lock.json package entries.
old_json_index: u32,
/// this is the new package id for the bun lockfile
///
/// - if this new_package_id is set to `package_id_is_link`, it means it's a link
/// and to get the actual package id, you need to lookup `.resolved` in the hashmap.
/// - if it is `package_id_is_bundled`, it means it's a bundled dependency that was not
/// marked by npm, which can happen to some transitive dependencies.
new_package_id: u32,
};
const package_id_is_link = std.math.maxInt(u32);
const package_id_is_bundled = std.math.maxInt(u32) - 1;
const unset_package_id = Install.invalid_package_id - 1;
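How these sentinels are meant to be consumed, as a sketch (realPackageId is a hypothetical helper; the real code below also follows `.resolved` to find a link's target):

    fn realPackageId(map: *const IdMap, pkg_path: []const u8) ?u32 {
        const entry = map.get(pkg_path) orelse return null;
        return switch (entry.new_package_id) {
            package_id_is_link, package_id_is_bundled => null, // needs special handling
            else => entry.new_package_id,
        };
    }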
const dependency_keys = .{
.dependencies,
.devDependencies,
.peerDependencies,
.optionalDependencies,
};
pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Log, data: string, path: string) !LoadFromDiskResult {
debug("begin lockfile migration", .{});
try this.initEmpty(allocator);
Install.initializeStore();
const json_src = logger.Source.initPathString(path, data);
const json = bun.JSON.ParseJSONUTF8(&json_src, log, allocator) catch return error.InvalidNPMLockfile;
if (json.data != .e_object) {
return error.InvalidNPMLockfile;
}
if (json.get("lockfileVersion")) |version| {
if (!(version.data == .e_number and version.data.e_number.value == 3)) {
return error.NPMLockfileVersionMismatch;
}
} else {
return error.InvalidNPMLockfile;
}
// Count pass
var builder_ = this.stringBuilder();
var builder = &builder_;
const name = (if (json.get("name")) |expr| expr.asString(allocator) else null) orelse "";
builder.count(name);
var root_package: *E.Object = undefined;
var packages_properties = brk: {
const obj = json.get("packages") orelse return error.InvalidNPMLockfile;
if (obj.data != .e_object) return error.InvalidNPMLockfile;
if (obj.data.e_object.properties.len == 0) return error.InvalidNPMLockfile;
const prop1 = obj.data.e_object.properties.at(0);
if (prop1.key) |k| {
if (k.data != .e_string) return error.InvalidNPMLockfile;
// the first key must be "", the root package's self-reference
if (k.data.e_string.data.len != 0) return error.InvalidNPMLockfile;
if (prop1.value.?.data != .e_object) return error.InvalidNPMLockfile;
root_package = prop1.value.?.data.e_object;
} else return error.InvalidNPMLockfile;
break :brk obj.data.e_object.properties;
};
var num_deps: u32 = 0;
const workspace_map: ?Lockfile.Package.WorkspaceMap = workspace_map: {
if (root_package.get("workspaces")) |wksp| {
var workspaces = Lockfile.Package.WorkspaceMap.init(allocator);
const json_array = switch (wksp.data) {
.e_array => |arr| arr,
.e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) {
.e_array => |arr| arr,
else => return error.InvalidNPMLockfile,
} else return error.InvalidNPMLockfile,
else => return error.InvalidNPMLockfile,
};
const workspace_packages_count = try Lockfile.Package.processWorkspaceNamesArray(
&workspaces,
allocator,
log,
json_array,
&json_src,
wksp.loc,
builder,
);
debug("found {d} workspace packages", .{workspace_packages_count});
num_deps += workspace_packages_count;
break :workspace_map workspaces;
}
break :workspace_map null;
};
// Counting Phase
// This "IdMap" is used to make object key lookups faster for the `packages` object
// it also lets us resolve linked and bundled packages.
var id_map = IdMap{};
try id_map.ensureTotalCapacity(allocator, packages_properties.len);
var num_extern_strings: u32 = 0;
var package_idx: u32 = 0;
for (packages_properties.slice(), 0..) |entry, i| {
const pkg_path = entry.key.?.asString(allocator).?;
if (entry.value.?.data != .e_object)
return error.InvalidNPMLockfile;
const pkg = entry.value.?.data.e_object;
if (pkg.get("link") != null) {
id_map.putAssumeCapacity(
pkg_path,
IdMapValue{
.old_json_index = @truncate(i),
.new_package_id = package_id_is_link,
},
);
continue;
}
if (pkg.get("inBundle")) |x| if (x.data == .e_boolean and x.data.e_boolean.value) {
id_map.putAssumeCapacity(
pkg_path,
IdMapValue{
.old_json_index = @truncate(i),
.new_package_id = package_id_is_bundled,
},
);
continue;
};
if (pkg.get("extraneous")) |x| if (x.data == .e_boolean and x.data.e_boolean.value) {
continue;
};
id_map.putAssumeCapacity(
pkg_path,
IdMapValue{
.old_json_index = @truncate(i),
.new_package_id = package_idx,
},
);
package_idx += 1;
inline for (dependency_keys) |dep_key| {
if (pkg.get(@tagName(dep_key))) |deps| {
if (deps.data != .e_object) {
return error.InvalidNPMLockfile;
}
num_deps +|= @as(u32, deps.data.e_object.properties.len);
for (deps.data.e_object.properties.slice()) |dep| {
const dep_name = dep.key.?.asString(allocator).?;
const version_string = dep.value.?.asString(allocator) orelse return error.InvalidNPMLockfile;
builder.count(dep_name);
builder.count(version_string);
// If it's a folder or workspace, pessimistically assume we will need a maximum path
switch (Dependency.Version.Tag.infer(version_string)) {
.folder, .workspace => builder.cap += bun.MAX_PATH_BYTES,
else => {},
}
}
}
}
if (pkg.get("bin")) |bin| {
if (bin.data != .e_object) return error.InvalidNPMLockfile;
switch (bin.data.e_object.properties.len) {
0 => return error.InvalidNPMLockfile,
1 => {
const first_bin = bin.data.e_object.properties.at(0);
const key = first_bin.key.?.asString(allocator).?;
const workspace_entry = if (workspace_map) |map| map.map.get(pkg_path) else null;
const is_workspace = workspace_entry != null;
const pkg_name = if (is_workspace)
workspace_entry.?.name
else if (entry.value.?.get("name")) |set_name|
(set_name.asString(this.allocator) orelse return error.InvalidNPMLockfile)
else
packageNameFromPath(pkg_path);
if (!strings.eql(key, pkg_name)) {
builder.count(key);
}
builder.count(first_bin.value.?.asString(allocator) orelse return error.InvalidNPMLockfile);
},
else => {
for (bin.data.e_object.properties.slice()) |bin_entry| {
builder.count(bin_entry.key.?.asString(allocator).?);
builder.count(bin_entry.value.?.asString(allocator) orelse return error.InvalidNPMLockfile);
}
num_extern_strings += @truncate(bin.data.e_object.properties.len * 2);
},
}
}
if (pkg.get("resolved")) |resolved_expr| {
const resolved = resolved_expr.asString(allocator) orelse return error.InvalidNPMLockfile;
if (strings.hasPrefixComptime(resolved, "file:")) {
builder.count(resolved[5..]);
} else if (strings.hasPrefixComptime(resolved, "git+")) {
builder.count(resolved[4..]);
} else {
builder.count(resolved);
// This over-counts, but it would be too hard to determine here whether this
// is an `npm`/`dist_tag` version (the only cases where this is actually used).
if (pkg.get("version")) |v| if (v.asString(allocator)) |s| {
builder.count(s);
};
}
} else {
builder.count(pkg_path);
}
}
if (num_deps == std.math.maxInt(u32)) return error.InvalidNPMLockfile; // the saturating add above overflowed
debug("counted {d} dependencies", .{num_deps});
debug("counted {d} extern strings", .{num_extern_strings});
debug("counted {d} packages", .{package_idx});
try this.buffers.dependencies.ensureTotalCapacity(allocator, num_deps);
try this.buffers.resolutions.ensureTotalCapacity(allocator, num_deps);
try this.buffers.extern_strings.ensureTotalCapacity(allocator, num_extern_strings);
try this.packages.ensureTotalCapacity(allocator, package_idx);
// The package index is overallocated, but we know the upper bound
try this.package_index.ensureTotalCapacity(package_idx);
try builder.allocate();
if (workspace_map) |wksp| {
try this.workspace_paths.ensureTotalCapacity(allocator, wksp.map.unmanaged.entries.len);
try this.workspace_versions.ensureTotalCapacity(allocator, wksp.map.unmanaged.entries.len);
for (wksp.map.keys(), wksp.map.values()) |k, v| {
const name_hash = stringHash(v.name);
this.workspace_paths.putAssumeCapacity(name_hash, builder.append(String, k));
if (v.version) |version| this.workspace_versions.putAssumeCapacity(name_hash, version);
}
}
// Package Building Phase
// This initializes every package and sets the resolution to uninitialized
for (packages_properties.slice()) |entry| {
// this pass is allowed to make more assumptions because we already checked things during
// the counting pass
const pkg = entry.value.?.data.e_object;
if (pkg.get("link") != null or if (pkg.get("inBundle") orelse pkg.get("extraneous")) |x| x.data == .e_boolean and x.data.e_boolean.value else false) continue;
const pkg_path = entry.key.?.asString(allocator).?;
const workspace_entry = if (workspace_map) |map| map.map.get(pkg_path) else null;
const is_workspace = workspace_entry != null;
const pkg_name = if (is_workspace)
workspace_entry.?.name
else if (pkg.get("name")) |set_name|
(set_name.asString(this.allocator) orelse unreachable)
else
packageNameFromPath(pkg_path);
const name_hash = stringHash(pkg_name);
const package_id: Install.PackageID = @intCast(this.packages.len);
if (Environment.allow_assert) {
// The counting phase assigns every package an id; if this assertion fails,
// it means we wrote wrong resolved ids there.
std.debug.assert(package_id == id_map.get(pkg_path).?.new_package_id);
}
// Instead of calling this.appendPackage, append manually:
// that function has checks which would fail because resolution and dependencies are not set yet.
this.packages.appendAssumeCapacity(Lockfile.Package{
.name = builder.appendWithHash(String, pkg_name, name_hash),
.name_hash = name_hash,
// For non-workspace packages these are set to .uninitialized, then resolved in the
// third phase. This is because the resolution uses the dependent's version
// specifier as a "hint" to resolve the dependency.
.resolution = if (is_workspace) Resolution.init(.{
// This string is counted by `processWorkspaceNamesArray`
.workspace = builder.append(String, pkg_path),
}) else Resolution{},
// we fill this data in later
.dependencies = undefined,
.resolutions = undefined,
.meta = .{
.id = package_id,
.origin = if (package_id == 0) .local else .npm,
.arch = if (pkg.get("cpu")) |cpu_array| arch: {
if (cpu_array.data != .e_array) return error.InvalidNPMLockfile;
var arch: Npm.Architecture = .none;
for (cpu_array.data.e_array.items.slice()) |item| {
if (item.data != .e_string) return error.InvalidNPMLockfile;
arch = arch.apply(item.data.e_string.data);
}
break :arch arch;
} else .all,
.os = if (pkg.get("os")) |os_array| os: {
if (os_array.data != .e_array) return error.InvalidNPMLockfile;
var os: Npm.OperatingSystem = .none;
for (os_array.data.e_array.items.slice()) |item| {
if (item.data != .e_string) return error.InvalidNPMLockfile;
os = os.apply(item.data.e_string.data);
}
break :os os;
} else .all,
.man_dir = String{},
.integrity = if (pkg.get("integrity")) |integrity|
try Integrity.parse(
integrity.asString(this.allocator) orelse
return error.InvalidNPMLockfile,
)
else
Integrity{},
},
.bin = if (pkg.get("bin")) |bin| bin: {
// we already check these conditions during counting
std.debug.assert(bin.data == .e_object);
std.debug.assert(bin.data.e_object.properties.len > 0);
// in npm lockfile, the bin is always an object, even if it is only a single one
// we need to detect if it's a single entry and lower it to a file.
if (bin.data.e_object.properties.len == 1) {
const prop = bin.data.e_object.properties.at(0);
const key = prop.key.?.asString(this.allocator) orelse return error.InvalidNPMLockfile;
const script_value = prop.value.?.asString(this.allocator) orelse return error.InvalidNPMLockfile;
if (strings.eql(key, pkg_name)) {
break :bin .{
.tag = .file,
.value = Bin.Value.init(.{
.file = builder.append(String, script_value),
}),
};
}
break :bin .{
.tag = .named_file,
.value = Bin.Value.init(.{
.named_file = .{
builder.append(String, key),
builder.append(String, script_value),
},
}),
};
}
const view: Install.ExternalStringList = .{
.off = @truncate(this.buffers.extern_strings.items.len),
.len = @intCast(bin.data.e_object.properties.len * 2),
};
for (bin.data.e_object.properties.slice()) |bin_entry| {
const key = bin_entry.key.?.asString(this.allocator) orelse return error.InvalidNPMLockfile;
const script_value = bin_entry.value.?.asString(this.allocator) orelse return error.InvalidNPMLockfile;
this.buffers.extern_strings.appendAssumeCapacity(builder.append(ExternalString, key));
this.buffers.extern_strings.appendAssumeCapacity(builder.append(ExternalString, script_value));
}
if (Environment.allow_assert) {
std.debug.assert(this.buffers.extern_strings.items.len == view.off + view.len);
std.debug.assert(this.buffers.extern_strings.items.len <= this.buffers.extern_strings.capacity);
}
break :bin .{
.tag = .map,
.value = Bin.Value.init(.{
.map = view,
}),
};
} else Bin.init(),
.scripts = .{},
});
if (is_workspace) {
std.debug.assert(package_id != 0); // root package should not be in its own workspace
// we defer doing getOrPutID for non-workspace packages because it depends on the resolution being set.
try this.getOrPutID(package_id, name_hash);
}
}
if (Environment.allow_assert) {
std.debug.assert(this.packages.len == package_idx);
}
// We skip the length check because these buffers were pre-allocated above. The length
// may shrink later, so it is faster to bypass the underlying length field and assign it once at the very end.
var dependencies_buf = this.buffers.dependencies.items.ptr[0..num_deps];
var resolutions_buf = this.buffers.resolutions.items.ptr[0..num_deps];
// pre-initialize the dependencies and resolutions to `unset_package_id`
if (Environment.allow_assert) {
@memset(dependencies_buf, Dependency{});
@memset(resolutions_buf, unset_package_id);
}
var resolutions = this.packages.items(.resolution);
var metas = this.packages.items(.meta);
var dependencies_list = this.packages.items(.dependencies);
var resolution_list = this.packages.items(.resolutions);
if (Environment.allow_assert) {
for (resolutions) |r| {
std.debug.assert(r.tag == .uninitialized or r.tag == .workspace);
}
}
// Root resolution isn't hit through dependency tracing.
resolutions[0] = Resolution.init(.{ .root = {} });
metas[0].origin = .local;
try this.getOrPutID(0, this.packages.items(.name_hash)[0]);
// sized at twice the maximum path length in case something unexpectedly exceeds it
var name_checking_buf: [bun.MAX_PATH_BYTES * 2]u8 = undefined;
// Dependency Linking Phase
package_idx = 0;
var is_first = true;
for (packages_properties.slice()) |entry| {
// this pass is allowed to make more assumptions because we already checked things during
// the counting pass
const pkg = entry.value.?.data.e_object;
if (pkg.get("link") != null or if (pkg.get("inBundle") orelse pkg.get("extraneous")) |x| x.data == .e_boolean and x.data.e_boolean.value else false) continue;
const pkg_path = entry.key.?.asString(allocator).?;
const dependencies_start = dependencies_buf.ptr;
const resolutions_start = resolutions_buf.ptr;
// this is in a defer because this loop iteration can end in two different places.
defer {
if (dependencies_start == dependencies_buf.ptr) {
dependencies_list[package_idx] = .{ .len = 0 };
resolution_list[package_idx] = .{ .len = 0 };
} else {
// Calculate the offset + length by pointer arithmetic
const len: u32 = @truncate((@intFromPtr(resolutions_buf.ptr) - @intFromPtr(resolutions_start)) / @sizeOf(Install.PackageID));
if (Environment.allow_assert) {
std.debug.assert(len > 0);
std.debug.assert(len == ((@intFromPtr(dependencies_buf.ptr) - @intFromPtr(dependencies_start)) / @sizeOf(Dependency)));
}
dependencies_list[package_idx] = .{
.off = @truncate((@intFromPtr(dependencies_start) - @intFromPtr(this.buffers.dependencies.items.ptr)) / @sizeOf(Dependency)),
.len = len,
};
resolution_list[package_idx] = .{
.off = @truncate((@intFromPtr(resolutions_start) - @intFromPtr(this.buffers.resolutions.items.ptr)) / @sizeOf(Install.PackageID)),
.len = len,
};
}
package_idx += 1;
}
// a rarely used feature: https://docs.npmjs.com/cli/v10/configuring-npm/package-json#bundledependencies
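// The value is either a boolean or an array of dependency names; a bare `true` bundles
// every dependency, so we skip linking this package's dependencies entirely.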
const bundled_dependencies = if (pkg.get("bundleDependencies") orelse pkg.get("bundledDependencies")) |expr| deps: {
if (expr.data == .e_boolean) {
if (expr.data.e_boolean.value) continue;
break :deps null;
}
if (expr.data != .e_array) return error.InvalidNPMLockfile;
const arr: *E.Array = expr.data.e_array;
var map = std.StringArrayHashMapUnmanaged(void){};
try map.ensureTotalCapacity(allocator, arr.items.len);
for (arr.items.slice()) |item| {
map.putAssumeCapacity(item.asString(allocator) orelse return error.InvalidNPMLockfile, {});
}
break :deps map;
} else null;
if (is_first) {
is_first = false;
if (workspace_map) |wksp| {
for (wksp.keys(), wksp.values()) |key, value| {
const entry1 = id_map.get(key) orelse return error.InvalidNPMLockfile;
const name_hash = stringHash(value.name);
const wksp_name = builder.append(String, value.name);
const wksp_path = builder.append(String, key);
dependencies_buf[0] = Dependency{
.name = wksp_name,
.name_hash = name_hash,
.version = .{
.tag = .workspace,
.literal = wksp_path,
.value = .{
.workspace = wksp_path,
},
},
.behavior = .{
.workspace = true,
},
};
resolutions_buf[0] = entry1.new_package_id;
dependencies_buf = dependencies_buf[1..];
resolutions_buf = resolutions_buf[1..];
}
}
}
inline for (dependency_keys) |dep_key| {
if (pkg.get(@tagName(dep_key))) |deps| {
// fetch the peerDependenciesMeta if it exists;
// this is only needed for peerDependencies
const peer_dep_meta = if (dep_key == .peerDependencies)
if (pkg.get("peerDependenciesMeta")) |expr| peer_dep_meta: {
if (expr.data != .e_object) return error.InvalidNPMLockfile;
break :peer_dep_meta expr.data.e_object;
} else null
else
void{};
if (deps.data != .e_object) return error.InvalidNPMLockfile;
const properties = deps.data.e_object.properties;
dep_loop: for (properties.slice()) |prop| {
const name_bytes = prop.key.?.asString(this.allocator).?;
if (bundled_dependencies != null and bundled_dependencies.?.getIndex(name_bytes) != null) continue :dep_loop;
const version_bytes = prop.value.?.asString(this.allocator) orelse return error.InvalidNPMLockfile;
const name_hash = stringHash(name_bytes);
const dep_name = builder.appendWithHash(String, name_bytes, name_hash);
const dep_version = builder.append(String, version_bytes);
const sliced = dep_version.sliced(this.buffers.string_bytes.items);
debug("parsing {s}, {s}\n", .{ name_bytes, version_bytes });
const version = Dependency.parse(
this.allocator,
dep_name,
sliced.slice,
&sliced,
log,
) orelse {
return error.InvalidNPMLockfile;
};
debug("-> {s}, {}\n", .{ @tagName(version.tag), version.value });
if (Environment.allow_assert) {
std.debug.assert(version.tag != .uninitialized);
}
const str_node_modules = if (pkg_path.len == 0) "node_modules/" else "/node_modules/";
const suffix_len = str_node_modules.len + name_bytes.len;
var buf_len: u32 = @as(u32, @intCast(pkg_path.len + suffix_len));
if (buf_len > name_checking_buf.len) {
return error.PathTooLong;
}
bun.copy(u8, name_checking_buf[0..pkg_path.len], pkg_path);
bun.copy(u8, name_checking_buf[pkg_path.len .. pkg_path.len + str_node_modules.len], str_node_modules);
bun.copy(u8, name_checking_buf[pkg_path.len + str_node_modules.len .. pkg_path.len + suffix_len], name_bytes);
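// Walk up the node_modules chain the way the npm resolver does. For example, with
// pkg_path = "node_modules/a/node_modules/b" and a dependency "c", we check:
//   node_modules/a/node_modules/b/node_modules/c
//   node_modules/a/node_modules/c
//   node_modules/c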
while (true) {
debug("checking {s}", .{name_checking_buf[0..buf_len]});
if (id_map.get(name_checking_buf[0..buf_len])) |found_| {
var found = found_;
if (found.new_package_id == package_id_is_link) {
// it is a workspace package, resolve from the "link": true entry to the real entry.
const ref_pkg = packages_properties.at(found.old_json_index).value.?.data.e_object;
// the `orelse` branch here is technically reachable with a malformed lockfile
const resolved_v = ref_pkg.get("resolved") orelse return error.LockfileWorkspaceMissingResolved;
const resolved = resolved_v.asString(this.allocator) orelse return error.InvalidNPMLockfile;
found = (id_map.get(resolved) orelse return error.InvalidNPMLockfile);
} else if (found.new_package_id == package_id_is_bundled) {
debug("skipping bundled dependency {s}", .{name_bytes});
continue :dep_loop;
}
const id = found.new_package_id;
var is_workspace = resolutions[id].tag == .workspace;
dependencies_buf[0] = Dependency{
.name = dep_name,
.name_hash = name_hash,
.version = version,
.behavior = .{
.normal = dep_key == .dependencies,
.optional = dep_key == .optionalDependencies,
.dev = dep_key == .devDependencies,
.peer = dep_key == .peerDependencies,
.workspace = is_workspace,
},
};
resolutions_buf[0] = id;
dependencies_buf = dependencies_buf[1..];
resolutions_buf = resolutions_buf[1..];
// If the package resolution is not set, resolve the target package
// using the information we have from the dependency declaration.
if (resolutions[id].tag == .uninitialized) {
debug("resolving '{s}'", .{name_bytes});
const res = resolved: {
const dep_pkg = packages_properties.at(found.old_json_index).value.?.data.e_object;
const npm_resolution = dep_pkg.get("resolved") orelse {
break :resolved Resolution.init(.{
.folder = builder.append(
String,
packages_properties.at(found.old_json_index).key.?.asString(allocator).?,
),
});
};
const dep_resolved = npm_resolution.asString(this.allocator) orelse return error.InvalidNPMLockfile;
break :resolved switch (version.tag) {
.uninitialized => std.debug.panic("Version string {s} resolved to `.uninitialized`", .{version_bytes}),
.npm, .dist_tag => res: {
// It is theoretically possible to hit this in a case where the resolved dependency is NOT
// an npm dependency, but that case is so convoluted that it is not worth handling.
//
// Deleting 'package-lock.json' would completely break the installation of the project.
//
// We assume that the given URL is to *some* npm registry, or the resolution is to a workspace package.
// If it is a workspace package, then this branch will not be hit as the resolution was already set earlier.
const dep_actual_version = (dep_pkg.get("version") orelse return error.InvalidNPMLockfile)
.asString(this.allocator) orelse return error.InvalidNPMLockfile;
const dep_actual_version_str = builder.append(String, dep_actual_version);
const dep_actual_version_sliced = dep_actual_version_str.sliced(this.buffers.string_bytes.items);
break :res Resolution.init(.{
.npm = .{
.url = builder.append(String, dep_resolved),
.version = Semver.Version.parse(dep_actual_version_sliced).version.fill(),
},
});
},
.tarball => if (strings.hasPrefixComptime(dep_resolved, "file:"))
Resolution.init(.{ .local_tarball = builder.append(String, dep_resolved[5..]) })
else
Resolution.init(.{ .remote_tarball = builder.append(String, dep_resolved) }),
.folder => Resolution.init(.{ .folder = builder.append(String, dep_resolved) }),
// not sure if this is possible to hit
.symlink => Resolution.init(.{ .folder = builder.append(String, dep_resolved) }),
.workspace => workspace: {
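// npm may store the literal with a "workspace:" prefix, e.g. "workspace:packages/a"; strip it before resolving.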
var input = builder.append(String, dep_resolved).sliced(this.buffers.string_bytes.items);
if (strings.hasPrefixComptime(input.slice, "workspace:")) {
input = input.sub(input.slice["workspace:".len..]);
}
break :workspace Resolution.init(.{
.workspace = input.value(),
});
},
.git => res: {
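// e.g. "git+ssh://git@github.com/dylan-conway/install-test.git#<commit>": split repo and committish on the last '#'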
const str = (if (strings.hasPrefixComptime(dep_resolved, "git+"))
builder.append(String, dep_resolved[4..])
else
builder.append(String, dep_resolved))
.sliced(this.buffers.string_bytes.items);
const hash_index = strings.lastIndexOfChar(str.slice, '#') orelse return error.InvalidNPMLockfile;
const commit = str.sub(str.slice[hash_index + 1 ..]).value();
break :res Resolution.init(.{
.git = .{
.owner = version.value.git.owner,
.repo = str.sub(str.slice[0..hash_index]).value(),
.committish = commit,
.resolved = commit,
.package_name = dep_name,
},
});
},
.github => res: {
const str = (if (strings.hasPrefixComptime(dep_resolved, "git+"))
builder.append(String, dep_resolved[4..])
else
builder.append(String, dep_resolved))
.sliced(this.buffers.string_bytes.items);
const hash_index = strings.lastIndexOfChar(str.slice, '#') orelse return error.InvalidNPMLockfile;
const commit = str.sub(str.slice[hash_index + 1 ..]).value();
break :res Resolution.init(.{
.git = .{
.owner = version.value.github.owner,
.repo = str.sub(str.slice[0..hash_index]).value(),
.committish = commit,
.resolved = commit,
.package_name = dep_name,
},
});
},
};
};
debug("-> {}", .{res.fmtForDebug(this.buffers.string_bytes.items)});
resolutions[id] = res;
metas[id].origin = switch (res.tag) {
// Only the root resolution has a local origin; everything else is treated as npm.
.root => .local,
else => .npm,
};
try this.getOrPutID(id, this.packages.items(.name_hash)[id]);
}
continue :dep_loop;
}
// step up to the enclosing node_modules directory
if (strings.lastIndexOf(name_checking_buf[0..buf_len -| ("node_modules/".len + name_bytes.len)], "node_modules/")) |idx| {
debug("found 'node_modules/' at {d}", .{idx});
buf_len = @intCast(idx + "node_modules/".len + name_bytes.len);
bun.copy(u8, name_checking_buf[idx + "node_modules/".len .. idx + "node_modules/".len + name_bytes.len], name_bytes);
} else if (!strings.hasPrefixComptime(name_checking_buf[0..buf_len], "node_modules/")) {
// this is hit when starting from a path like `packages/etc` or `packages/etc/node_modules/xyz`;
// we still need to check the root node_modules
buf_len = @intCast("node_modules/".len + name_bytes.len);
bun.copy(u8, name_checking_buf[0..buf_len], "node_modules/");
bun.copy(u8, name_checking_buf[buf_len - name_bytes.len .. buf_len], name_bytes);
} else {
// optional peer dependencies are allowed to be missing
if (dep_key == .peerDependencies) {
if (peer_dep_meta) |o| if (o.get(name_bytes)) |meta| {
if (meta.data != .e_object) return error.InvalidNPMLockfile;
if (meta.data.e_object.get("optional")) |optional| {
if (optional.data != .e_boolean) return error.InvalidNPMLockfile;
if (optional.data.e_boolean.value) {
dependencies_buf[0] = Dependency{
.name = dep_name,
.name_hash = name_hash,
.version = version,
.behavior = .{
.normal = dep_key == .dependencies,
.optional = true,
.dev = dep_key == .devDependencies,
.peer = dep_key == .peerDependencies,
.workspace = false,
},
};
resolutions_buf[0] = Install.invalid_package_id;
dependencies_buf = dependencies_buf[1..];
resolutions_buf = resolutions_buf[1..];
continue :dep_loop;
}
}
};
}
// it is technically possible for a package-lock.json to list a dependency without a
// corresponding package entry. it's very unlikely, but possible. when npm sees this, it
// essentially doesn't install the package and treats it like it doesn't exist.
// in test/cli/install/migrate-fixture, you can observe this for `iconv-lite`
debug("could not find package '{s}' in '{s}'", .{ name_bytes, pkg_path });
continue :dep_loop;
}
}
}
}
}
}
this.buffers.resolutions.items.len = (@intFromPtr(resolutions_buf.ptr) - @intFromPtr(this.buffers.resolutions.items.ptr)) / @sizeOf(Install.PackageID);
this.buffers.dependencies.items.len = this.buffers.resolutions.items.len;
// In allow_assert builds, these buffers were prefilled with sentinel values we can detect here.
// Hitting this would be our own bug, so it is safe to compile the check out in release.
if (Environment.allow_assert) {
std.debug.assert(this.buffers.dependencies.items.len == (@intFromPtr(dependencies_buf.ptr) - @intFromPtr(this.buffers.dependencies.items.ptr)) / @sizeOf(Dependency));
std.debug.assert(this.buffers.dependencies.items.len <= num_deps);
var crash = false;
for (this.buffers.dependencies.items, 0..) |r, i| {
// 'if behavior is uninitialized'
if (r.behavior.eq(.{})) {
debug("dependency index '{d}' was not set", .{i});
crash = true;
}
}
for (this.buffers.resolutions.items, 0..) |r, i| {
if (r == unset_package_id) {
debug("resolution index '{d}' was not set", .{i});
crash = true;
}
}
if (crash) {
std.debug.panic("Assertion failure, see above", .{});
}
}
// A package without a resolution, however, is not our fault. A bad lockfile with
// extraneous packages can trigger this; npm normally trims such packages out automatically.
var is_missing_resolutions = false;
for (resolutions, 0..) |r, i| {
if (r.tag == .uninitialized) {
Output.printErrorln("Could not resolve package '{s}' in lockfile.", .{this.packages.items(.name)[i].slice(this.buffers.string_bytes.items)});
is_missing_resolutions = true;
} else if (Environment.allow_assert) {
// Assertion from appendPackage. Running it earlier would always fail because the
// resolution had not been written yet; now that all the data is written, there is
// no excuse for it to fail.
//
// If this is hit, it means getOrPutID was not called on this package id. Look for where 'resolution[i]' is set
std.debug.assert(this.getPackageID(this.packages.items(.name_hash)[i], null, &r) != null);
}
}
if (is_missing_resolutions) {
return error.NotAllPackagesGotResolved;
}
// if (Environment.isDebug) {
// const dump_file = try std.fs.cwd().createFileZ("before-clean.json", .{});
// defer dump_file.close();
// try std.json.stringify(this, .{ .whitespace = .indent_2 }, dump_file.writer());
// }
// This is definitely a memory leak, but it's fine because there is no install API, so it can only be leaked once per process.
// This operation is necessary because callers of `loadFromDisk` assume the data is written into the passed `this`.
// You'll find that not cleaning the lockfile causes `bun install` to not actually install anything, since it doesn't have any hoisted trees.
this.* = (try this.cleanWithLogger(&[_]Install.PackageManager.UpdateRequest{}, log, false)).*;
// if (Environment.isDebug) {
// const dump_file = try std.fs.cwd().createFileZ("after-clean.json", .{});
// defer dump_file.close();
// try std.json.stringify(this, .{ .whitespace = .indent_2 }, dump_file.writer());
// }
if (Environment.allow_assert) {
try this.verifyData();
}
this.meta_hash = try this.generateMetaHash(false);
return LoadFromDiskResult{ .ok = this };
}
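/// Derives a package's name from its lockfile path, e.g.
/// "node_modules/@jridgewell/gen-mapping" -> "@jridgewell/gen-mapping".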
fn packageNameFromPath(pkg_path: []const u8) []const u8 {
if (pkg_path.len == 0) return "";
const pkg_name_start: usize = if (strings.lastIndexOf(pkg_path, "/node_modules/")) |last_index|
last_index + "/node_modules/".len
else if (strings.hasPrefixComptime(pkg_path, "node_modules/"))
"node_modules/".len
else
strings.lastIndexOf(pkg_path, "/") orelse 0;
return pkg_path[pkg_name_start..];
}

View File

@@ -327,12 +327,18 @@ pub const OperatingSystem = enum(u16) {
return (@intFromEnum(this) & linux) != 0;
} else if (comptime Environment.isMac) {
return (@intFromEnum(this) & darwin) != 0;
} else if (comptime Environment.isWindows) {
return (@intFromEnum(this) & win32) != 0;
} else {
return false;
}
}
const NameMap = ComptimeStringMap(u16, .{
pub inline fn has(this: OperatingSystem, other: u16) bool {
return (@intFromEnum(this) & other) != 0;
}
pub const NameMap = ComptimeStringMap(u16, .{
.{ "aix", aix },
.{ "darwin", darwin },
.{ "freebsd", freebsd },
@@ -383,7 +389,7 @@ pub const Architecture = enum(u16) {
pub const all_value: u16 = arm | arm64 | ia32 | mips | mipsel | ppc | ppc64 | s390 | s390x | x32 | x64;
const NameMap = ComptimeStringMap(u16, .{
pub const NameMap = ComptimeStringMap(u16, .{
.{ "arm", arm },
.{ "arm64", arm64 },
.{ "ia32", ia32 },
@@ -397,6 +403,10 @@ pub const Architecture = enum(u16) {
.{ "x64", x64 },
});
pub inline fn has(this: Architecture, other: u16) bool {
return (@intFromEnum(this) & other) != 0;
}
pub fn isMatch(this: Architecture) bool {
if (comptime Environment.isAarch64) {
return (@intFromEnum(this) & arm64) != 0;

View File

@@ -15,6 +15,14 @@ pub const Resolution = extern struct {
_padding: [7]u8 = .{0} ** 7,
value: Value = .{ .uninitialized = {} },
/// Use like Resolution.init(.{ .npm = VersionedURL{ ... } })
pub inline fn init(value: anytype) Resolution {
return Resolution{
.tag = @field(Tag, @typeInfo(@TypeOf(value)).Struct.fields[0].name),
.value = Value.init(value),
};
}
pub fn order(
lhs: *const Resolution,
rhs: *const Resolution,
@@ -107,18 +115,22 @@ pub const Resolution = extern struct {
}),
.root => Value.init(.{ .root = {} }),
else => {
std.debug.panic("Internal error: unexpected resolution tag:,) {}", .{this.tag});
std.debug.panic("Internal error: unexpected resolution tag: {}", .{this.tag});
},
},
};
}
pub fn fmt(this: *const Resolution, buf: []const u8) Formatter {
return Formatter{ .resolution = this, .buf = buf };
pub fn fmt(this: *const Resolution, string_bytes: []const u8) Formatter {
return Formatter{ .resolution = this, .buf = string_bytes };
}
pub fn fmtURL(this: *const Resolution, options: *const PackageManager.Options, buf: []const u8) URLFormatter {
return URLFormatter{ .resolution = this, .buf = buf, .options = options };
pub fn fmtURL(this: *const Resolution, options: *const PackageManager.Options, string_bytes: []const u8) URLFormatter {
return URLFormatter{ .resolution = this, .buf = string_bytes, .options = options };
}
pub fn fmtForDebug(this: *const Resolution, string_bytes: []const u8) DebugFormatter {
return DebugFormatter{ .resolution = this, .buf = string_bytes };
}
pub fn eql(
@@ -225,6 +237,31 @@ pub const Resolution = extern struct {
}
};
pub const DebugFormatter = struct {
resolution: *const Resolution,
buf: []const u8,
pub fn format(formatter: DebugFormatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void {
try writer.writeAll("Resolution{ .");
try writer.writeAll(std.enums.tagName(Tag, formatter.resolution.tag) orelse "invalid");
try writer.writeAll(" = ");
switch (formatter.resolution.tag) {
.npm => try formatter.resolution.value.npm.version.fmt(formatter.buf).format(layout, opts, writer),
.local_tarball => try writer.writeAll(formatter.resolution.value.local_tarball.slice(formatter.buf)),
.folder => try writer.writeAll(formatter.resolution.value.folder.slice(formatter.buf)),
.remote_tarball => try writer.writeAll(formatter.resolution.value.remote_tarball.slice(formatter.buf)),
.git => try formatter.resolution.value.git.formatAs("git+", formatter.buf, layout, opts, writer),
.github => try formatter.resolution.value.github.formatAs("github:", formatter.buf, layout, opts, writer),
.gitlab => try formatter.resolution.value.gitlab.formatAs("gitlab:", formatter.buf, layout, opts, writer),
.workspace => try std.fmt.format(writer, "workspace:{s}", .{formatter.resolution.value.workspace.slice(formatter.buf)}),
.symlink => try std.fmt.format(writer, "link:{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}),
.single_file_module => try std.fmt.format(writer, "module:{s}", .{formatter.resolution.value.single_file_module.slice(formatter.buf)}),
else => try writer.writeAll("{}"),
}
try writer.writeAll(" }");
}
};
pub const Value = extern union {
uninitialized: void,
root: void,

View File

@@ -1679,7 +1679,7 @@ pub const Resolver = struct {
// check the global cache directory for a package.json file.
var manager = r.getPackageManager();
var dependency_version = Dependency.Version{};
var dependency_behavior = @as(Dependency.Behavior, @enumFromInt(Dependency.Behavior.normal));
var dependency_behavior = Dependency.Behavior.normal;
var string_buf = esm.version;
// const initial_pending_tasks = manager.pending_tasks;

Binary file not shown.

View File

@@ -0,0 +1,3 @@
// Bun Snapshot v1, https://goo.gl/fbAQLP
exports[`bun pm migrate 1`] = `"E7F4C15F76D43059-37ed01456afdc149-B17A9541F8322712-04892ad4e094e703"`;

View File

@@ -1,6 +1,6 @@
import { spawn } from "bun";
import { hash, spawn } from "bun";
import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test";
import { bunExe, bunEnv as env } from "harness";
import { bunEnv, bunExe, bunEnv as env } from "harness";
import { mkdir, writeFile, exists } from "fs/promises";
import { join } from "path";
import {
@@ -15,6 +15,7 @@ import {
root_url,
setHandler,
} from "./dummy.registry";
import { cpSync, rmSync } from "js/node/fs/export-star-from";
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);
@@ -332,3 +333,39 @@ it("should remove all cache", async () => {
expect(await exited2).toBe(0);
expect(await exists(cache_dir)).toBeFalse();
});
import { tmpdir } from "os";
it("bun pm migrate", async () => {
const test_dir = join(tmpdir(), "contoso-test" + Math.random().toString(36).slice(2));
cpSync(join(import.meta.dir, "migration/contoso-test"), test_dir, { recursive: true });
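// Copy the fixture into a temp dir so the migration writes bun.lockb outside the repo.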
const { stdout, stderr, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "pm", "migrate", "--force"],
cwd: test_dir,
stdout: "pipe",
stdin: "pipe",
stderr: "pipe",
env: bunEnv,
});
expect(exitCode).toBe(0);
expect(stderr).toBeDefined();
expect(stdout).toBeDefined();
expect(stdout.toString("utf-8")).toBe("");
expect(stderr.toString("utf-8")).toEndWith("migrated lockfile from package-lock.json\n");
const hashExec = Bun.spawnSync({
cmd: [bunExe(), "pm", "hash"],
cwd: test_dir,
stdout: "pipe",
stdin: "pipe",
stderr: "pipe",
env: bunEnv,
});
expect(hashExec.exitCode).toBe(0);
const hash = hashExec.stdout.toString("utf-8").trim();
expect(hash).toMatchSnapshot();
});

View File

@@ -0,0 +1,210 @@
{
"name": "test3",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "test3",
"dependencies": {
"svelte": "*"
}
},
"node_modules/@ampproject/remapping": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz",
"integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.0",
"@jridgewell/trace-mapping": "^0.3.9"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/gen-mapping": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz",
"integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==",
"dependencies": {
"@jridgewell/set-array": "^1.0.1",
"@jridgewell/sourcemap-codec": "^1.4.10",
"@jridgewell/trace-mapping": "^0.3.9"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz",
"integrity": "sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/set-array": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
"integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.4.15",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
"integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg=="
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.19",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz",
"integrity": "sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==",
"dependencies": {
"@jridgewell/resolve-uri": "^3.1.0",
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@types/estree": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.2.tgz",
"integrity": "sha512-VeiPZ9MMwXjO32/Xu7+OwflfmeoRwkE/qzndw42gGtgJwZopBnzy2gD//NN1+go1mADzkDcqf/KnFRSjTJ8xJA=="
},
"node_modules/acorn": {
"version": "8.10.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
"integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==",
"bin": {
"acorn": "bin/acorn"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/aria-query": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz",
"integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==",
"dependencies": {
"dequal": "^2.0.3"
}
},
"node_modules/axobject-query": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz",
"integrity": "sha512-jsyHu61e6N4Vbz/v18DHwWYKK0bSWLqn47eeDSKPB7m8tqMHF9YJ+mhIk2lVteyZrY8tnSj/jHOv4YiTCuCJgg==",
"dependencies": {
"dequal": "^2.0.3"
}
},
"node_modules/code-red": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/code-red/-/code-red-1.0.4.tgz",
"integrity": "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.4.15",
"@types/estree": "^1.0.1",
"acorn": "^8.10.0",
"estree-walker": "^3.0.3",
"periscopic": "^3.1.0"
}
},
"node_modules/css-tree": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz",
"integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==",
"dependencies": {
"mdn-data": "2.0.30",
"source-map-js": "^1.0.1"
},
"engines": {
"node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
}
},
"node_modules/dequal": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
"integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
"engines": {
"node": ">=6"
}
},
"node_modules/estree-walker": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz",
"integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==",
"dependencies": {
"@types/estree": "^1.0.0"
}
},
"node_modules/is-reference": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.2.tgz",
"integrity": "sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==",
"dependencies": {
"@types/estree": "*"
}
},
"node_modules/locate-character": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz",
"integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="
},
"node_modules/magic-string": {
"version": "0.30.4",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.4.tgz",
"integrity": "sha512-Q/TKtsC5BPm0kGqgBIF9oXAs/xEf2vRKiIB4wCRQTJOQIByZ1d+NnUOotvJOvNpi5RNIgVOMC3pOuaP1ZTDlVg==",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.4.15"
},
"engines": {
"node": ">=12"
}
},
"node_modules/mdn-data": {
"version": "2.0.30",
"resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
"integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="
},
"node_modules/periscopic": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz",
"integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==",
"dependencies": {
"@types/estree": "^1.0.0",
"estree-walker": "^3.0.0",
"is-reference": "^3.0.0"
}
},
"node_modules/source-map-js": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
"integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/svelte": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/svelte/-/svelte-4.0.0.tgz",
"integrity": "sha512-+yCYu3AEUu9n91dnQNGIbnVp8EmNQtuF/YImW4+FTXRHard7NMo+yTsWzggPAbj3fUEJ1FBJLkql/jkp6YB5pg==",
"dependencies": {
"@ampproject/remapping": "^2.2.1",
"@jridgewell/sourcemap-codec": "^1.4.15",
"@jridgewell/trace-mapping": "^0.3.18",
"acorn": "^8.8.2",
"aria-query": "^5.2.1",
"axobject-query": "^3.2.1",
"code-red": "^1.0.3",
"css-tree": "^2.3.1",
"estree-walker": "^3.0.3",
"is-reference": "^3.0.1",
"locate-character": "^3.0.0",
"magic-string": "^0.30.0",
"periscopic": "^3.1.0"
},
"engines": {
"node": ">=16"
}
}
}
}

View File

@@ -0,0 +1,6 @@
{
"name": "test3",
"workspaces": [
"packages/*"
]
}

View File

@@ -0,0 +1,7 @@
{
"name": "a",
"version": "1.0.0",
"dependencies": {
"b": "1.0.0"
}
}

View File

@@ -0,0 +1,7 @@
{
"name": "b",
"version": "1.0.0",
"dependencies": {
"svelte": "^3.0.0"
}
}

View File

@@ -0,0 +1,121 @@
import fs from "fs";
import path from "path";
import { test, expect, describe, beforeAll } from "bun:test";
import { bunEnv, bunExe } from "harness";
import { tmpdir } from "os";
import { join } from "path";
let cwd = join(tmpdir(), "complex-workspace-test" + Math.random().toString(36).slice(2, 8));
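// The fixture is copied into a fresh temp directory (see beforeAll below) so every run starts from a clean slate.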
function validate(packageName: string, version: string, realPackageName?: string) {
test(`${packageName} is ${realPackageName ? `${realPackageName}@${version}` : version}`, () => {
if (!cwd) throw new Error("install failed");
const pkg = JSON.parse(fs.readFileSync(path.join(cwd, packageName, "package.json"), "utf8"));
expect(pkg.version).toBe(version);
if (realPackageName) {
expect(pkg.name).toBe(realPackageName);
}
});
}
function mustExist(filePath: string) {
test(`${filePath} exists`, () => {
if (!cwd) throw new Error("install failed");
if (!fs.existsSync(path.join(cwd, filePath))) {
throw new Error(`File ${filePath} was not found`);
}
});
}
function mustNotExist(filePath: string) {
test(`${filePath} does not exist`, () => {
if (!cwd) throw new Error("install failed");
if (fs.existsSync(path.join(cwd, filePath))) {
throw new Error(`File ${filePath} was found`);
}
});
}
beforeAll(() => {
fs.cpSync(path.join(import.meta.dir, "complex-workspace"), cwd, { recursive: true });
});
test("the install succeeds", async () => {
var subprocess = Bun.spawn([bunExe(), "reset.ts"], {
env: bunEnv,
cwd,
stdio: ["inherit", "inherit", "inherit"],
});
await subprocess.exited;
if (subprocess.exitCode != 0) {
cwd = false as any;
throw new Error("Failed to install");
}
subprocess = Bun.spawn([bunExe(), "install"], {
env: bunEnv,
cwd,
stdio: ["inherit", "inherit", "inherit"],
});
await subprocess.exited;
if (subprocess.exitCode != 0) {
cwd = false as any;
throw new Error("Failed to install");
}
console.log(cwd);
});
// bun-types
validate("node_modules/bun-types", "1.0.0");
mustExist("node_modules/bun-types/isfake.txt");
// NOTE: ???
// validate("node_modules/bun-types/node_modules/bun-types", "1.0.0");
mustNotExist("node_modules/bun-types/node_modules/bun-types/isfake.txt");
// svelte
validate("node_modules/svelte", "4.1.2");
validate("packages/second/node_modules/svelte", "4.1.0");
validate("packages/with-postinstall/node_modules/svelte", "3.50.0");
// validate("packages/body-parser/node_modules/svelte", "0.2.0", "public-install-test");
// NOTE: bun hoists this dependency higher than npm
// npm places this in node_modules/express
validate("packages/second/node_modules/express", "1.0.0", "svelte");
// install test
// validate("node_modules/install-test", "0.3.0", "publicinstalltest");
// mustExist("node_modules/install-test/src/index.js");
validate("node_modules/install-test1", "0.2.0", "install-test");
mustExist("node_modules/install-test1/index.js");
// validate("node_modules/public-install-test", "0.2.0", "public-install-test");
// mustExist("node_modules/public-install-test/index.js");
// hello
validate("node_modules/hello", "0.3.2");
mustExist("node_modules/hello/version.txt");
mustNotExist("packages/second/node_modules/hello/version.txt");
// body parser
validate("node_modules/body-parser", "200.0.0");
// NOTE: bun hoists this dependency higher than npm
// npm places this in node_modules/not-body-parser
validate("packages/second/node_modules/not-body-parser", "200.0.0", "body-parser");
// NOTE: bun install doesn't properly handle npm aliased dependencies
// validate("packages/second/node_modules/connect", "200.0.0", "body-parser");
validate("packages/second/node_modules/body-parser", "3.21.2", "express");
// NOTE: bun does not hoist this properly, but it is extremely unlikely to be a real use case
// validate("packages/second/node_modules/body-parser/node_modules/body-parser", "1.13.3", "body-parser");
// connect
// mustNotExist("node_modules/connect");
// validate("packages/second/node_modules/body-parser/node_modules/connect", "2.30.2", "connect");
// sharp
validate("node_modules/sharp", "0.32.6");
// iconv-lite
mustNotExist("packages/second/node_modules/body-parser/node_modules/body-parser/node_modules/iconv-lite");
mustNotExist("packages/second/node_modules/body-parser/node_modules/iconv-lite");
mustNotExist("packages/second/node_modules/iconv-lite");
mustNotExist("node_modules/iconv-lite");

View File

@@ -0,0 +1,2 @@
!package-lock.json
bun.lockb

View File

@@ -0,0 +1 @@
true

View File

@@ -0,0 +1,7 @@
{
"name": "bun-types",
"version": "1.0.0",
"dependencies": {
"bun-types": "npm:bun-types@^1.0.0"
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,16 @@
{
"name": "root",
"version": "0.0.0",
"dependencies": {
"bar": "https://github.com/oven-sh/bun/raw/f7e4eb83694aa007a492ef66c28ffbe6a2dae791/test/cli/install/bar-0.0.2.tgz",
"bun-types": "file:bun-types",
"hello": "file:hello-0.3.2.tgz",
"install-test": "bitbucket:dylan-conway/public-install-test",
"install-test1": "git+ssh://git@github.com/dylan-conway/install-test.git#596234dab30564f37adae1e5c4d7123bcffce537",
"public-install-test": "gitlab:dylan-conway/public-install-test",
"svelte": "4.1.2"
},
"workspaces": [
"packages/*"
]
}

View File

@@ -0,0 +1,7 @@
{
"name": "body-parser",
"version": "200.0.0",
"dependencies": {
"svelte": "git+ssh://git@gitlab.com/dylan-conway/public-install-test.git#93f3aa4ec9ca8a0bacc010776db48bfcd915c44c"
}
}

View File

@@ -0,0 +1,6 @@
{
"name": "lol",
"dependencies": {
"esbuild": "^0.19.4"
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "second",
"version": "3.0.0",
"dependencies": {
"body-parser": "npm:express@*",
"express": "npm:svelte@*",
"hello": "0.3.2",
"lol": "*",
"not-body-parser": "*",
"svelte": "4.1.0"
}
}

View File

@@ -0,0 +1 @@
postinstall.txt

View File

@@ -0,0 +1,11 @@
{
"name": "with-postinstall",
"version": "1.0.0",
"dependencies": {
"sharp": "*",
"svelte": "3.50.0"
},
"scripts": {
"postinstall": "bun postinstall.js"
}
}

View File

@@ -0,0 +1,6 @@
import { writeFileSync } from "fs";
writeFileSync(import.meta.dir + "/postinstall.txt", `i ran!`);
// TODO: postinstall doesn't run sharp's scripts yet :(
// import "sharp";

View File

@@ -0,0 +1 @@
this case has a very fun and hard-to-understand dependency graph. bun pm migrate is expected to migrate it without changing what packages will install. the hoisting doesn't match exactly what npm does, but it is equally valid.

View File

@@ -0,0 +1,9 @@
import fs from "fs";
fs.rmSync("bun.lockb", { recursive: true, force: true });
fs.rmSync("node_modules", { recursive: true, force: true });
fs.rmSync("packages/body-parser/node_modules", { recursive: true, force: true });
fs.rmSync("packages/lol-package/node_modules", { recursive: true, force: true });
fs.rmSync("packages/second/node_modules", { recursive: true, force: true });
fs.rmSync("packages/with-postinstall/node_modules", { recursive: true, force: true });
fs.rmSync("packages/with-postinstall/postinstall.txt", { recursive: true, force: true });

View File

@@ -0,0 +1,2 @@
!package-lock.json
bun.lockb

View File

@@ -0,0 +1,21 @@
MIT License
Copyright 2022 (c) Microsoft Corporation.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,42 @@
{
"name": "api",
"version": "1.0.0",
"description": "",
"scripts": {
"env": "azd env get-values --no-prompt > .env",
"build": "tsc",
"watch": "tsc --w",
"prestart": "tsc && func extensions install",
"start:host": "func start",
"start": "npm-run-all --parallel start:host watch",
"lint": "eslint --config ../../.eslintrc.js",
"lint:fix": "npm run lint -- --fix"
},
"dependencies": {
"@azure/identity": "^2.1.0",
"@azure/keyvault-secrets": "^4.5.1",
"applicationinsights": "^2.3.4",
"dotenv": "^16.0.1",
"mongodb": "^4.7.0",
"mongoose": "^6.5.3",
"pg": "^8.8.0",
"swagger-ui-dist": "^4.14.0",
"winston": "^3.8.1",
"winston-transport": "^4.5.0",
"yamljs": "^0.3.0"
},
"devDependencies": {
"@azure/functions": "^1.2.3",
"@types/node": "^18.11.18",
"@types/pg": "^8.6.6",
"@types/swagger-ui-dist": "^3.30.1",
"@types/yamljs": "^0.2.31",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"azure-functions-core-tools": "4.0.5095",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.6.0",
"npm-run-all": "^4.1.5",
"typescript": "^4.8.4"
}
}

View File

@@ -0,0 +1,34 @@
{
"name": "blog-cms",
"private": true,
"version": "0.1.0",
"description": "The blog backend",
"scripts": {
"develop": "strapi develop",
"start": "strapi start",
"build": "strapi build",
"strapi": "strapi",
"docker:build": "docker build --tag cms .",
"docker:run": "docker run --rm --publish 1337:1337 --env-file ../../.env.docker cms"
},
"devDependencies": {
"@types/lodash.set": "^4.3.7"
},
"dependencies": {
"@strapi/plugin-graphql": "^4.9.1",
"@strapi/plugin-i18n": "^4.5.6",
"@strapi/plugin-users-permissions": "^4.5.6",
"@strapi/strapi": "^4.5.6",
"lodash.set": "^4.3.2",
"pg": "^8.8.0",
"strapi-provider-upload-azure-storage": "^2.1.0"
},
"strapi": {
"uuid": "42db7187-d121-4240-b174-3b2b3605f696"
},
"engines": {
"node": ">=14.19.1 <=18.x.x",
"npm": ">=6.0.0"
},
"license": "MIT"
}

View File

@@ -0,0 +1,38 @@
{
"name": "blog",
"version": "1.0.2",
"private": true,
"scripts": {
"develop": "next dev",
"dev": "next dev",
"build": "next build",
"start": "next start",
"deploy": "next build && next export",
"lint": "next lint",
"lint:fix": "next lint --fix",
"docker:build": "docker build --tag blog .",
"docker:run": "docker run --rm --publish 3000:3000 --env-file ../../.env.docker blog"
},
"dependencies": {
"next": "^13.3.0",
"qs": "^6.10.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-markdown": "^8.0.4",
"rehype-raw": "^6.1.1",
"rehype-sanitize": "^5.0.1",
"uikit": "^3.16.14"
},
"license": "MIT",
"devDependencies": {
"@types/node": "^18.11.18",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"eslint-config-next": "^13.3.0",
"eslint-config-prettier": "^8.6.0",
"eslint-plugin-prettier": "^4.2.1",
"prettier": "^2.8.2",
"typescript": "^4.8.4"
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,17 @@
{
"name": "contoso-real-estate",
"version": "1.0.0",
"description": "https://github.com/Azure-Samples/contoso-real-estate",
"private": true,
"author": "Microsoft",
"license": "MIT",
"workspaces": [
"packages/*"
],
"devDependencies": {
"concurrently": "^7.6.0",
"git-commit-msg-linter": "^4.2.1",
"prettier": "^2.7.1",
"rimraf": "^4.3.1"
}
}

View File

@@ -0,0 +1,42 @@
{
"name": "api",
"version": "1.0.0",
"description": "",
"scripts": {
"env": "azd env get-values --no-prompt > .env",
"build": "tsc",
"watch": "tsc --w",
"prestart": "tsc && func extensions install",
"start:host": "func start",
"start": "npm-run-all --parallel start:host watch",
"lint": "eslint --config ../../.eslintrc.js",
"lint:fix": "npm run lint -- --fix"
},
"dependencies": {
"@azure/identity": "^2.1.0",
"@azure/keyvault-secrets": "^4.5.1",
"applicationinsights": "^2.3.4",
"dotenv": "^16.0.1",
"mongodb": "^4.7.0",
"mongoose": "^6.5.3",
"pg": "^8.8.0",
"swagger-ui-dist": "^4.14.0",
"winston": "^3.8.1",
"winston-transport": "^4.5.0",
"yamljs": "^0.3.0"
},
"devDependencies": {
"@azure/functions": "^1.2.3",
"@types/node": "^18.11.18",
"@types/pg": "^8.6.6",
"@types/swagger-ui-dist": "^3.30.1",
"@types/yamljs": "^0.2.31",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"azure-functions-core-tools": "4.0.5095",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.6.0",
"npm-run-all": "^4.1.5",
"typescript": "^4.8.4"
}
}

View File

@@ -0,0 +1,34 @@
{
"name": "blog-cms",
"private": true,
"version": "0.1.0",
"description": "The blog backend",
"scripts": {
"develop": "strapi develop",
"start": "strapi start",
"build": "strapi build",
"strapi": "strapi",
"docker:build": "docker build --tag cms .",
"docker:run": "docker run --rm --publish 1337:1337 --env-file ../../.env.docker cms"
},
"devDependencies": {
"@types/lodash.set": "^4.3.7"
},
"dependencies": {
"@strapi/plugin-graphql": "^4.9.1",
"@strapi/plugin-i18n": "^4.5.6",
"@strapi/plugin-users-permissions": "^4.5.6",
"@strapi/strapi": "^4.5.6",
"lodash.set": "^4.3.2",
"pg": "^8.8.0",
"strapi-provider-upload-azure-storage": "^2.1.0"
},
"strapi": {
"uuid": "42db7187-d121-4240-b174-3b2b3605f696"
},
"engines": {
"node": ">=14.19.1 <=18.x.x",
"npm": ">=6.0.0"
},
"license": "MIT"
}

View File

@@ -0,0 +1,38 @@
{
"name": "blog",
"version": "1.0.2",
"private": true,
"scripts": {
"develop": "next dev",
"dev": "next dev",
"build": "next build",
"start": "next start",
"deploy": "next build && next export",
"lint": "next lint",
"lint:fix": "next lint --fix",
"docker:build": "docker build --tag blog .",
"docker:run": "docker run --rm --publish 3000:3000 --env-file ../../.env.docker blog"
},
"dependencies": {
"next": "^13.3.0",
"qs": "^6.10.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-markdown": "^8.0.4",
"rehype-raw": "^6.1.1",
"rehype-sanitize": "^5.0.1",
"uikit": "^3.16.14"
},
"license": "MIT",
"devDependencies": {
"@types/node": "^18.11.18",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"eslint-config-next": "^13.3.0",
"eslint-config-prettier": "^8.6.0",
"eslint-plugin-prettier": "^4.2.1",
"prettier": "^2.8.2",
"typescript": "^4.8.4"
}
}

View File

@@ -0,0 +1,52 @@
{
"name": "portal",
"version": "0.0.0",
"scripts": {
"ng": "ng",
"swa": "swa",
"start": "ng serve --disable-host-check",
"start:swa": "swa start",
"build": "ng build",
"watch": "ng build --watch --configuration development",
"test": "ng test",
"lint": "eslint --config ../../.eslintrc.js",
"lint:fix": "npm run lint -- --fix"
},
"private": true,
"dependencies": {
"@angular/animations": "^16.0.5",
"@angular/cdk": "^16.0.4",
"@angular/common": "^16.0.5",
"@angular/compiler": "^16.0.5",
"@angular/core": "^16.0.5",
"@angular/forms": "^16.0.5",
"@angular/material": "^16.0.4",
"@angular/platform-browser": "^16.0.5",
"@angular/platform-browser-dynamic": "^16.0.5",
"@angular/router": "^16.0.5",
"@fortawesome/angular-fontawesome": "^0.13.0",
"@fortawesome/fontawesome-svg-core": "^6.4.2",
"@fortawesome/free-brands-svg-icons": "^6.4.2",
"@fortawesome/free-solid-svg-icons": "^6.4.2",
"apollo-angular": "^5.0.1",
"graphql": "^16.6.0",
"rxjs": "~7.5.0",
"tslib": "^2.3.0",
"zone.js": "~0.13.0"
},
"devDependencies": {
"@angular-devkit/build-angular": "^16.0.5",
"@angular/cli": "^16.0.5",
"@angular/compiler-cli": "^16.0.5",
"@azure/static-web-apps-cli": "^1.0.6",
"@ngx-env/builder": "^16.0.0",
"@types/jest": "^29.2.4",
"@types/node": "^18.11.18",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.6.0",
"jest": "^29.5.0",
"typescript": "^4.8.4"
}
}

View File

@@ -0,0 +1,40 @@
{
"name": "stripe-api",
"version": "1.0.0",
"description": "Stripe service",
"private": true,
"type": "module",
"main": "app.ts",
"directories": {
"test": "test"
},
"scripts": {
"test": "npm run build:ts && tsc -p test/tsconfig.json && tap --ts \"test-dist/test/**/*.test.js\"",
"start": "fastify start -l info dist/app.js -a 0.0.0.0 -p 4242",
"build:ts": "tsc",
"watch:ts": "tsc -w",
"dev": "npm run build:ts && concurrently -k -p \"[{name}]\" -n \"TypeScript,App\" -c \"yellow.bold,cyan.bold\" \"npm:watch:ts\" \"npm:dev:start\"",
"dev:start": "fastify start --ignore-watch=.ts$ -w -l info -p 4242 -P dist/app.js",
"docker:build": "docker build --tag stripe --file ./Dockerfile ../..",
"docker:run": "docker run --rm --publish 4242:4242 --env-file ../../.env.docker stripe"
},
"dependencies": {
"@fastify/autoload": "^5.0.0",
"@fastify/sensible": "^5.0.0",
"fastify": "^4.0.0",
"fastify-cli": "^5.7.0",
"fastify-plugin": "^4.0.0",
"fastify-raw-body": "^4.2.0",
"node-fetch": "^3.3.0",
"stripe": "^13.7.0"
},
"devDependencies": {
"@types/node": "^18.0.0",
"@types/tap": "^15.0.5",
"concurrently": "^7.0.0",
"fastify-tsconfig": "^1.0.1",
"tap": "^16.1.0",
"ts-node": "^10.4.0",
"typescript": "^4.5.4"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "testing",
"version": "1.0.0",
"description": "This document will guide you through the process of setting up a new Playwright test project, and authoring and running end-to-end tests for the Contoso web application.",
"main": "index.js",
"scripts": {},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@playwright/test": "^1.31.1",
"@types/node": "^18.14.5"
},
"dependencies": {
"dotenv": "^16.0.3"
}
}

View File

@@ -0,0 +1,52 @@
{
"name": "portal",
"version": "0.0.0",
"scripts": {
"ng": "ng",
"swa": "swa",
"start": "ng serve --disable-host-check",
"start:swa": "swa start",
"build": "ng build",
"watch": "ng build --watch --configuration development",
"test": "ng test",
"lint": "eslint --config ../../.eslintrc.js",
"lint:fix": "npm run lint -- --fix"
},
"private": true,
"dependencies": {
"@angular/animations": "^16.0.5",
"@angular/cdk": "^16.0.4",
"@angular/common": "^16.0.5",
"@angular/compiler": "^16.0.5",
"@angular/core": "^16.0.5",
"@angular/forms": "^16.0.5",
"@angular/material": "^16.0.4",
"@angular/platform-browser": "^16.0.5",
"@angular/platform-browser-dynamic": "^16.0.5",
"@angular/router": "^16.0.5",
"@fortawesome/angular-fontawesome": "^0.13.0",
"@fortawesome/fontawesome-svg-core": "^6.4.2",
"@fortawesome/free-brands-svg-icons": "^6.4.2",
"@fortawesome/free-solid-svg-icons": "^6.4.2",
"apollo-angular": "^5.0.1",
"graphql": "^16.6.0",
"rxjs": "~7.5.0",
"tslib": "^2.3.0",
"zone.js": "~0.13.0"
},
"devDependencies": {
"@angular-devkit/build-angular": "^16.0.5",
"@angular/cli": "^16.0.5",
"@angular/compiler-cli": "^16.0.5",
"@azure/static-web-apps-cli": "^1.0.6",
"@ngx-env/builder": "^16.0.0",
"@types/jest": "^29.2.4",
"@types/node": "^18.11.18",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.6.0",
"jest": "^29.5.0",
"typescript": "^4.8.4"
}
}

View File

@@ -0,0 +1,40 @@
{
"name": "stripe-api",
"version": "1.0.0",
"description": "Stripe service",
"private": true,
"type": "module",
"main": "app.ts",
"directories": {
"test": "test"
},
"scripts": {
"test": "npm run build:ts && tsc -p test/tsconfig.json && tap --ts \"test-dist/test/**/*.test.js\"",
"start": "fastify start -l info dist/app.js -a 0.0.0.0 -p 4242",
"build:ts": "tsc",
"watch:ts": "tsc -w",
"dev": "npm run build:ts && concurrently -k -p \"[{name}]\" -n \"TypeScript,App\" -c \"yellow.bold,cyan.bold\" \"npm:watch:ts\" \"npm:dev:start\"",
"dev:start": "fastify start --ignore-watch=.ts$ -w -l info -p 4242 -P dist/app.js",
"docker:build": "docker build --tag stripe --file ./Dockerfile ../..",
"docker:run": "docker run --rm --publish 4242:4242 --env-file ../../.env.docker stripe"
},
"dependencies": {
"@fastify/autoload": "^5.0.0",
"@fastify/sensible": "^5.0.0",
"fastify": "^4.0.0",
"fastify-cli": "^5.7.0",
"fastify-plugin": "^4.0.0",
"fastify-raw-body": "^4.2.0",
"node-fetch": "^3.3.0",
"stripe": "^13.7.0"
},
"devDependencies": {
"@types/node": "^18.0.0",
"@types/tap": "^15.0.5",
"concurrently": "^7.0.0",
"fastify-tsconfig": "^1.0.1",
"tap": "^16.1.0",
"ts-node": "^10.4.0",
"typescript": "^4.5.4"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "testing",
"version": "1.0.0",
"description": "This document will guide you through the process of setting up a new Playwright test project, and authoring and running end-to-end tests for the Contoso web application.",
"main": "index.js",
"scripts": {},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@playwright/test": "^1.31.1",
"@types/node": "^18.14.5"
},
"dependencies": {
"dotenv": "^16.0.3"
}
}

View File

@@ -0,0 +1,54 @@
import fs from "fs";
import { test, expect } from "bun:test";
import { bunEnv, bunExe } from "harness";
import { join } from "path";
import { mkdtempSync } from "js/node/fs/export-star-from";
import { tmpdir } from "os";
test("migrate from npm during `bun add`", async () => {
const testDir = mkdtempSync(join(tmpdir(), "migrate-"));
fs.writeFileSync(
join(testDir, "package.json"),
JSON.stringify({
name: "test3",
dependencies: {
"svelte": "*",
},
}),
);
fs.cpSync(join(import.meta.dir, "add-while-migrate-fixture.json"), join(testDir, "package-lock.json"));
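// Seed only a package-lock.json; `bun add` should migrate it instead of resolving svelte fresh.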
Bun.spawnSync([bunExe(), "add", "lodash@4.17.21"], {
env: bunEnv,
cwd: testDir,
});
expect(fs.existsSync(join(testDir, "node_modules/lodash"))).toBeTrue();
const svelte_version = JSON.parse(fs.readFileSync(join(testDir, "node_modules/svelte/package.json"), "utf8")).version;
expect(svelte_version).toBe("4.0.0");
const lodash_version = JSON.parse(fs.readFileSync(join(testDir, "node_modules/lodash/package.json"), "utf8")).version;
expect(lodash_version).toBe("4.17.21");
});
// Currently this upgrades svelte :(
test.todo("migrate workspace from npm during `bun add`", async () => {
const testDir = join(tmpdir(), "migrate-" + Math.random().toString(36).slice(2));
fs.cpSync(join(import.meta.dir, "add-while-migrate-workspace"), testDir, { recursive: true });
Bun.spawnSync([bunExe(), "add", "lodash@4.17.21"], {
env: bunEnv,
cwd: join(testDir, "packages", "a"),
});
expect(fs.existsSync(join(testDir, "node_modules/lodash"))).toBeTrue();
const lodash_version = JSON.parse(fs.readFileSync(join(testDir, "node_modules/lodash/package.json"), "utf8")).version;
expect(lodash_version).toBe("4.17.21");
const svelte_version = JSON.parse(fs.readFileSync(join(testDir, "node_modules/svelte/package.json"), "utf8")).version;
expect(svelte_version).toBe("3.0.0");
});