[bun add] various fixes (#1953)

Alex Lam S.L
2023-01-31 23:05:41 +02:00
committed by GitHub
parent 79b32f80fa
commit 9598868b57
6 changed files with 356 additions and 836 deletions

View File

@@ -1,7 +1,6 @@
 {
-  "tabWidth": 2,
-  "useTabs": false,
-  "singleQuote": false,
-  "bracketSpacing": true,
-  "trailingComma": "all"
+  "arrowParens": "avoid",
+  "printWidth": 120,
+  "trailingComma": "all",
+  "useTabs": false
 }
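The reformatted test files later in this diff follow from these new Prettier options. A minimal, hypothetical sketch of their visible effect (the file and helper names here are illustrative, not part of the commit), assuming "arrowParens": "avoid" drops parentheses around single untyped arrow parameters and "printWidth": 120 lets previously wrapped literals fit on one line:

// sample.ts (hypothetical) formatted under the new .prettierrc options
const urls: string[] = [];
const setHandler = (handler: (request: Request) => Promise<Response>): void => {
  void handler;
};

// arrowParens: "avoid" -> `async request =>` instead of `async (request) =>`
setHandler(async request => {
  urls.push(request.url);
  return new Response("not to be found", { status: 404 });
});

// printWidth: 120 -> short object literals and call arguments no longer wrap across several lines
const pkg = { name: "foo", version: "0.0.1" };
console.log(pkg, urls.length);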

View File

@@ -2230,7 +2230,7 @@ pub const PackageManager = struct {
         const preinstall = this.determinePreinstallState(package, this.lockfile);
-        if (comptime Environment.isDebug or Environment.isTest) std.debug.assert(package.meta.id != invalid_package_id);
+        if (comptime Environment.allow_assert) std.debug.assert(package.meta.id != invalid_package_id);
         defer successFn(this, dependency_id, package.meta.id);
         switch (preinstall) {
             // Is this package already in the cache?
@@ -4276,19 +4276,11 @@ pub const PackageManager = struct {
         if (query.expr.data == .e_object) {
             if (query.expr.asProperty(update.name)) |value| {
                 if (value.expr.data == .e_string) {
-                    if (update.resolved_name.isEmpty()) {
+                    if (!update.resolved_name.isEmpty() and strings.eql(list, dependency_list)) {
+                        replacing += 1;
+                    } else {
                         update.e_string = value.expr.data.e_string;
                         remaining -= 1;
-                    } else {
-                        replacing += 1;
-                        update.e_string = (try JSAst.Expr.init(
-                            JSAst.E.String,
-                            JSAst.E.String{
-                                // we set it later
-                                .data = "",
-                            },
-                            logger.Loc.Empty,
-                        ).clone(allocator)).data.e_string;
                     }
                 }
                 break :outer;
@@ -4298,16 +4290,13 @@ pub const PackageManager = struct {
             }
         }
-        if (remaining == 0 and replacing == 0)
+        if (remaining == 0)
             break :ast_modifier;
         var dependencies: []G.Property = &[_]G.Property{};
-        var dependencies_obj: ?*JSAst.E.Object = null;
         if (current_package_json.asProperty(dependency_list)) |query| {
             if (query.expr.data == .e_object) {
-                dependencies_obj = query.expr.data.e_object;
-                dependencies = dependencies_obj.?.properties.slice();
+                dependencies = query.expr.data.e_object.properties.slice();
             }
         }
@@ -4317,18 +4306,25 @@ pub const PackageManager = struct {
         outer: for (updates) |*update| {
             if (update.e_string != null) continue;
-            defer std.debug.assert(update.e_string != null);
+            defer if (comptime Environment.allow_assert) std.debug.assert(update.e_string != null);
             var k: usize = 0;
             while (k < new_dependencies.len) : (k += 1) {
-                if (dependencies_obj) |obj| {
-                    if (obj.asProperty(update.name)) |prop| {
-                        if (prop.expr.data == .e_string) {
-                            var str = try prop.expr.clone(allocator);
-                            str.data.e_string.* = try str.data.e_string.clone(allocator);
-                            update.e_string = str.data.e_string;
-                            continue :outer;
+                if (new_dependencies[k].key) |key| {
+                    if (key.data.e_string.eql(string, update.name)) {
+                        if (update.resolved_name.isEmpty()) {
+                            // This actually is a duplicate
+                            // like "react" appearing in both "dependencies" and "optionalDependencies"
+                            // For this case, we'll just swap remove it
+                            if (new_dependencies.len > 1) {
+                                new_dependencies[k] = new_dependencies[new_dependencies.len - 1];
+                                new_dependencies = new_dependencies[0 .. new_dependencies.len - 1];
+                            } else {
+                                new_dependencies = &[_]G.Property{};
+                            }
+                            continue;
                         }
+                        new_dependencies[k].key = null;
                     }
                 }
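The comments in the new branch above describe a swap-remove of duplicate entries (for example "react" listed under both "dependencies" and "optionalDependencies"). A minimal TypeScript sketch of that technique, with assumed property names and versions rather than the Zig G.Property layout:

type Property = { key: string | null; value: string };

// Swap-remove: overwrite slot k with the last element, then drop the last slot.
// Order is not preserved, but nothing needs to be shifted.
function swapRemove(props: Property[], k: number): void {
  if (props.length > 1) {
    props[k] = props[props.length - 1];
  }
  props.pop();
}

const newDependencies: Property[] = [
  { key: "react", value: "^18.0.0" },
  { key: "left-pad", value: "^1.3.0" },
  { key: "react", value: "^18.0.0" }, // duplicate coming from "optionalDependencies"
];
swapRemove(newDependencies, 2);
console.log(newDependencies.map(p => p.key)); // [ "react", "left-pad" ]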
@@ -4418,19 +4414,21 @@ pub const PackageManager = struct {
             }
             for (updates) |*update| {
-                update.e_string.?.data = switch (update.resolution.tag) {
-                    .npm => if (update.version.tag == .npm and update.version.value.npm.version.input.len == 0)
-                        std.fmt.allocPrint(allocator, "^{}", .{
-                            update.resolution.value.npm.version.fmt(update.version_buf),
-                        }) catch unreachable
-                    else
-                        null,
-                    .uninitialized => switch (update.version.tag) {
-                        .uninitialized => try allocator.dupe(u8, latest),
-                        else => null,
-                    },
-                    else => null,
-                } orelse try allocator.dupe(u8, update.version.literal.slice(update.version_buf));
+                if (update.e_string) |e_string| {
+                    e_string.data = switch (update.resolution.tag) {
+                        .npm => if (update.version.tag == .npm and update.version.value.npm.version.input.len == 0)
+                            std.fmt.allocPrint(allocator, "^{}", .{
+                                update.resolution.value.npm.version.fmt(update.version_buf),
+                            }) catch unreachable
+                        else
+                            null,
+                        .uninitialized => switch (update.version.tag) {
+                            .uninitialized => try allocator.dupe(u8, latest),
+                            else => null,
+                        },
+                        else => null,
+                    } orelse try allocator.dupe(u8, update.version.literal.slice(update.version_buf));
+                }
             }
         }
     };
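The null-guarded switch above picks the string written into package.json: a caret range on the resolved version when a bare name was added and resolved from npm, the literal "latest" when nothing was resolved, and otherwise whatever range the user typed. A rough TypeScript sketch of that decision, using illustrative field names rather than bun's UpdateRequest layout:

type UpdateSketch = {
  literal: string;          // the range text the user typed, e.g. "" for a bare `bun add baz`
  resolvedVersion?: string; // version resolved from the registry, e.g. "0.0.3"
};

// Decide what ends up as the dependency's value in package.json.
function versionForPackageJson(update: UpdateSketch): string {
  if (update.resolvedVersion && update.literal === "") {
    return `^${update.resolvedVersion}`; // bare add resolved via npm -> caret range
  }
  if (!update.resolvedVersion && update.literal === "") {
    return "latest"; // nothing requested and nothing resolved yet
  }
  return update.literal; // keep the explicitly requested range
}

console.log(versionForPackageJson({ literal: "", resolvedVersion: "0.0.3" }));      // "^0.0.3"
console.log(versionForPackageJson({ literal: "0.0.3", resolvedVersion: "0.0.3" })); // "0.0.3"
console.log(versionForPackageJson({ literal: "" }));                                // "latest"

This mirrors the expectations in the tests later in the diff: a bare add records baz: "^0.0.3", while an explicit baz@0.0.3 records baz: "0.0.3".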

View File

@@ -697,17 +697,17 @@ pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile
     const root_deps: []const Dependency = dep_list.get(new.buffers.dependencies.items);
     const resolved_ids: []const PackageID = res_list.get(new.buffers.resolutions.items);
-    for (updates) |update, update_i| {
+    for (updates) |*update| {
         if (update.resolution.tag == .uninitialized) {
             const name_hash = String.Builder.stringHash(update.name);
             for (root_deps) |dep, i| {
                 if (dep.name_hash == name_hash) {
                     const package_id = resolved_ids[i];
                     if (package_id > new.packages.len) continue;
-                    updates[update_i].version_buf = new.buffers.string_bytes.items;
-                    updates[update_i].version = dep.version;
-                    updates[update_i].resolution = resolutions[package_id];
-                    updates[update_i].resolved_name = names[package_id];
+                    update.version_buf = new.buffers.string_bytes.items;
+                    update.version = dep.version;
+                    update.resolution = resolutions[package_id];
+                    update.resolved_name = names[package_id];
                 }
             }
         }
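The loop above back-fills each still-unresolved update request from the cleaned lockfile by hashing its name and matching it against the root package's dependency list. A rough TypeScript sketch of that matching step, with simplified stand-ins for the Zig structures (the hash function and field names here are illustrative, not bun's actual ones):

type Dependency = { nameHash: number; version: string };
type UpdateRequest = { name: string; version?: string; resolvedName?: string };

// Simple stand-in for String.Builder.stringHash; any stable string hash works for the sketch.
const stringHash = (s: string): number => [...s].reduce((h, c) => (h * 31 + c.charCodeAt(0)) >>> 0, 0);

// Copy resolved data from the root dependency list onto each unresolved update,
// mutating the request in place (as the `for (updates) |*update|` capture now does).
function backfillUpdates(updates: UpdateRequest[], rootDeps: Dependency[], resolvedNames: string[]): void {
  for (const update of updates) {
    if (update.resolvedName !== undefined) continue; // already resolved earlier
    const nameHash = stringHash(update.name);
    rootDeps.forEach((dep, i) => {
      if (dep.nameHash !== nameHash || i >= resolvedNames.length) return;
      update.version = dep.version;
      update.resolvedName = resolvedNames[i];
    });
  }
}

const updates: UpdateRequest[] = [{ name: "baz" }];
backfillUpdates(updates, [{ nameHash: stringHash("baz"), version: "^0.0.3" }], ["baz"]);
console.log(updates[0]); // { name: "baz", version: "^0.0.3", resolvedName: "baz" }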

View File

@@ -1,22 +1,8 @@
 import { file, spawn } from "bun";
-import {
-  afterAll,
-  afterEach,
-  beforeAll,
-  beforeEach,
-  expect,
-  it,
-} from "bun:test";
+import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test";
 import { bunExe } from "bunExe";
 import { bunEnv as env } from "bunEnv";
-import {
-  access,
-  mkdir,
-  mkdtemp,
-  readlink,
-  rm,
-  writeFile,
-} from "fs/promises";
+import { access, mkdir, mkdtemp, readlink, rm, writeFile } from "fs/promises";
 import { join, relative } from "path";
 import { tmpdir } from "os";
 import {
import { import {
@@ -47,14 +33,20 @@ afterEach(async () => {
 });
 it("should add existing package", async () => {
-  await writeFile(join(add_dir, "package.json"), JSON.stringify({
-    name: "foo",
-    version: "0.0.1",
-  }));
-  await writeFile(join(package_dir, "package.json"), JSON.stringify({
-    name: "bar",
-    version: "0.0.2",
-  }));
+  await writeFile(
+    join(add_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+    }),
+  );
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "bar",
+      version: "0.0.2",
+    }),
+  );
   const add_path = relative(package_dir, add_dir);
   const { stdout, stderr, exited } = spawn({
     cmd: [bunExe(), "add", `file:${add_path}`],
@@ -66,11 +58,7 @@ it("should add existing package", async () => {
   });
   expect(stderr).toBeDefined();
   const err = await new Response(stderr).text();
-  expect(err.replace(/^(.*?) v[^\n]+/, "$1").split(/\r?\n/)).toEqual([
-    "bun add",
-    " Saved lockfile",
-    "",
-  ]);
+  expect(err.replace(/^(.*?) v[^\n]+/, "$1").split(/\r?\n/)).toEqual(["bun add", " Saved lockfile", ""]);
   expect(stdout).toBeDefined();
   const out = await new Response(stdout).text();
   expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
@@ -91,10 +79,13 @@ it("should add existing package", async () => {
 });
 it("should reject missing package", async () => {
-  await writeFile(join(package_dir, "package.json"), JSON.stringify({
-    name: "bar",
-    version: "0.0.2",
-  }));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "bar",
+      version: "0.0.2",
+    }),
+  );
   const add_path = relative(package_dir, add_dir);
   const { stdout, stderr, exited } = spawn({
     cmd: [bunExe(), "add", `file:${add_path}`],
@@ -122,14 +113,20 @@ it("should reject missing package", async () => {
 });
 it("should reject invalid path without segfault", async () => {
-  await writeFile(join(add_dir, "package.json"), JSON.stringify({
-    name: "foo",
-    version: "0.0.1",
-  }));
-  await writeFile(join(package_dir, "package.json"), JSON.stringify({
-    name: "bar",
-    version: "0.0.2",
-  }));
+  await writeFile(
+    join(add_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+    }),
+  );
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "bar",
+      version: "0.0.2",
+    }),
+  );
   const add_path = relative(package_dir, add_dir);
   const { stdout, stderr, exited } = spawn({
     cmd: [bunExe(), "add", `file://${add_path}`],
@@ -156,9 +153,9 @@ it("should reject invalid path without segfault", async () => {
   });
 });
-it("should handle semver-like names", async() => {
+it("should handle semver-like names", async () => {
   const urls: string[] = [];
-  setHandler(async (request) => {
+  setHandler(async request => {
     expect(request.method).toBe("GET");
     expect(request.headers.get("accept")).toBe(
       "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
@@ -168,18 +165,15 @@ it("should handle semver-like names", async() => {
     urls.push(request.url);
     return new Response("not to be found", { status: 404 });
   });
-  await writeFile(join(package_dir, "package.json"), JSON.stringify({
-    name: "foo",
-    version: "0.0.1",
-  }));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+    }),
+  );
   const { stdout, stderr, exited } = spawn({
-    cmd: [
-      bunExe(),
-      "add",
-      "1.2.3",
-      "--config",
-      import.meta.dir + "/basic.toml",
-    ],
+    cmd: [bunExe(), "add", "1.2.3", "--config", import.meta.dir + "/basic.toml"],
     cwd: package_dir,
     stdout: null,
     stdin: "pipe",
@@ -188,9 +182,7 @@ it("should handle semver-like names", async() => {
   });
   expect(stderr).toBeDefined();
   const err = await new Response(stderr).text();
-  expect(err.split(/\r?\n/)).toContain(
-    'error: package "1.2.3" not found localhost/1.2.3 404',
-  );
+  expect(err.split(/\r?\n/)).toContain('error: package "1.2.3" not found localhost/1.2.3 404');
   expect(stdout).toBeDefined();
   expect(await new Response(stdout).text()).toBe("");
   expect(await exited).toBe(1);
@@ -204,9 +196,9 @@ it("should handle semver-like names", async() => {
   }
 });
-it("should handle @scoped names", async() => {
+it("should handle @scoped names", async () => {
   const urls: string[] = [];
-  setHandler(async (request) => {
+  setHandler(async request => {
     expect(request.method).toBe("GET");
     expect(request.headers.get("accept")).toBe(
       "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
@@ -216,18 +208,15 @@ it("should handle @scoped names", async() => {
     urls.push(request.url);
     return new Response("not to be found", { status: 404 });
   });
-  await writeFile(join(package_dir, "package.json"), JSON.stringify({
-    name: "foo",
-    version: "0.0.1",
-  }));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+    }),
+  );
   const { stdout, stderr, exited } = spawn({
-    cmd: [
-      bunExe(),
-      "add",
-      "@bar/baz",
-      "--config",
-      import.meta.dir + "/basic.toml",
-    ],
+    cmd: [bunExe(), "add", "@bar/baz", "--config", import.meta.dir + "/basic.toml"],
     cwd: package_dir,
     stdout: null,
     stdin: "pipe",
@@ -236,9 +225,7 @@ it("should handle @scoped names", async() => {
   });
   expect(stderr).toBeDefined();
   const err = await new Response(stderr).text();
-  expect(err.split(/\r?\n/)).toContain(
-    'error: package "@bar/baz" not found localhost/@bar/baz 404',
-  );
+  expect(err.split(/\r?\n/)).toContain('error: package "@bar/baz" not found localhost/@bar/baz 404');
   expect(stdout).toBeDefined();
   expect(await new Response(stdout).text()).toBe("");
   expect(await exited).toBe(1);
@@ -254,11 +241,13 @@ it("should handle @scoped names", async() => {
it("should add dependency with specified semver", async () => { it("should add dependency with specified semver", async () => {
const urls: string[] = []; const urls: string[] = [];
setHandler(dummyRegistry(urls, "0.0.3", { setHandler(
bin: { dummyRegistry(urls, "0.0.3", {
"baz-run": "index.js", bin: {
}, "baz-run": "index.js",
})); },
}),
);
await writeFile( await writeFile(
join(package_dir, "package.json"), join(package_dir, "package.json"),
JSON.stringify({ JSON.stringify({
@@ -288,26 +277,12 @@ it("should add dependency with specified semver", async () => {
" 1 packages installed", " 1 packages installed",
]); ]);
expect(await exited).toBe(0); expect(await exited).toBe(0);
expect(urls).toEqual([ expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
`${root_url}/baz`,
`${root_url}/baz.tgz`,
]);
expect(requested).toBe(2); expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "baz"]);
".bin", expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
".cache", expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "baz", "index.js"));
"baz", expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual(["index.js", "package.json"]);
]);
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
"baz-run",
]);
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(
join("..", "baz", "index.js"),
);
expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual([
"index.js",
"package.json",
]);
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({ expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
name: "baz", name: "baz",
version: "0.0.3", version: "0.0.3",
@@ -327,11 +302,13 @@ it("should add dependency with specified semver", async () => {
it("should add dependency alongside workspaces", async () => { it("should add dependency alongside workspaces", async () => {
const urls: string[] = []; const urls: string[] = [];
setHandler(dummyRegistry(urls, "0.0.3", { setHandler(
bin: { dummyRegistry(urls, "0.0.3", {
"baz-run": "index.js", bin: {
}, "baz-run": "index.js",
})); },
}),
);
await writeFile( await writeFile(
join(package_dir, "package.json"), join(package_dir, "package.json"),
JSON.stringify({ JSON.stringify({
@@ -371,30 +348,13 @@ it("should add dependency alongside workspaces", async () => {
" 2 packages installed", " 2 packages installed",
]); ]);
expect(await exited).toBe(0); expect(await exited).toBe(0);
expect(urls).toEqual([ expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
`${root_url}/baz`,
`${root_url}/baz.tgz`,
]);
expect(requested).toBe(2); expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar", "baz"]);
".bin", expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
".cache", expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "baz", "index.js"));
"bar", expect(await readlink(join(package_dir, "node_modules", "bar"))).toBe(join("..", "packages", "bar"));
"baz", expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual(["index.js", "package.json"]);
]);
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
"baz-run",
]);
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(
join("..", "baz", "index.js"),
);
expect(await readlink(join(package_dir, "node_modules", "bar"))).toBe(
join("..", "packages", "bar"),
);
expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual([
"index.js",
"package.json",
]);
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({ expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
name: "baz", name: "baz",
version: "0.0.3", version: "0.0.3",
@@ -405,7 +365,7 @@ it("should add dependency alongside workspaces", async () => {
   expect(await file(join(package_dir, "package.json")).json()).toEqual({
     name: "foo",
     version: "0.0.1",
-    workspaces: [ "packages/bar" ],
+    workspaces: ["packages/bar"],
     dependencies: {
       baz: "^0.0.3",
     },
@@ -415,11 +375,13 @@ it("should add dependency alongside workspaces", async () => {
it("should add aliased dependency (npm)", async () => { it("should add aliased dependency (npm)", async () => {
const urls: string[] = []; const urls: string[] = [];
setHandler(dummyRegistry(urls, "0.0.3", { setHandler(
bin: { dummyRegistry(urls, "0.0.3", {
"baz-run": "index.js", bin: {
}, "baz-run": "index.js",
})); },
}),
);
await writeFile( await writeFile(
join(package_dir, "package.json"), join(package_dir, "package.json"),
JSON.stringify({ JSON.stringify({
@@ -448,26 +410,12 @@ it("should add aliased dependency (npm)", async () => {
" 1 packages installed", " 1 packages installed",
]); ]);
expect(await exited).toBe(0); expect(await exited).toBe(0);
expect(urls).toEqual([ expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
`${root_url}/baz`,
`${root_url}/baz.tgz`,
]);
expect(requested).toBe(2); expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar"]);
".bin", expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
".cache", expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "bar", "index.js"));
"bar", expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["index.js", "package.json"]);
]);
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
"baz-run",
]);
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(
join("..", "bar", "index.js"),
);
expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual([
"index.js",
"package.json",
]);
expect(await file(join(package_dir, "node_modules", "bar", "package.json")).json()).toEqual({ expect(await file(join(package_dir, "node_modules", "bar", "package.json")).json()).toEqual({
name: "baz", name: "baz",
version: "0.0.3", version: "0.0.3",
@@ -518,14 +466,8 @@ it("should add aliased dependency (GitHub)", async () => {
   expect(await exited).toBe(0);
   expect(urls).toEqual([]);
   expect(requested).toBe(0);
-  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
-    ".bin",
-    ".cache",
-    "uglify",
-  ]);
-  expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
-    "uglifyjs",
-  ]);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "uglify"]);
+  expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["uglifyjs"]);
   expect(await readdirSorted(join(package_dir, "node_modules", ".cache"))).toEqual([
     "@GH@mishoo-UglifyJS-e219a9a",
     "uglify",
@@ -533,13 +475,7 @@ it("should add aliased dependency (GitHub)", async () => {
   expect(await readdirSorted(join(package_dir, "node_modules", ".cache", "uglify"))).toEqual([
     "mishoo-UglifyJS-e219a9a",
   ]);
-  expect(await readlink(join(
-    package_dir,
-    "node_modules",
-    ".cache",
-    "uglify",
-    "mishoo-UglifyJS-e219a9a",
-  ))).toBe(
+  expect(await readlink(join(package_dir, "node_modules", ".cache", "uglify", "mishoo-UglifyJS-e219a9a"))).toBe(
     join(package_dir, "node_modules", ".cache", "@GH@mishoo-UglifyJS-e219a9a"),
   );
   expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
@@ -556,12 +492,7 @@ it("should add aliased dependency (GitHub)", async () => {
"test", "test",
"tools", "tools",
]); ]);
const package_json = await file(join( const package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
package_dir,
"node_modules",
"uglify",
"package.json",
)).json();
expect(package_json.name).toBe("uglify-js"); expect(package_json.name).toBe("uglify-js");
expect(package_json.version).toBe("3.14.1"); expect(package_json.version).toBe("3.14.1");
expect(await file(join(package_dir, "package.json")).json()).toEqual({ expect(await file(join(package_dir, "package.json")).json()).toEqual({
@@ -573,3 +504,95 @@ it("should add aliased dependency (GitHub)", async () => {
   });
   await access(join(package_dir, "bun.lockb"));
 });
it("should let you add the same package twice", async () => {
const urls: string[] = [];
setHandler(dummyRegistry(urls, "0.0.3", {}));
await writeFile(
join(package_dir, "package.json"),
JSON.stringify({
name: "Foo",
version: "0.0.1",
dependencies: {},
}),
);
// add as non-dev
const {
stdout: stdout1,
stderr: stderr1,
exited: exited1,
} = spawn({
cmd: [bunExe(), "add", "baz@0.0.3", "--config", import.meta.dir + "/basic.toml"],
cwd: package_dir,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
expect(stderr1).toBeDefined();
const err1 = await new Response(stderr1).text();
expect(err1).toContain("Saved lockfile");
expect(stdout1).toBeDefined();
const out1 = await new Response(stdout1).text();
expect(out1).toContain("installed baz@0.0.3");
expect(out1).toContain("1 packages installed");
expect(await exited1).toBe(0);
expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "baz"]);
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
name: "baz",
version: "0.0.3",
bin: {
"baz-run": "index.js",
},
});
expect(await file(join(package_dir, "package.json")).json()).toEqual({
name: "Foo",
version: "0.0.1",
dependencies: {
baz: "0.0.3",
},
});
await access(join(package_dir, "bun.lockb"));
// re-add as dev
urls.length = 0;
const {
stdout: stdout2,
stderr: stderr2,
exited: exited2,
} = spawn({
cmd: [bunExe(), "add", "baz", "-d", "--config", import.meta.dir + "/basic.toml"],
cwd: package_dir,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
expect(stderr2).toBeDefined();
const err2 = await new Response(stderr2).text();
expect(err2).toContain("Saved lockfile");
expect(stdout2).toBeDefined();
const out2 = await new Response(stdout2).text();
expect(out2).toContain("installed baz@0.0.3");
expect(out2).not.toContain("1 packages installed");
expect(await exited2).toBe(0);
expect(urls).toEqual([`${root_url}/baz`]);
expect(requested).toBe(3);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "baz"]);
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
name: "baz",
version: "0.0.3",
bin: {
"baz-run": "index.js",
},
});
expect(await file(join(package_dir, "package.json")).json()).toEqual({
name: "Foo",
version: "0.0.1",
dependencies: {
baz: "^0.0.3",
},
});
await access(join(package_dir, "bun.lockb"));
});

File diff suppressed because it is too large

View File

@@ -9,7 +9,7 @@ let handler, server;
 export let package_dir, requested, root_url;
 export function dummyRegistry(urls, version = "0.0.2", props = {}) {
-  return async (request) => {
+  return async request => {
     urls.push(request.url);
     expect(request.method).toBe("GET");
     if (request.url.endsWith(".tgz")) {
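For readers following the tests, a dummy registry handler of this shape can be sketched as below. This is a hedged illustration of the pattern (URL recording, GET-only assertion, tarball vs. metadata branch); only the parts visible in the hunk above come from the source, and the metadata shape, helper name, and placeholder bodies are assumptions:

// sketch.registry.ts (hypothetical), not the actual contents of this helper file
import { expect } from "bun:test";

export function sketchRegistry(urls: string[], version = "0.0.2", props: Record<string, unknown> = {}) {
  return async (request: Request): Promise<Response> => {
    urls.push(request.url);
    expect(request.method).toBe("GET");
    if (request.url.endsWith(".tgz")) {
      // A real helper would serve a tarball fixture here; a placeholder body keeps the sketch self-contained.
      return new Response("dummy tarball bytes", { status: 200 });
    }
    // Otherwise answer with minimal npm-style metadata for the requested package name.
    const name = new URL(request.url).pathname.slice(1);
    return Response.json({
      name,
      versions: { [version]: { name, version, ...props, dist: { tarball: `${request.url}.tgz` } } },
      "dist-tags": { latest: version },
    });
  };
}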