mirror of https://github.com/oven-sh/bun (synced 2026-02-02 15:08:46 +00:00)
[bun add] various fixes (#1953)
@@ -1,7 +1,6 @@
{
"tabWidth": 2,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": true,
"trailingComma": "all"
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false
}

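Much of the test-file churn later in this diff is Prettier reformatting driven by this settings change (wider print width, trailing commas, no forced arrow parens). As a small, hedged example of applying the same options through Prettier's Node API (the `parser` choice and the sample input are assumptions, not something this commit configures):

    import prettier from "prettier";

    const source = `setHandler(async (request) => { return new Response("ok"); });`;
    // `format` is synchronous in Prettier 2 and returns a Promise in Prettier 3; awaiting covers both.
    const formatted = await prettier.format(source, {
      parser: "typescript",
      arrowParens: "avoid",
      printWidth: 120,
      trailingComma: "all",
      useTabs: false,
    });
    console.log(formatted); // with arrowParens: "avoid" this prints `setHandler(async request => { ... });`
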
@@ -2230,7 +2230,7 @@ pub const PackageManager = struct {
const preinstall = this.determinePreinstallState(package, this.lockfile);
if (comptime Environment.isDebug or Environment.isTest) std.debug.assert(package.meta.id != invalid_package_id);
if (comptime Environment.allow_assert) std.debug.assert(package.meta.id != invalid_package_id);
defer successFn(this, dependency_id, package.meta.id);
switch (preinstall) {
// Is this package already in the cache?

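This hunk folds the old `Environment.isDebug or Environment.isTest` check into the single `Environment.allow_assert` flag. A rough TypeScript analogy of that kind of build-gated assertion (illustrative only, not bun's code; the flag and the env check are assumptions):

    // Assumed example: gate assertions behind one flag instead of several separate checks.
    const allowAssert: boolean = process.env.NODE_ENV !== "production";

    function assert(condition: boolean, message = "assertion failed"): void {
      if (allowAssert && !condition) throw new Error(message);
    }

    // assert(pkg.id !== INVALID_PACKAGE_ID, "package id must be resolved"); // hypothetical usage
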
@@ -4276,19 +4276,11 @@ pub const PackageManager = struct {
if (query.expr.data == .e_object) {
if (query.expr.asProperty(update.name)) |value| {
if (value.expr.data == .e_string) {
if (update.resolved_name.isEmpty()) {
if (!update.resolved_name.isEmpty() and strings.eql(list, dependency_list)) {
replacing += 1;
} else {
update.e_string = value.expr.data.e_string;
remaining -= 1;
} else {
replacing += 1;
update.e_string = (try JSAst.Expr.init(
JSAst.E.String,
JSAst.E.String{
// we set it later
.data = "",
},
logger.Loc.Empty,
).clone(allocator)).data.e_string;
}
}
break :outer;

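The `!update.resolved_name.isEmpty() and strings.eql(list, dependency_list)` branch decides whether an existing package.json entry is about to be replaced with a newly resolved version or simply reused as-is. A hedged TypeScript restatement of that decision (the field and parameter names are invented for the sketch, this is not bun's code):

    // Hypothetical types; bun's real structures live in the Zig package manager.
    type UpdateRequest = { name: string; resolvedName: string; versionString: string | null };

    function claimExistingEntry(
      update: UpdateRequest,
      existingValue: string,
      list: string,              // the dependency list currently being scanned
      dependencyList: string,    // the list `bun add` is about to rewrite
      counters: { remaining: number; replacing: number },
    ): void {
      if (update.resolvedName !== "" && list === dependencyList) {
        counters.replacing += 1;              // entry will be overwritten with the resolved version later
      } else {
        update.versionString = existingValue; // reuse the version string already in package.json
        counters.remaining -= 1;
      }
    }
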
@@ -4298,16 +4290,13 @@ pub const PackageManager = struct {
}
}
if (remaining == 0 and replacing == 0)
if (remaining == 0)
break :ast_modifier;
var dependencies: []G.Property = &[_]G.Property{};
var dependencies_obj: ?*JSAst.E.Object = null;
if (current_package_json.asProperty(dependency_list)) |query| {
if (query.expr.data == .e_object) {
dependencies_obj = query.expr.data.e_object;
dependencies = dependencies_obj.?.properties.slice();
dependencies = query.expr.data.e_object.properties.slice();
}
}

@@ -4317,18 +4306,25 @@ pub const PackageManager = struct {
outer: for (updates) |*update| {
if (update.e_string != null) continue;
defer std.debug.assert(update.e_string != null);
defer if (comptime Environment.allow_assert) std.debug.assert(update.e_string != null);
var k: usize = 0;
while (k < new_dependencies.len) : (k += 1) {
if (dependencies_obj) |obj| {
if (obj.asProperty(update.name)) |prop| {
if (prop.expr.data == .e_string) {
var str = try prop.expr.clone(allocator);
str.data.e_string.* = try str.data.e_string.clone(allocator);
update.e_string = str.data.e_string;
continue :outer;
if (new_dependencies[k].key) |key| {
if (key.data.e_string.eql(string, update.name)) {
if (update.resolved_name.isEmpty()) {
// This actually is a duplicate
// like "react" appearing in both "dependencies" and "optionalDependencies"
// For this case, we'll just swap remove it
if (new_dependencies.len > 1) {
new_dependencies[k] = new_dependencies[new_dependencies.len - 1];
new_dependencies = new_dependencies[0 .. new_dependencies.len - 1];
} else {
new_dependencies = &[_]G.Property{};
}
continue;
}
new_dependencies[k].key = null;
}
}

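The duplicate-entry case in this hunk (the same name appearing in more than one dependency list) is dropped with a swap remove: overwrite the slot with the last element and shrink the slice by one, which avoids shifting everything after it but does not preserve order. A minimal TypeScript sketch of the same trick (illustrative only, not bun's code):

    // Remove items[index] in O(1) by swapping in the last element; order is not preserved.
    function swapRemove<T>(items: T[], index: number): void {
      items[index] = items[items.length - 1];
      items.pop();
    }

    const deps = ["a", "b", "c", "d"];
    swapRemove(deps, 1); // deps is now ["a", "d", "c"]
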
@@ -4418,19 +4414,21 @@ pub const PackageManager = struct {
}
for (updates) |*update| {
update.e_string.?.data = switch (update.resolution.tag) {
.npm => if (update.version.tag == .npm and update.version.value.npm.version.input.len == 0)
std.fmt.allocPrint(allocator, "^{}", .{
update.resolution.value.npm.version.fmt(update.version_buf),
}) catch unreachable
else
null,
.uninitialized => switch (update.version.tag) {
.uninitialized => try allocator.dupe(u8, latest),
if (update.e_string) |e_string| {
e_string.data = switch (update.resolution.tag) {
.npm => if (update.version.tag == .npm and update.version.value.npm.version.input.len == 0)
std.fmt.allocPrint(allocator, "^{}", .{
update.resolution.value.npm.version.fmt(update.version_buf),
}) catch unreachable
else
null,
.uninitialized => switch (update.version.tag) {
.uninitialized => try allocator.dupe(u8, latest),
else => null,
},
else => null,
},
else => null,
} orelse try allocator.dupe(u8, update.version.literal.slice(update.version_buf));
} orelse try allocator.dupe(u8, update.version.literal.slice(update.version_buf));
}
}
}
};

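Both variants of this block pick the version string that gets written back to package.json: a caret range of the resolved version when the user gave no explicit npm version, the literal `latest` when nothing was resolved at all, and otherwise whatever literal the user typed; wrapping the body in `if (update.e_string) |e_string|` simply skips updates whose slot was never claimed instead of asserting with `.?`. A rough TypeScript restatement of that selection (hypothetical field names and tag values, not bun's structures):

    interface Update {
      resolutionTag: "npm" | "uninitialized" | string;
      versionTag: "npm" | "uninitialized" | string;
      versionInput: string;    // the literal the user typed after the package name, possibly ""
      resolvedVersion: string; // e.g. "0.0.3" once resolution succeeded
    }

    function versionStringFor(update: Update): string {
      if (update.resolutionTag === "npm" && update.versionTag === "npm" && update.versionInput.length === 0) {
        return `^${update.resolvedVersion}`; // `bun add baz` with no version: save "^<resolved version>"
      }
      if (update.resolutionTag === "uninitialized" && update.versionTag === "uninitialized") {
        return "latest";
      }
      return update.versionInput; // e.g. `bun add baz@0.0.3` keeps "0.0.3" as written
    }
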
@@ -697,17 +697,17 @@ pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile
const root_deps: []const Dependency = dep_list.get(new.buffers.dependencies.items);
const resolved_ids: []const PackageID = res_list.get(new.buffers.resolutions.items);
for (updates) |update, update_i| {
for (updates) |*update| {
if (update.resolution.tag == .uninitialized) {
const name_hash = String.Builder.stringHash(update.name);
for (root_deps) |dep, i| {
if (dep.name_hash == name_hash) {
const package_id = resolved_ids[i];
if (package_id > new.packages.len) continue;
updates[update_i].version_buf = new.buffers.string_bytes.items;
updates[update_i].version = dep.version;
updates[update_i].resolution = resolutions[package_id];
updates[update_i].resolved_name = names[package_id];
update.version_buf = new.buffers.string_bytes.items;
update.version = dep.version;
update.resolution = resolutions[package_id];
update.resolved_name = names[package_id];
}
}
}

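This hunk switches from indexing `updates[update_i]` to a pointer capture (`|*update|`) and, as before, back-fills each still-unresolved update request from the cleaned lockfile: hash the name, find the matching root dependency, and copy over its version and resolution. A hedged TypeScript sketch of that back-fill (field names and helpers are invented for the example):

    interface UpdateRequest { name: string; version?: string; resolution?: string; resolvedName?: string; }
    interface RootDep { nameHash: bigint; version: string; }

    function backfillUpdates(
      updates: UpdateRequest[],
      rootDeps: RootDep[],
      resolvedIds: number[],          // rootDeps[i] resolved to package id resolvedIds[i]
      resolutions: string[],          // resolution per package id
      names: string[],                // name per package id
      hashName: (name: string) => bigint,
    ): void {
      for (const update of updates) { // mutate through the reference, like the `|*update|` capture
        if (update.resolution !== undefined) continue;
        const nameHash = hashName(update.name);
        rootDeps.forEach((dep, i) => {
          if (dep.nameHash !== nameHash) return;
          const packageId = resolvedIds[i];
          if (packageId >= resolutions.length) return;
          update.version = dep.version;
          update.resolution = resolutions[packageId];
          update.resolvedName = names[packageId];
        });
      }
    }
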
@@ -1,22 +1,8 @@
import { file, spawn } from "bun";
import {
afterAll,
afterEach,
beforeAll,
beforeEach,
expect,
it,
} from "bun:test";
import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test";
import { bunExe } from "bunExe";
import { bunEnv as env } from "bunEnv";
import {
access,
mkdir,
mkdtemp,
readlink,
rm,
writeFile,
} from "fs/promises";
import { access, mkdir, mkdtemp, readlink, rm, writeFile } from "fs/promises";
import { join, relative } from "path";
import { tmpdir } from "os";
import {

@@ -47,14 +33,20 @@ afterEach(async () => {
});

it("should add existing package", async () => {
|
||||
await writeFile(join(add_dir, "package.json"), JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}));
|
||||
await writeFile(join(package_dir, "package.json"), JSON.stringify({
|
||||
name: "bar",
|
||||
version: "0.0.2",
|
||||
}));
|
||||
await writeFile(
|
||||
join(add_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}),
|
||||
);
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "bar",
|
||||
version: "0.0.2",
|
||||
}),
|
||||
);
|
||||
const add_path = relative(package_dir, add_dir);
|
||||
const { stdout, stderr, exited } = spawn({
|
||||
cmd: [bunExe(), "add", `file:${add_path}`],
|
||||
@@ -66,11 +58,7 @@ it("should add existing package", async () => {
|
||||
});
|
||||
expect(stderr).toBeDefined();
|
||||
const err = await new Response(stderr).text();
|
||||
expect(err.replace(/^(.*?) v[^\n]+/, "$1").split(/\r?\n/)).toEqual([
|
||||
"bun add",
|
||||
" Saved lockfile",
|
||||
"",
|
||||
]);
|
||||
expect(err.replace(/^(.*?) v[^\n]+/, "$1").split(/\r?\n/)).toEqual(["bun add", " Saved lockfile", ""]);
|
||||
expect(stdout).toBeDefined();
|
||||
const out = await new Response(stdout).text();
|
||||
expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
|
||||
@@ -91,10 +79,13 @@ it("should add existing package", async () => {
|
||||
});
|
||||
|
||||
it("should reject missing package", async () => {
|
||||
await writeFile(join(package_dir, "package.json"), JSON.stringify({
|
||||
name: "bar",
|
||||
version: "0.0.2",
|
||||
}));
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "bar",
|
||||
version: "0.0.2",
|
||||
}),
|
||||
);
|
||||
const add_path = relative(package_dir, add_dir);
|
||||
const { stdout, stderr, exited } = spawn({
|
||||
cmd: [bunExe(), "add", `file:${add_path}`],
|
||||
@@ -122,14 +113,20 @@ it("should reject missing package", async () => {
|
||||
});
|
||||
|
||||
it("should reject invalid path without segfault", async () => {
|
||||
await writeFile(join(add_dir, "package.json"), JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}));
|
||||
await writeFile(join(package_dir, "package.json"), JSON.stringify({
|
||||
name: "bar",
|
||||
version: "0.0.2",
|
||||
}));
|
||||
await writeFile(
|
||||
join(add_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}),
|
||||
);
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "bar",
|
||||
version: "0.0.2",
|
||||
}),
|
||||
);
|
||||
const add_path = relative(package_dir, add_dir);
|
||||
const { stdout, stderr, exited } = spawn({
|
||||
cmd: [bunExe(), "add", `file://${add_path}`],
|
||||
@@ -156,9 +153,9 @@ it("should reject invalid path without segfault", async () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("should handle semver-like names", async() => {
|
||||
it("should handle semver-like names", async () => {
|
||||
const urls: string[] = [];
|
||||
setHandler(async (request) => {
|
||||
setHandler(async request => {
|
||||
expect(request.method).toBe("GET");
|
||||
expect(request.headers.get("accept")).toBe(
|
||||
"application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
|
||||
@@ -168,18 +165,15 @@ it("should handle semver-like names", async() => {
|
||||
urls.push(request.url);
|
||||
return new Response("not to be found", { status: 404 });
|
||||
});
|
||||
await writeFile(join(package_dir, "package.json"), JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}));
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}),
|
||||
);
|
||||
const { stdout, stderr, exited } = spawn({
|
||||
cmd: [
|
||||
bunExe(),
|
||||
"add",
|
||||
"1.2.3",
|
||||
"--config",
|
||||
import.meta.dir + "/basic.toml",
|
||||
],
|
||||
cmd: [bunExe(), "add", "1.2.3", "--config", import.meta.dir + "/basic.toml"],
|
||||
cwd: package_dir,
|
||||
stdout: null,
|
||||
stdin: "pipe",
|
||||
@@ -188,9 +182,7 @@ it("should handle semver-like names", async() => {
|
||||
});
|
||||
expect(stderr).toBeDefined();
|
||||
const err = await new Response(stderr).text();
|
||||
expect(err.split(/\r?\n/)).toContain(
|
||||
'error: package "1.2.3" not found localhost/1.2.3 404',
|
||||
);
|
||||
expect(err.split(/\r?\n/)).toContain('error: package "1.2.3" not found localhost/1.2.3 404');
|
||||
expect(stdout).toBeDefined();
|
||||
expect(await new Response(stdout).text()).toBe("");
|
||||
expect(await exited).toBe(1);
|
||||
@@ -204,9 +196,9 @@ it("should handle semver-like names", async() => {
|
||||
}
|
||||
});
|
||||
|
||||
it("should handle @scoped names", async() => {
|
||||
it("should handle @scoped names", async () => {
|
||||
const urls: string[] = [];
|
||||
setHandler(async (request) => {
|
||||
setHandler(async request => {
|
||||
expect(request.method).toBe("GET");
|
||||
expect(request.headers.get("accept")).toBe(
|
||||
"application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
|
||||
@@ -216,18 +208,15 @@ it("should handle @scoped names", async() => {
|
||||
urls.push(request.url);
|
||||
return new Response("not to be found", { status: 404 });
|
||||
});
|
||||
await writeFile(join(package_dir, "package.json"), JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}));
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
}),
|
||||
);
|
||||
const { stdout, stderr, exited } = spawn({
|
||||
cmd: [
|
||||
bunExe(),
|
||||
"add",
|
||||
"@bar/baz",
|
||||
"--config",
|
||||
import.meta.dir + "/basic.toml",
|
||||
],
|
||||
cmd: [bunExe(), "add", "@bar/baz", "--config", import.meta.dir + "/basic.toml"],
|
||||
cwd: package_dir,
|
||||
stdout: null,
|
||||
stdin: "pipe",
|
||||
@@ -236,9 +225,7 @@ it("should handle @scoped names", async() => {
|
||||
});
|
||||
expect(stderr).toBeDefined();
|
||||
const err = await new Response(stderr).text();
|
||||
expect(err.split(/\r?\n/)).toContain(
|
||||
'error: package "@bar/baz" not found localhost/@bar/baz 404',
|
||||
);
|
||||
expect(err.split(/\r?\n/)).toContain('error: package "@bar/baz" not found localhost/@bar/baz 404');
|
||||
expect(stdout).toBeDefined();
|
||||
expect(await new Response(stdout).text()).toBe("");
|
||||
expect(await exited).toBe(1);
|
||||
@@ -254,11 +241,13 @@ it("should handle @scoped names", async() => {
|
||||
|
||||
it("should add dependency with specified semver", async () => {
|
||||
const urls: string[] = [];
|
||||
setHandler(dummyRegistry(urls, "0.0.3", {
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
}));
|
||||
setHandler(
|
||||
dummyRegistry(urls, "0.0.3", {
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
}),
|
||||
);
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
@@ -288,26 +277,12 @@ it("should add dependency with specified semver", async () => {
|
||||
" 1 packages installed",
|
||||
]);
|
||||
expect(await exited).toBe(0);
|
||||
expect(urls).toEqual([
|
||||
`${root_url}/baz`,
|
||||
`${root_url}/baz.tgz`,
|
||||
]);
|
||||
expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
|
||||
expect(requested).toBe(2);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
|
||||
".bin",
|
||||
".cache",
|
||||
"baz",
|
||||
]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
|
||||
"baz-run",
|
||||
]);
|
||||
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(
|
||||
join("..", "baz", "index.js"),
|
||||
);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual([
|
||||
"index.js",
|
||||
"package.json",
|
||||
]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "baz"]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
|
||||
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "baz", "index.js"));
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual(["index.js", "package.json"]);
|
||||
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
|
||||
name: "baz",
|
||||
version: "0.0.3",
|
||||
@@ -327,11 +302,13 @@ it("should add dependency with specified semver", async () => {
|
||||
|
||||
it("should add dependency alongside workspaces", async () => {
|
||||
const urls: string[] = [];
|
||||
setHandler(dummyRegistry(urls, "0.0.3", {
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
}));
|
||||
setHandler(
|
||||
dummyRegistry(urls, "0.0.3", {
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
}),
|
||||
);
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
@@ -371,30 +348,13 @@ it("should add dependency alongside workspaces", async () => {
|
||||
" 2 packages installed",
|
||||
]);
|
||||
expect(await exited).toBe(0);
|
||||
expect(urls).toEqual([
|
||||
`${root_url}/baz`,
|
||||
`${root_url}/baz.tgz`,
|
||||
]);
|
||||
expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
|
||||
expect(requested).toBe(2);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
|
||||
".bin",
|
||||
".cache",
|
||||
"bar",
|
||||
"baz",
|
||||
]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
|
||||
"baz-run",
|
||||
]);
|
||||
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(
|
||||
join("..", "baz", "index.js"),
|
||||
);
|
||||
expect(await readlink(join(package_dir, "node_modules", "bar"))).toBe(
|
||||
join("..", "packages", "bar"),
|
||||
);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual([
|
||||
"index.js",
|
||||
"package.json",
|
||||
]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar", "baz"]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
|
||||
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "baz", "index.js"));
|
||||
expect(await readlink(join(package_dir, "node_modules", "bar"))).toBe(join("..", "packages", "bar"));
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual(["index.js", "package.json"]);
|
||||
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
|
||||
name: "baz",
|
||||
version: "0.0.3",
|
||||
@@ -405,7 +365,7 @@ it("should add dependency alongside workspaces", async () => {
|
||||
expect(await file(join(package_dir, "package.json")).json()).toEqual({
|
||||
name: "foo",
|
||||
version: "0.0.1",
|
||||
workspaces: [ "packages/bar" ],
|
||||
workspaces: ["packages/bar"],
|
||||
dependencies: {
|
||||
baz: "^0.0.3",
|
||||
},
|
||||
@@ -415,11 +375,13 @@ it("should add dependency alongside workspaces", async () => {
|
||||
|
||||
it("should add aliased dependency (npm)", async () => {
|
||||
const urls: string[] = [];
|
||||
setHandler(dummyRegistry(urls, "0.0.3", {
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
}));
|
||||
setHandler(
|
||||
dummyRegistry(urls, "0.0.3", {
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
}),
|
||||
);
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
@@ -448,26 +410,12 @@ it("should add aliased dependency (npm)", async () => {
|
||||
" 1 packages installed",
|
||||
]);
|
||||
expect(await exited).toBe(0);
|
||||
expect(urls).toEqual([
|
||||
`${root_url}/baz`,
|
||||
`${root_url}/baz.tgz`,
|
||||
]);
|
||||
expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
|
||||
expect(requested).toBe(2);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
|
||||
".bin",
|
||||
".cache",
|
||||
"bar",
|
||||
]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
|
||||
"baz-run",
|
||||
]);
|
||||
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(
|
||||
join("..", "bar", "index.js"),
|
||||
);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual([
|
||||
"index.js",
|
||||
"package.json",
|
||||
]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar"]);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
|
||||
expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "bar", "index.js"));
|
||||
expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["index.js", "package.json"]);
|
||||
expect(await file(join(package_dir, "node_modules", "bar", "package.json")).json()).toEqual({
|
||||
name: "baz",
|
||||
version: "0.0.3",
|
||||
@@ -518,14 +466,8 @@ it("should add aliased dependency (GitHub)", async () => {
|
||||
expect(await exited).toBe(0);
expect(urls).toEqual([]);
expect(requested).toBe(0);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
".bin",
".cache",
"uglify",
]);
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
"uglifyjs",
]);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "uglify"]);
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["uglifyjs"]);
expect(await readdirSorted(join(package_dir, "node_modules", ".cache"))).toEqual([
"@GH@mishoo-UglifyJS-e219a9a",
"uglify",

@@ -533,13 +475,7 @@ it("should add aliased dependency (GitHub)", async () => {
expect(await readdirSorted(join(package_dir, "node_modules", ".cache", "uglify"))).toEqual([
"mishoo-UglifyJS-e219a9a",
]);
expect(await readlink(join(
package_dir,
"node_modules",
".cache",
"uglify",
"mishoo-UglifyJS-e219a9a",
))).toBe(
expect(await readlink(join(package_dir, "node_modules", ".cache", "uglify", "mishoo-UglifyJS-e219a9a"))).toBe(
join(package_dir, "node_modules", ".cache", "@GH@mishoo-UglifyJS-e219a9a"),
);
expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([

@@ -556,12 +492,7 @@ it("should add aliased dependency (GitHub)", async () => {
"test",
"tools",
]);
const package_json = await file(join(
package_dir,
"node_modules",
"uglify",
"package.json",
)).json();
const package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
expect(package_json.name).toBe("uglify-js");
expect(package_json.version).toBe("3.14.1");
expect(await file(join(package_dir, "package.json")).json()).toEqual({

@@ -573,3 +504,95 @@ it("should add aliased dependency (GitHub)", async () => {
});
await access(join(package_dir, "bun.lockb"));
});

it("should let you add the same package twice", async () => {
|
||||
const urls: string[] = [];
|
||||
setHandler(dummyRegistry(urls, "0.0.3", {}));
|
||||
await writeFile(
|
||||
join(package_dir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "Foo",
|
||||
version: "0.0.1",
|
||||
dependencies: {},
|
||||
}),
|
||||
);
|
||||
// add as non-dev
|
||||
const {
|
||||
stdout: stdout1,
|
||||
stderr: stderr1,
|
||||
exited: exited1,
|
||||
} = spawn({
|
||||
cmd: [bunExe(), "add", "baz@0.0.3", "--config", import.meta.dir + "/basic.toml"],
|
||||
cwd: package_dir,
|
||||
stdout: null,
|
||||
stdin: "pipe",
|
||||
stderr: "pipe",
|
||||
env,
|
||||
});
|
||||
expect(stderr1).toBeDefined();
|
||||
const err1 = await new Response(stderr1).text();
|
||||
expect(err1).toContain("Saved lockfile");
|
||||
expect(stdout1).toBeDefined();
|
||||
const out1 = await new Response(stdout1).text();
|
||||
expect(out1).toContain("installed baz@0.0.3");
|
||||
expect(out1).toContain("1 packages installed");
|
||||
expect(await exited1).toBe(0);
|
||||
expect(urls).toEqual([`${root_url}/baz`, `${root_url}/baz.tgz`]);
|
||||
expect(requested).toBe(2);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "baz"]);
|
||||
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
|
||||
name: "baz",
|
||||
version: "0.0.3",
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
});
|
||||
expect(await file(join(package_dir, "package.json")).json()).toEqual({
|
||||
name: "Foo",
|
||||
version: "0.0.1",
|
||||
dependencies: {
|
||||
baz: "0.0.3",
|
||||
},
|
||||
});
|
||||
await access(join(package_dir, "bun.lockb"));
|
||||
// re-add as dev
|
||||
urls.length = 0;
|
||||
const {
|
||||
stdout: stdout2,
|
||||
stderr: stderr2,
|
||||
exited: exited2,
|
||||
} = spawn({
|
||||
cmd: [bunExe(), "add", "baz", "-d", "--config", import.meta.dir + "/basic.toml"],
|
||||
cwd: package_dir,
|
||||
stdout: null,
|
||||
stdin: "pipe",
|
||||
stderr: "pipe",
|
||||
env,
|
||||
});
|
||||
expect(stderr2).toBeDefined();
|
||||
const err2 = await new Response(stderr2).text();
|
||||
expect(err2).toContain("Saved lockfile");
|
||||
expect(stdout2).toBeDefined();
|
||||
const out2 = await new Response(stdout2).text();
|
||||
expect(out2).toContain("installed baz@0.0.3");
|
||||
expect(out2).not.toContain("1 packages installed");
|
||||
expect(await exited2).toBe(0);
|
||||
expect(urls).toEqual([`${root_url}/baz`]);
|
||||
expect(requested).toBe(3);
|
||||
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "baz"]);
|
||||
expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
|
||||
name: "baz",
|
||||
version: "0.0.3",
|
||||
bin: {
|
||||
"baz-run": "index.js",
|
||||
},
|
||||
});
|
||||
expect(await file(join(package_dir, "package.json")).json()).toEqual({
|
||||
name: "Foo",
|
||||
version: "0.0.1",
|
||||
dependencies: {
|
||||
baz: "^0.0.3",
|
||||
},
|
||||
});
|
||||
await access(join(package_dir, "bun.lockb"));
|
||||
});
|
||||
|
||||
File diff suppressed because it is too large.

@@ -9,7 +9,7 @@ let handler, server;
export let package_dir, requested, root_url;

export function dummyRegistry(urls, version = "0.0.2", props = {}) {
return async (request) => {
return async request => {
urls.push(request.url);
expect(request.method).toBe("GET");
if (request.url.endsWith(".tgz")) {
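
For context, the handler returned by `dummyRegistry` is what the tests above install with `setHandler`, so every registry request made by `bun add` gets recorded in `urls` and answered with a canned manifest or tarball. A hedged usage sketch (the import path and the exact exports are assumptions based on the helper names visible in this diff):

    import { dummyRegistry, setHandler, root_url } from "./dummy.registry"; // assumed path and exports

    const urls: string[] = [];
    // Serve version 0.0.3 of whatever package is requested, advertising a "baz-run" bin entry.
    setHandler(dummyRegistry(urls, "0.0.3", { bin: { "baz-run": "index.js" } }));

    // After spawning `bun add baz` against this registry, a test can assert on the recorded
    // traffic, e.g. that urls equals [`${root_url}/baz`, `${root_url}/baz.tgz`].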