mirror of https://github.com/oven-sh/bun
synced 2026-02-17 06:12:08 +00:00

Compare commits: dylan/wind ... zack/patch (2 commits)

Commits: 37875005c0, aec3c4afd7
@@ -10161,6 +10161,16 @@ pub const PackageManager = struct {
        }
    }

    fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator, ids: []const IdPair) !?Lockfile.Tree.NodeModulesFolder {
        while (iterator.nextNodeModulesFolder(null)) |node_modules| {
            for (ids) |id| {
                _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, id[0]) orelse continue;
                return node_modules;
            }
        }
        return null;
    }

    fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator, dependency_id: DependencyID) !?Lockfile.Tree.NodeModulesFolder {
        while (iterator.nextNodeModulesFolder(null)) |node_modules| {
            _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue;
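A minimal TypeScript sketch of what the new nodeModulesFolderForDependencyIDs helper does: walk the node_modules folders produced by the lockfile tree iterator and return the first one whose dependency list contains any of the candidate dependency IDs. The types below are simplified stand-ins for the lockfile structs, not Bun's actual API.

// Simplified, hypothetical stand-ins for the Zig lockfile structs.
type DependencyID = number;
type PackageID = number;
type IdPair = readonly [DependencyID, PackageID];

interface NodeModulesFolder {
  relativePath: string;
  dependencies: DependencyID[];
}

// Return the first node_modules folder containing any of the candidate dependency IDs.
function nodeModulesFolderForDependencyIDs(
  folders: Iterable<NodeModulesFolder>,
  ids: readonly IdPair[],
): NodeModulesFolder | null {
  for (const folder of folders) {
    for (const [depId] of ids) {
      if (folder.dependencies.includes(depId)) return folder;
    }
  }
  return null;
}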
@@ -10170,65 +10180,154 @@ pub const PackageManager = struct {
        return null;
    }

    fn pkgDepIdForNameAndVersion(
    const IdPair = struct { DependencyID, PackageID };

    fn pkgInfoForNameAndVersion(
        lockfile: *Lockfile,
        iterator: *Lockfile.Tree.Iterator,
        pkg_maybe_version_to_patch: []const u8,
        name: []const u8,
        version: ?[]const u8,
    ) struct { PackageID, DependencyID } {
    ) struct { PackageID, Lockfile.Tree.NodeModulesFolder } {
        var sfb = std.heap.stackFallback(@sizeOf(IdPair) * 4, lockfile.allocator);
        var pairs = std.ArrayList(IdPair).initCapacity(sfb.get(), 8) catch bun.outOfMemory();
        defer pairs.deinit();

        const name_hash = String.Builder.stringHash(name);

        const strbuf = lockfile.buffers.string_bytes.items;

        const dependency_id: DependencyID, const pkg_id: PackageID = brk: {
            var buf: [1024]u8 = undefined;
            const dependencies = lockfile.buffers.dependencies.items;
        var buf: [1024]u8 = undefined;
        const dependencies = lockfile.buffers.dependencies.items;

            var matches_found: u32 = 0;
            var maybe_first_match: ?struct { DependencyID, PackageID } = null;
            for (dependencies, 0..) |dep, dep_id| {
                if (dep.name_hash != name_hash) continue;
                matches_found += 1;
                const pkg_id = lockfile.buffers.resolutions.items[dep_id];
                if (pkg_id == invalid_package_id) continue;
                const pkg = lockfile.packages.get(pkg_id);
                if (version) |v| {
                    const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long");
                    if (std.mem.eql(u8, label, v)) break :brk .{ @intCast(dep_id), pkg_id };
                } else maybe_first_match = .{ @intCast(dep_id), pkg_id };
        for (dependencies, 0..) |dep, dep_id| {
            if (dep.name_hash != name_hash) continue;
            const pkg_id = lockfile.buffers.resolutions.items[dep_id];
            if (pkg_id == invalid_package_id) continue;
            const pkg = lockfile.packages.get(pkg_id);
            if (version) |v| {
                const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long");
                if (std.mem.eql(u8, label, v)) {
                    pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory();
                }
            } else {
                pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory();
            }
        }

        if (pairs.items.len == 0) {
            Output.prettyErrorln("\n<r><red>error<r>: package <b>{s}<r> not found<r>", .{pkg_maybe_version_to_patch});
            Global.crash();
            return;
        }

        // user supplied a version e.g. `is-even@1.0.0`
        if (version != null) {
            if (pairs.items.len == 1) {
                const dep_id, const pkg_id = pairs.items[0];
                const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse {
                    Output.prettyError(
                        "<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
                        .{pkg_maybe_version_to_patch},
                    );
                    Global.crash();
                };
                return .{
                    pkg_id,
                    folder,
                };
            }

            const first_match = maybe_first_match orelse {
                Output.prettyErrorln("\n<r><red>error<r>: package <b>{s}<r> not found<r>", .{pkg_maybe_version_to_patch});
            // we found multiple dependents of the supplied pkg + version
            // the final package in the node_modules might be hoisted
            // so we are going to try looking for each dep id in node_modules
            _, const pkg_id = pairs.items[0];
            const folder = (try nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse {
                Output.prettyError(
                    "<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
                    .{pkg_maybe_version_to_patch},
                );
                Global.crash();
                return;
            };

            if (matches_found > 1) {
                Output.prettyErrorln(
                    "\n<r><red>error<r>: Found multiple versions of <b>{s}<r>, please specify a precise version from the following list:<r>\n",
                    .{name},
            return .{
                pkg_id,
                folder,
            };
        }

        // Otherwise the user did not supply a version, just the pkg name

        // Only one match, let's use it
        if (pairs.items.len == 1) {
            const dep_id, const pkg_id = pairs.items[0];
            const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse {
                Output.prettyError(
                    "<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
                    .{pkg_maybe_version_to_patch},
                );
                var i: usize = 0;
                const pkg_hashes = lockfile.packages.items(.name_hash);
                while (i < lockfile.packages.len) {
                    if (std.mem.indexOfScalar(u64, pkg_hashes[i..], name_hash)) |idx| {
                        defer i += idx + 1;
                        const pkg_id = i + idx;
                        const pkg = lockfile.packages.get(pkg_id);
                        if (!std.mem.eql(u8, pkg.name.slice(strbuf), name)) continue;

                        Output.prettyError(" {s}@<blue>{}<r>\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) });
                    } else break;
                }
                Global.crash();
                return;
            }
            };
            return .{
                pkg_id,
                folder,
            };
        }

        break :brk .{ first_match[0], first_match[1] };
        // Otherwise we have multiple matches
        //
        // There are two cases:
        // a) the multiple matches are all the same underlying package (this happens because there could be multiple dependents of the same package)
        // b) the matches are actually different packages, we'll prompt the user to select which one

        _, const pkg_id = pairs.items[0];
        const count = count: {
            var count: u32 = 0;
            for (pairs.items) |pair| {
                if (pair[1] == pkg_id) count += 1;
            }
            break :count count;
        };

        return .{ pkg_id, dependency_id };
        // Disambiguate case a) from b)
        if (count == pairs.items.len) {
            // It may be hoisted, so we'll try the first one that matches
            const folder = (try nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse {
                Output.prettyError(
                    "<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
                    .{pkg_maybe_version_to_patch},
                );
                Global.crash();
            };
            return .{
                pkg_id,
                folder,
            };
        }

        Output.prettyErrorln(
            "\n<r><red>error<r>: Found multiple versions of <b>{s}<r>, please specify a precise version from the following list:<r>\n",
            .{name},
        );
        var i: usize = 0;
        while (i < pairs.items.len) : (i += 1) {
            _, const pkgid = pairs.items[i];
            if (pkgid == invalid_package_id)
                continue;

            const pkg = lockfile.packages.get(pkgid);

            Output.prettyError(" {s}@<blue>{}<r>\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) });

            if (i + 1 < pairs.items.len) {
                for (pairs.items[i + 1 ..]) |*p| {
                    if (p[1] == pkgid) {
                        p[1] = invalid_package_id;
                    }
                }
            }
        }
        Global.crash();
    }

    const PatchArgKind = enum {
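The comments in the hunk above spell out the selection policy once candidate (dependency, package) pairs are collected: a single match is used directly, multiple dependents of the same (possibly hoisted) package are resolved by trying each dependency ID against node_modules, and genuinely different packages make the command list the available versions and exit. A hedged TypeScript sketch of that policy, reusing the simplified types and helper from the sketch above and collapsing Bun's error reporting and Global.crash() into thrown errors:

// Sketch only: Bun prints a formatted error and calls Global.crash(); here we just throw.
function pickPackageToPatch(
  pairs: IdPair[],
  folders: Iterable<NodeModulesFolder>,
  versionWasGiven: boolean,
): { pkgId: PackageID; folder: NodeModulesFolder } {
  if (pairs.length === 0) throw new Error("package not found");

  // A single dependent (with or without a version): resolve its folder directly.
  if (pairs.length === 1) {
    const folder = nodeModulesFolderForDependencyIDs(folders, pairs);
    if (!folder) throw new Error("could not find the folder in node_modules");
    return { pkgId: pairs[0][1], folder };
  }

  const pkgId = pairs[0][1];
  const allSamePackage = pairs.every(([, id]) => id === pkgId);

  // Case a) every match is the same underlying package (multiple dependents);
  // the installed copy may be hoisted, so try each dependency ID in turn.
  if (versionWasGiven || allSamePackage) {
    const folder = nodeModulesFolderForDependencyIDs(folders, pairs);
    if (!folder) throw new Error("could not find the folder in node_modules");
    return { pkgId, folder };
  }

  // Case b) genuinely different packages: ask the user for a precise version.
  throw new Error("multiple versions found; specify a precise version");
}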
@@ -10384,17 +10483,7 @@ pub const PackageManager = struct {
            .name_and_version => brk: {
                const pkg_maybe_version_to_patch = argument;
                const name, const version = Dependency.splitNameAndVersion(pkg_maybe_version_to_patch);
                const result = pkgDepIdForNameAndVersion(manager.lockfile, pkg_maybe_version_to_patch, name, version);
                const pkg_id = result[0];
                const dependency_id = result[1];

                const folder = (try nodeModulesFolderForDependencyID(&iterator, dependency_id)) orelse {
                    Output.prettyError(
                        "<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
                        .{pkg_maybe_version_to_patch},
                    );
                    Global.crash();
                };
                const pkg_id, const folder = pkgInfoForNameAndVersion(manager.lockfile, &iterator, pkg_maybe_version_to_patch, name, version);

                const pkg = manager.lockfile.packages.get(pkg_id);
                const pkg_name = pkg.name.slice(strbuf);
@@ -10480,6 +10569,7 @@ pub const PackageManager = struct {
                out_dir: if (bun.Environment.isWindows) []const u16 else void,
                buf1: if (bun.Environment.isWindows) []u16 else void,
                buf2: if (bun.Environment.isWindows) []u16 else void,
                tmpdir_in_node_modules: if (bun.Environment.isWindows) std.fs.Dir else void,
            ) !u32 {
                var real_file_count: u32 = 0;
@@ -10494,10 +10584,10 @@ pub const PackageManager = struct {
                const openFile = std.fs.Dir.openFile;
                const createFile = std.fs.Dir.createFile;

                // - rename node_modules/$PKG/$FILE -> node_modules/$PKG/$TMPNAME
                // - create node_modules/$PKG/$FILE
                // - copy: cache/$PKG/$FILE -> node_modules/$PKG/$FILE
                // - unlink: $TMPDIR/$FILE
                // 1. rename original file in node_modules to tmp_dir_in_node_modules
                // 2. create the file again
                // 3. copy cache file to the newly re-created file
                // 4. profit
                if (comptime bun.Environment.isWindows) {
                    var tmpbuf: [1024]u8 = undefined;
                    const basename = bun.strings.fromWPath(pathbuf2[0..], entry.basename);
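The numbered comments above describe the per-file sequence used on Windows when restoring a package's files from the cache. A small Node-style TypeScript sketch of the same four steps, with placeholder paths rather than Bun's real cache and temp-dir layout:

import * as fs from "node:fs";
import * as path from "node:path";

// Placeholder paths and a flat file layout are assumed; Bun's real implementation differs.
function restoreFileFromCache(pkgDir: string, tmpDir: string, cacheDir: string, file: string) {
  const dest = path.join(pkgDir, file);
  // 1. rename the original file out of the way, into a temp dir inside node_modules
  fs.renameSync(dest, path.join(tmpDir, file));
  // 2 + 3. re-create the file by copying the pristine copy from the cache
  fs.copyFileSync(path.join(cacheDir, file), dest);
  // 4. the temp dir (and the displaced original with it) is deleted afterwards by the caller
}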
@@ -10513,7 +10603,7 @@ pub const PackageManager = struct {
                    if (bun.sys.renameatConcurrently(
                        bun.toFD(destination_dir_.fd),
                        entrypathZ,
                        bun.toFD(destination_dir_.fd),
                        bun.toFD(tmpdir_in_node_modules.fd),
                        tmpname,
                        .{ .move_fallback = true },
                    ).asErr()) |e| {
@@ -10593,6 +10683,15 @@ pub const PackageManager = struct {
                        Global.crash();
                    }
                    out_dir = buf2[0..outlen];
                    var tmpbuf: [1024]u8 = undefined;
                    const tmpname = bun.span(bun.fs.FileSystem.instance.tmpname("tffbp", tmpbuf[0..], bun.fastRandom()) catch |e| {
                        Output.prettyError("<r><red>error<r>: copying file {s}", .{@errorName(e)});
                        Global.crash();
                    });
                    const temp_folder_in_node_modules = try node_modules_folder.makeOpenPath(tmpname, .{});
                    defer {
                        node_modules_folder.deleteTree(tmpname) catch {};
                    }
                    _ = try FileCopier.copy(
                        node_modules_folder,
                        &walker,
@@ -10600,6 +10699,7 @@ pub const PackageManager = struct {
                        out_dir,
                        &buf1,
                        &buf2,
                        temp_folder_in_node_modules,
                    );
                } else if (Environment.isPosix) {
                    _ = try FileCopier.copy(
@@ -10609,6 +10709,7 @@ pub const PackageManager = struct {
                        {},
                        {},
                        {},
                        {},
                    );
                }
            }
@@ -10781,19 +10882,8 @@ pub const PackageManager = struct {
            },
            .name_and_version => brk: {
                const name, const version = Dependency.splitNameAndVersion(argument);
                const result = pkgDepIdForNameAndVersion(lockfile, argument, name, version);
                const pkg_id: PackageID = result[0];
                const dependency_id: DependencyID = result[1];
                const node_modules = (try nodeModulesFolderForDependencyID(
                    &iterator,
                    dependency_id,
                )) orelse {
                    Output.prettyError(
                        "<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
                        .{argument},
                    );
                    Global.crash();
                };
                const pkg_id, const node_modules = pkgInfoForNameAndVersion(lockfile, &iterator, argument, name, version);

                const changes_dir = bun.path.joinZBuf(pathbuf[0..], &[_][]const u8{
                    node_modules.relative_path,
                    name,
@@ -367,7 +367,7 @@ pub const PatchTask = struct {
        )) {
            .result => |fd| fd,
            .err => |e| {
                return try log.addErrorFmtNoLoc(this.manager.allocator, "{}", .{e});
                return try log.addErrorFmtNoLoc(this.manager.allocator, "failed adding bun tag: {}", .{e.withPath(buntagbuf[0 .. bun_tag_prefix.len + hashlen :0])});
            },
        };
        _ = bun.sys.close(buntagfd);
@@ -387,7 +387,7 @@ pub const PatchTask = struct {
            bun.toFD(this.callback.apply.cache_dir.fd),
            this.callback.apply.cache_dir_subpath,
            .{ .move_fallback = true },
        ).asErr()) |e| return try log.addErrorFmtNoLoc(this.manager.allocator, "{}", .{e});
        ).asErr()) |e| return try log.addErrorFmtNoLoc(this.manager.allocator, "renaming changes to cache dir: {}", .{e.withPath(this.callback.apply.cache_dir_subpath)});
    }

    pub fn calcHash(this: *PatchTask) ?u64 {
src/sys.zig (35 changes)
@@ -1818,10 +1818,6 @@ pub fn renameatConcurrentlyWithoutFallback(
    to: [:0]const u8,
) Maybe(void) {
    var did_atomically_replace = false;
    if (comptime Environment.isWindows) {
        // Windows doesn't have an equivalent
        return renameat(from_dir_fd, from, to_dir_fd, to);
    }

    attempt_atomic_rename_and_fallback_to_racy_delete: {
        {
@@ -1834,26 +1830,29 @@ pub fn renameatConcurrentlyWithoutFallback(
                .result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
            };

            // Fallback path: the folder exists in the cache dir, it might be in a strange state
            // let's attempt to atomically replace it with the temporary folder's version
            if (switch (err.getErrno()) {
                .EXIST, .NOTEMPTY, .OPNOTSUPP => true,
                else => false,
            }) {
                did_atomically_replace = true;
                switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{
                    .exchange = true,
                })) {
                    .err => {},
                    .result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
            // Windows doesn't have any equivalent of renameat with swap
            if (!bun.Environment.isWindows) {
                // Fallback path: the folder exists in the cache dir, it might be in a strange state
                // let's attempt to atomically replace it with the temporary folder's version
                if (switch (err.getErrno()) {
                    .EXIST, .NOTEMPTY, .OPNOTSUPP => true,
                    else => false,
                }) {
                    did_atomically_replace = true;
                    switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{
                        .exchange = true,
                    })) {
                        .err => {},
                        .result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
                    }
                    did_atomically_replace = false;
                }
                did_atomically_replace = false;
            }
        }

        // sad path: let's try to delete the folder and then rename it
        var to_dir = to_dir_fd.asDir();
        to_dir.deleteTree(from) catch {};
        to_dir.deleteTree(to) catch {};
        switch (bun.sys.renameat(from_dir_fd, from, to_dir_fd, to)) {
            .err => |err| {
                return .{ .err = err };
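Taken together with the previous hunk, the change appears to drop the early Windows return and instead skip only the renameat2 exchange on Windows, so every platform still reaches the delete-and-retry fallback. A rough TypeScript sketch of that ladder, using a hypothetical exchangeRename in place of renameat2 with .exchange = true (which has no Node.js equivalent) and compressing the original's per-directory cleanup:

import * as fs from "node:fs";

// Hypothetical stand-in: Node.js exposes no RENAME_EXCHANGE, so this always fails here.
function exchangeRename(_from: string, _to: string): void {
  throw new Error("atomic exchange not available on this platform");
}

function renameConcurrently(from: string, to: string, isWindows: boolean): void {
  let lastErr: unknown;
  try {
    fs.renameSync(from, to); // fast path: plain rename wins
    return;
  } catch (err) {
    lastErr = err;
  }

  // The destination already exists (possibly in a strange, half-written state).
  // On platforms with an atomic swap, exchange the two entries in place.
  const code = (lastErr as NodeJS.ErrnoException | undefined)?.code;
  if (!isWindows && (code === "EEXIST" || code === "ENOTEMPTY" || code === "EOPNOTSUPP")) {
    try {
      exchangeRename(from, to);
      return;
    } catch {
      // fall through to the racy delete below
    }
  }

  // Sad path: racily delete the destination and retry the rename once.
  fs.rmSync(to, { recursive: true, force: true });
  fs.renameSync(from, to);
}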
@@ -13,6 +13,88 @@ beforeAll(() => {

describe("bun patch <pkg>", async () => {
  describe("workspace interactions", async () => {
    /**
     * @repo/eslint-config and @repo/typescript-config both depend on @types/ws@8.5.4
     * so it should be hoisted to the root node_modules
     */
    describe("inside workspace with hoisting", async () => {
      const args = [
        ["node_modules/@types/ws", "packages/eslint-config/node_modules/@types/ws"],
        ["@types/ws@8.5.4", "node_modules/@repo/eslint-config/node_modules/@types/ws"],
      ];
      for (const [arg, path] of args) {
        test(arg, async () => {
          const tempdir = tempDirWithFiles("lol", {
            "package.json": JSON.stringify({
              "name": "my-workspace",
              private: "true",
              version: "0.0.1",
              "devDependencies": {
                "@repo/ui": "*",
                "@repo/eslint-config": "*",
                "@repo/typescript-config": "*",
                "@types/ws": "7.4.7",
              },
              workspaces: ["packages/*"],
            }),
            packages: {
              "eslint-config": {
                "package.json": JSON.stringify({
                  name: "@repo/eslint-config",
                  "version": "0.0.0",
                  dependencies: {
                    "@types/ws": "8.5.4",
                  },
                  private: "true",
                }),
              },
              "typescript-config": {
                "package.json": JSON.stringify({
                  "name": "@repo/typescript-config",
                  "version": "0.0.0",
                  dependencies: {
                    "@types/ws": "8.5.4",
                  },
                  private: "true",
                }),
              },
              "ui": {
                "package.json": JSON.stringify({
                  name: "@repo/ui",
                  version: "0.0.0",
                  private: "true",
                  devDependencies: {
                    "@repo/eslint-config": "*",
                    "@repo/typescript-config": "*",
                  },
                }),
              },
            },
          });

          console.log("TEMPDIR", tempdir);

          await $`${bunExe()} i`.env(bunEnv).cwd(tempdir);

          let result = await $`cd packages/eslint-config; ${bunExe()} patch ${arg}`.env(bunEnv).cwd(tempdir);
          expect(result.stderr.toString()).not.toContain("error");
          expect(result.stdout.toString()).toContain(
            `To patch @types/ws, edit the following folder:\n\n ${tempdir}/${path}\n`,
          );

          await $`echo LOL > ${path}/index.d.ts`.env(bunEnv).cwd(tempdir);

          expectNoError(await $`cd packages/eslint-config; ${bunExe()} patch --commit ${arg}`.env(bunEnv).cwd(tempdir));

          expect(await $`cat ${path}/index.d.ts`.env(bunEnv).cwd(tempdir).text()).toEqual("LOL\n");

          expect(
            (await $`cat package.json`.cwd(tempdir).env(bunEnv).json()).patchedDependencies["@types/ws@8.5.4"],
          ).toEqual("patches/@types%2Fws@8.5.4.patch");
        });
      }
    });

    describe("inside workspace package", async () => {
      const args = [
        ["node_modules/@types/ws", "packages/eslint-config/node_modules/@types/ws"],
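The fixture above covers the hoisting case the new lookup exists for: two workspace packages both depend on @types/ws@8.5.4, so a single copy is installed once rather than inside each package. A hedged usage sketch (assuming bun is on PATH and the workspace from the fixture is already installed; the exact folder printed depends on where the lockfile places the hoisted copy):

import { $ } from "bun";

// Run from inside one workspace member; `bun patch` should still report the
// folder where the hoisted copy of @types/ws actually lives.
const out = await $`bun patch @types/ws@8.5.4`.cwd("packages/eslint-config").text();
console.log(out); // e.g. "To patch @types/ws, edit the following folder: ..."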
@@ -581,10 +663,10 @@ module.exports = function isEven() {
        "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(420))`,
      });

      await $`${bunExe()} run index.ts`
      await $`${bunExe()} i`
        .env(bunEnv)
        .cwd(filedir)
        .then(o => expect(o.stderr.toString()).toBe(""));
        .cwd(tempdir)
        .then(o => expect(o.stderr.toString()).not.toContain("error"));

      const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(tempdir);
      expect(stderr.toString()).toBe("");
@@ -638,10 +720,10 @@ module.exports = function isOdd() {
        "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(420))`,
      });

      await $`${bunExe()} run index.ts`
      await $`${bunExe()} i`
        .env(bunEnv)
        .cwd(filedir)
        .then(o => expect(o.stderr.toString()).toBe(""));
        .cwd(tempdir)
        .then(o => expect(o.stderr.toString()).not.toContain("error"));

      const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(tempdir);
      expect(stderr.toString()).toBe("");