From 7056384702231f634e4e5d0f7c65965041314fc6 Mon Sep 17 00:00:00 2001 From: Zack Radisic <56137411+zackradisic@users.noreply.github.com> Date: Mon, 19 Feb 2024 14:56:32 -0800 Subject: [PATCH 01/21] Fix file redirect stdin not working (#9000) * Open with proper perms when redirecting file to stdin * Add test for redirecting file to stdin * Extract redirect flags -> bun.Mode logic to function * Remove dead code * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/shell/interpreter.zig | 8 +-- src/shell/shell.zig | 7 ++ test/js/bun/shell/bunshell.test.ts | 104 +++++------------------------ 3 files changed, 29 insertions(+), 90 deletions(-) diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 00b05e43ea..ca3a8cd2da 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -3477,8 +3477,8 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { const path = this.redirection_file.items[0..this.redirection_file.items.len -| 1 :0]; log("EXPANDED REDIRECT: {s}\n", .{this.redirection_file.items[0..]}); const perm = 0o666; - const extra: bun.Mode = if (this.node.redirect.append) std.os.O.APPEND else std.os.O.TRUNC; - const redirfd = switch (Syscall.openat(this.base.shell.cwd_fd, path, std.os.O.WRONLY | std.os.O.CREAT | extra, perm)) { + const flags = this.node.redirect.toFlags(); + const redirfd = switch (Syscall.openat(this.base.shell.cwd_fd, path, flags, perm)) { .err => |e| { const buf = std.fmt.allocPrint(this.spawn_arena.allocator(), "bun: {s}: {s}", .{ e.toSystemError().message, path }) catch bun.outOfMemory(); return this.writeFailingError(buf, 1); @@ -4013,8 +4013,8 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { const path = cmd.redirection_file.items[0..cmd.redirection_file.items.len -| 1 :0]; log("EXPANDED REDIRECT: {s}\n", .{cmd.redirection_file.items[0..]}); const perm = 0o666; - const extra: bun.Mode = if (node.redirect.append) std.os.O.APPEND else std.os.O.TRUNC; - const redirfd = switch (Syscall.openat(cmd.base.shell.cwd_fd, path, std.os.O.WRONLY | std.os.O.CREAT | extra, perm)) { + const flags = node.redirect.toFlags(); + const redirfd = switch (Syscall.openat(cmd.base.shell.cwd_fd, path, flags, perm)) { .err => |e| { const buf = std.fmt.allocPrint(arena.allocator(), "bun: {s}: {s}", .{ e.toSystemError().message, path }) catch bun.outOfMemory(); cmd.writeFailingError(buf, 1); diff --git a/src/shell/shell.zig b/src/shell/shell.zig index 6bf8c6f80a..ca70a12e50 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -444,6 +444,13 @@ pub const AST = struct { append: bool = false, __unused: u4 = 0, + pub fn toFlags(this: RedirectFlags) bun.Mode { + const read_write_flags: bun.Mode = if (this.stdin) std.os.O.RDONLY else std.os.O.WRONLY | std.os.O.CREAT; + const extra: bun.Mode = if (this.append) std.os.O.APPEND else std.os.O.TRUNC; + const final_flags: bun.Mode = if (this.stdin) read_write_flags else extra | read_write_flags; + return final_flags; + } + pub fn @"<"() RedirectFlags { return .{ .stdin = true }; } diff --git a/test/js/bun/shell/bunshell.test.ts b/test/js/bun/shell/bunshell.test.ts index 7d945dfd76..97072e203b 100644 --- a/test/js/bun/shell/bunshell.test.ts +++ b/test/js/bun/shell/bunshell.test.ts @@ -400,7 +400,12 @@ describe("bunshell", () => { let procEnv = JSON.parse(str1); expect(procEnv).toEqual({ ...bunEnv, BAZ: "1", FOO: "bar" }); procEnv = JSON.parse(str2); - expect(procEnv).toEqual({ ...bunEnv, 
BAZ: "1", FOO: "bar", BUN_TEST_VAR: "1" }); + expect(procEnv).toEqual({ + ...bunEnv, + BAZ: "1", + FOO: "bar", + BUN_TEST_VAR: "1", + }); }); test("syntax edgecase", async () => { @@ -443,11 +448,11 @@ describe("bunshell", () => { describe("rm", () => { let temp_dir: string; const files = { - "foo": "bar", - "bar": "baz", - "dir": { - "some": "more", - "files": "here", + foo: "bar", + bar: "baz", + dir: { + some: "more", + files: "here", }, }; beforeAll(() => { @@ -654,6 +659,8 @@ describe("deno_task", () => { // zero arguments after re-direct await TestBuilder.command`echo 1 > $EMPTY`.stderr("bun: ambiguous redirect: at `echo`\n").exitCode(1).run(); + + await TestBuilder.command`echo foo bar > file.txt; cat < file.txt`.ensureTempDir().stdout("foo bar\n").run(); }); test("pwd", async () => { @@ -676,7 +683,11 @@ describe("deno_task", () => { ...bunEnv, FOO: "bar", }); - expect(JSON.parse(stdout.toString())).toEqual({ ...bunEnv, BUN_TEST_VAR: "1", FOO: "bar" }); + expect(JSON.parse(stdout.toString())).toEqual({ + ...bunEnv, + BUN_TEST_VAR: "1", + FOO: "bar", + }); } { @@ -705,82 +716,3 @@ function sentinelByte(buf: Uint8Array): number { } throw new Error("No sentinel byte"); } - -const foo = { - "stmts": [ - { - "exprs": [ - { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }], - "redirect": { "stdin": false, "stdout": true, "stderr": false, "append": false, "__unused": 0 }, - "redirect_file": { "jsbuf": { "idx": 0 } }, - }, - }, - ], - }, - ], -}; - -const lex = [ - { "Text": "echo" }, - { "Delimit": {} }, - { "CmdSubstBegin": {} }, - { "Text": "echo" }, - { "Delimit": {} }, - { "Text": "ハハ" }, - { "Delimit": {} }, - { "CmdSubstEnd": {} }, - { "Redirect": { "stdin": false, "stdout": true, "stderr": false, "append": false, "__unused": 0 } }, - { "JSObjRef": 0 }, - { "Eof": {} }, -]; - -const lex2 = [ - { "Text": "echo" }, - { "Delimit": {} }, - { "CmdSubstBegin": {} }, - { "Text": "echo" }, - { "Delimit": {} }, - { "Text": "noice" }, - { "Delimit": {} }, - { "CmdSubstEnd": {} }, - { "Redirect": { "stdin": false, "stdout": true, "stderr": false, "append": false, "__unused": 0 } }, - { "JSObjRef": 0 }, - { "Eof": {} }, -]; - -const parse2 = { - "stmts": [ - { - "exprs": [ - { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }], - "redirect": { "stdin": false, "stdout": true, "stderr": false, "append": false, "__unused": 0 }, - "redirect_file": { "jsbuf": { "idx": 0 } }, - }, - }, - ], - }, - ], -}; - -const lsdkjfs = { - "stmts": [ - { - "exprs": [ - { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }], - "redirect": { "stdin": false, "stdout": true, "stderr": false, "append": false, "__unused": 0 }, - "redirect_file": { "jsbuf": { "idx": 0 } }, - }, - }, - ], - }, - ], -}; From 2656418e50b2c67e0939a005c895e63653149415 Mon Sep 17 00:00:00 2001 From: Zack Radisic <56137411+zackradisic@users.noreply.github.com> Date: Mon, 19 Feb 2024 21:26:50 -0800 Subject: [PATCH 02/21] Don't immediately delimit command substitution result (#9005) --- src/shell/interpreter.zig | 2 +- test/js/bun/shell/bunshell.test.ts | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index ca3a8cd2da..e46e3f0fef 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -1757,7 +1757,7 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { // "aa bbb" this.current_out.appendSlice(stdout[a..b]) catch 
bun.outOfMemory(); - this.pushCurrentOut(); + // this.pushCurrentOut(); // const slice_z = this.base.interpreter.allocator.dupeZ(u8, stdout[a..b]) catch bun.outOfMemory(); // this.pushResultSlice(slice_z); } diff --git a/test/js/bun/shell/bunshell.test.ts b/test/js/bun/shell/bunshell.test.ts index 97072e203b..5e0e168253 100644 --- a/test/js/bun/shell/bunshell.test.ts +++ b/test/js/bun/shell/bunshell.test.ts @@ -174,6 +174,25 @@ describe("bunshell", () => { doTest(`echo "$(echo 1; echo 2)"`, "1\n2\n"); doTest(`echo "$(echo "1" ; echo "2")"`, "1\n2\n"); doTest(`echo $(echo 1; echo 2)`, "1 2\n"); + + // Issue: #8982 + // https://github.com/oven-sh/bun/issues/8982 + test("word splitting", async () => { + await TestBuilder.command`echo $(echo id)/$(echo region)`.stdout("id/region\n").run(); + await TestBuilder.command`echo $(echo hi id)/$(echo region)`.stdout("hi id/region\n").run(); + + // Make sure its one whole argument + await TestBuilder.command`echo {"console.log(JSON.stringify(process.argv.slice(2)))"} > temp_script.ts; BUN_DEBUG_QUIET_LOGS=1 ${BUN} run temp_script.ts $(echo id)/$(echo region)` + .stdout('["id/region"]\n') + .ensureTempDir() + .run(); + + // Make sure its two separate arguments + await TestBuilder.command`echo {"console.log(JSON.stringify(process.argv.slice(2)))"} > temp_script.ts; BUN_DEBUG_QUIET_LOGS=1 ${BUN} run temp_script.ts $(echo hi id)/$(echo region)` + .stdout('["hi","id/region"]\n') + .ensureTempDir() + .run(); + }); }); describe("unicode", () => { From 48e7c0fb8e5b4eaa4d6d4d14f13113eeb523fbff Mon Sep 17 00:00:00 2001 From: Eemeli Palotie Date: Tue, 20 Feb 2024 08:50:24 +0200 Subject: [PATCH 03/21] fix: install vendored node_modules when using hardlinks (#9007) * fix: install vendored node_modules when using hardlinks * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/install/install.zig | 2 +- test/cli/install/vendor-baz-0.0.1.tgz | Bin 0 -> 376 bytes test/regression/issue/08093.test.ts | 80 ++++++++++++++++++++++++++ 3 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 test/cli/install/vendor-baz-0.0.1.tgz create mode 100644 test/regression/issue/08093.test.ts diff --git a/src/install/install.zig b/src/install/install.zig index d5c6d29bf6..3ee5fdb62f 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -1366,7 +1366,7 @@ pub const PackageInstall = struct { cached_package_dir, this.allocator, &[_]bun.OSPathSlice{}, - &[_]bun.OSPathSlice{bun.OSPathLiteral("node_modules")}, + &[_]bun.OSPathSlice{}, ) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; diff --git a/test/cli/install/vendor-baz-0.0.1.tgz b/test/cli/install/vendor-baz-0.0.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..22ae0e3e8c7ee66b9a40d55251bb7c5f55483886 GIT binary patch literal 376 zcmV-;0f+t{iwFQvr_*Hs1MQdFPJ=)YhI`FZtazh|h1u;M6211`H!!qJ8!GG)D2>th z?k<%cqOp|(FxLIJm;^TLz&}6CpHt^K0k7w<~erpkB~R|CpyOGU|IIDhkYNC z^CS98vD2SRYW0_(_=rFs$$aLY>wkGc!m2F7X`(~aYobkzo;NePG${tDSzP%=rO6s~ zY>M$TdhxICyx&gR(HUF+H_?6pjP81m)BdM>|8rIQUs2)wKLoEHA-;|>^lyoO;PRXL z{2GfgF}l740R+_d-kkT(i>?1>%d)o~uucDG59@!y!1aHFaHxO%?|;lpT&37)F|_Lc zL0bKll;HFqf^YSo8#9hE@1hP`>(3~o&i{jOQvVJH!_)mA+w0Hl{KuFS)b)Qu@Wc79 WsRlRvIUEkhx$q8`ic*;X7ytl1lg3p5 literal 0 HcmV?d00001 diff --git a/test/regression/issue/08093.test.ts b/test/regression/issue/08093.test.ts new file mode 100644 index 0000000000..3a98f8e066 --- /dev/null +++ b/test/regression/issue/08093.test.ts @@ 
-0,0 +1,80 @@ +import { file, spawn } from "bun"; +import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test"; +import { bunExe, bunEnv as env } from "harness"; +import { access, writeFile } from "fs/promises"; +import { join } from "path"; +import { + dummyAfterAll, + dummyAfterEach, + dummyBeforeAll, + dummyBeforeEach, + dummyRegistry, + package_dir, + readdirSorted, + requested, + root_url, + setHandler, +} from "./../../cli/install/dummy.registry.js"; + +beforeAll(dummyBeforeAll); +afterAll(dummyAfterAll); +beforeEach(dummyBeforeEach); +afterEach(dummyAfterEach); + +it("should install vendored node_modules with hardlink", async () => { + const urls: string[] = []; + setHandler( + dummyRegistry(urls, { + "0.0.1": {}, + latest: "0.0.1", + }), + ); + await writeFile( + join(package_dir, "package.json"), + JSON.stringify({ + name: "foo", + version: "0.0.1", + dependencies: { + "vendor-baz": "0.0.1", + }, + }), + ); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--backend", "hardlink"], + cwd: package_dir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env, + }); + + expect(stderr).toBeDefined(); + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(stdout).toBeDefined(); + const out = await new Response(stdout).text(); + expect(out).toContain("1 package installed"); + + expect(await exited).toBe(0); + expect(urls.sort()).toEqual([`${root_url}/vendor-baz`, `${root_url}/vendor-baz-0.0.1.tgz`]); + expect(requested).toBe(2); + + expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "vendor-baz"]); + expect(await readdirSorted(join(package_dir, "node_modules", "vendor-baz"))).toEqual([ + "cjs", + "index.js", + "package.json", + ]); + expect(await readdirSorted(join(package_dir, "node_modules", "vendor-baz", "cjs", "node_modules"))).toEqual([ + "foo-dep", + ]); + expect( + await readdirSorted(join(package_dir, "node_modules", "vendor-baz", "cjs", "node_modules", "foo-dep")), + ).toEqual(["index.js"]); + + expect(await file(join(package_dir, "node_modules", "vendor-baz", "package.json")).json()).toEqual({ + name: "vendor-baz", + version: "0.0.1", + }); + await access(join(package_dir, "bun.lockb")); +}); From 5c6b9ea9b6c52dc01622a3b1b1a355560ddeccd9 Mon Sep 17 00:00:00 2001 From: dave caruso Date: Tue, 20 Feb 2024 18:58:12 -0800 Subject: [PATCH 04/21] change how `bunx` caches things (#8921) * some things * yeah * ok * fix compilation error * fix on windows * ok * username --- src/async/posix_event_loop.zig | 36 +++--- src/bun.js/webcore/streams.zig | 2 +- src/bun.zig | 6 +- src/cache.zig | 4 +- src/cli/bunx_command.zig | 227 +++++++++++++++++++++++++-------- src/cli/run_command.zig | 4 +- src/fd.zig | 17 ++- src/glob.zig | 2 +- src/io/io.zig | 6 +- src/resolver/resolve_path.zig | 12 +- src/resolver/resolver.zig | 4 +- src/shell/interpreter.zig | 2 +- src/string_immutable.zig | 4 +- src/sys.zig | 60 ++++----- 14 files changed, 260 insertions(+), 126 deletions(-) diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig index ed0f74893b..2ad65e05df 100644 --- a/src/async/posix_event_loop.zig +++ b/src/async/posix_event_loop.zig @@ -309,59 +309,59 @@ pub const FilePoll = struct { var ptr = poll.owner; switch (ptr.tag()) { @field(Owner.Tag, bun.meta.typeBaseName(@typeName(FIFO))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) FIFO", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) FIFO", .{poll.fd}); 
ptr.as(FIFO).ready(size_or_offset, poll.flags.contains(.hup)); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellBufferedInput))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellBufferedInput", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellBufferedInput", .{poll.fd}); ptr.as(ShellBufferedInput).onPoll(size_or_offset, 0); }, @field(Owner.Tag, "Subprocess") => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) Subprocess", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) Subprocess", .{poll.fd}); var loader = ptr.as(JSC.Subprocess); loader.onExitNotificationTask(); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellBufferedWriter))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellBufferedWriter", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellBufferedWriter", .{poll.fd}); var loader = ptr.as(ShellBufferedWriter); loader.onPoll(size_or_offset, 0); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellBufferedWriterMini))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellBufferedWriterMini", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellBufferedWriterMini", .{poll.fd}); var loader = ptr.as(ShellBufferedWriterMini); loader.onPoll(size_or_offset, 0); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellSubprocessCapturedBufferedWriter))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellSubprocessCapturedBufferedWriter", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellSubprocessCapturedBufferedWriter", .{poll.fd}); var loader = ptr.as(ShellSubprocessCapturedBufferedWriter); loader.onPoll(size_or_offset, 0); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellSubprocessCapturedBufferedWriterMini))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellSubprocessCapturedBufferedWriterMini", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellSubprocessCapturedBufferedWriterMini", .{poll.fd}); var loader = ptr.as(ShellSubprocessCapturedBufferedWriterMini); loader.onPoll(size_or_offset, 0); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellSubprocess))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellSubprocess", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellSubprocess", .{poll.fd}); var loader = ptr.as(ShellSubprocess); loader.onExitNotificationTask(); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellSubprocessMini))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellSubprocessMini", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellSubprocessMini", .{poll.fd}); var loader = ptr.as(ShellSubprocessMini); loader.onExitNotificationTask(); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(JSC.WebCore.FileSink))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) FileSink", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) FileSink", .{poll.fd}); var loader = ptr.as(JSC.WebCore.FileSink); loader.onPoll(size_or_offset, 0); }, @field(Owner.Tag, "DNSResolver") => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) DNSResolver", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) DNSResolver", .{poll.fd}); var loader: *DNSResolver = ptr.as(DNSResolver); loader.onDNSPoll(poll); }, @@ -371,25 +371,25 @@ pub const FilePoll = struct { unreachable; } - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) GetAddrInfoRequest", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: 
{}) GetAddrInfoRequest", .{poll.fd}); var loader: *GetAddrInfoRequest = ptr.as(GetAddrInfoRequest); loader.onMachportChange(); }, @field(Owner.Tag, "OutputReader") => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) OutputReader", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) OutputReader", .{poll.fd}); var output: *LifecycleScriptSubprocessOutputReader = ptr.as(LifecycleScriptSubprocessOutputReader); output.onPoll(size_or_offset); }, @field(Owner.Tag, "PidPollData") => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) LifecycleScriptSubprocess Pid", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) LifecycleScriptSubprocess Pid", .{poll.fd}); var loader: *bun.install.LifecycleScriptSubprocess = @ptrCast(ptr.as(LifecycleScriptSubprocessPid)); loader.onProcessUpdate(size_or_offset); }, else => { const possible_name = Owner.typeNameFromTag(@intFromEnum(ptr.tag())); - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) disconnected? (maybe: {s})", .{ poll.fd, possible_name orelse "" }); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) disconnected? (maybe: {s})", .{ poll.fd, possible_name orelse "" }); }, } } @@ -731,7 +731,7 @@ pub const FilePoll = struct { pub fn registerWithFd(this: *FilePoll, loop: *Loop, flag: Flags, one_shot: bool, fd: bun.FileDescriptor) JSC.Maybe(void) { const watcher_fd = loop.fd; - log("register: {s} ({d})", .{ @tagName(flag), fd }); + log("register: {s} ({})", .{ @tagName(flag), fd }); std.debug.assert(fd != invalid_fd); @@ -908,7 +908,7 @@ pub const FilePoll = struct { }; if (this.flags.contains(.needs_rearm) and !force_unregister) { - log("unregister: {s} ({d}) skipped due to needs_rearm", .{ @tagName(flag), fd }); + log("unregister: {s} ({}) skipped due to needs_rearm", .{ @tagName(flag), fd }); this.flags.remove(.poll_process); this.flags.remove(.poll_readable); this.flags.remove(.poll_process); @@ -916,7 +916,7 @@ pub const FilePoll = struct { return JSC.Maybe(void).success; } - log("unregister: {s} ({d})", .{ @tagName(flag), fd }); + log("unregister: {s} ({})", .{ @tagName(flag), fd }); if (comptime Environment.isLinux) { const ctl = linux.epoll_ctl( diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 22f6db7395..24fe68d0da 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -1519,7 +1519,7 @@ pub fn NewFileSink(comptime EventLoop: JSC.EventLoopKind) type { remain = remain[res.result..]; total += res.result; - log("Wrote {d} bytes (fd: {d}, head: {d}, {d}/{d})", .{ res.result, fd, this.head, remain.len, total }); + log("Wrote {d} bytes (fd: {}, head: {d}, {d}/{d})", .{ res.result, fd, this.head, remain.len, total }); if (res.result == 0) { if (this.poll_ref) |poll| { diff --git a/src/bun.zig b/src/bun.zig index 015ba67584..605855ddba 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -466,7 +466,7 @@ pub fn isReadable(fd: FileDescriptor) PollFlag { }; const result = (std.os.poll(&polls, 0) catch 0) != 0; - global_scope_log("poll({d}) readable: {any} ({d})", .{ fd, result, polls[0].revents }); + global_scope_log("poll({}) readable: {any} ({d})", .{ fd, result, polls[0].revents }); return if (result and polls[0].revents & std.os.POLL.HUP != 0) PollFlag.hup else if (result) @@ -487,7 +487,7 @@ pub fn isWritable(fd: FileDescriptor) PollFlag { }; const rc = std.os.windows.ws2_32.WSAPoll(&polls, 1, 0); const result = (if (rc != std.os.windows.ws2_32.SOCKET_ERROR) @as(usize, @intCast(rc)) else 0) != 0; - global_scope_log("poll({d}) writable: {any} ({d})", .{ fd, 
result, polls[0].revents }); + global_scope_log("poll({}) writable: {any} ({d})", .{ fd, result, polls[0].revents }); if (result and polls[0].revents & std.os.POLL.WRNORM != 0) { return .hup; } else if (result) { @@ -507,7 +507,7 @@ pub fn isWritable(fd: FileDescriptor) PollFlag { }; const result = (std.os.poll(&polls, 0) catch 0) != 0; - global_scope_log("poll({d}) writable: {any} ({d})", .{ fd, result, polls[0].revents }); + global_scope_log("poll({}) writable: {any} ({d})", .{ fd, result, polls[0].revents }); if (result and polls[0].revents & std.os.POLL.HUP != 0) { return .hup; } else if (result) { diff --git a/src/cache.zig b/src/cache.zig index 42e990ccbd..81a6c5b5af 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -175,7 +175,7 @@ pub const Fs = struct { error.ENOENT => { const handle = try bun.openFile(path, .{ .mode = .read_only }); Output.prettyErrorln( - "Internal error: directory mismatch for directory \"{s}\", fd {d}. You don't need to do anything, but this indicates a bug.", + "Internal error: directory mismatch for directory \"{s}\", fd {}. You don't need to do anything, but this indicates a bug.", .{ path, dirname_fd }, ); break :brk bun.toFD(handle.handle); @@ -189,7 +189,7 @@ pub const Fs = struct { } if (comptime !Environment.isWindows) // skip on Windows because NTCreateFile will do it. - debug("openat({d}, {s}) = {}", .{ dirname_fd, path, bun.toFD(file_handle.handle) }); + debug("openat({}, {s}) = {}", .{ dirname_fd, path, bun.toFD(file_handle.handle) }); const will_close = rfs.needToCloseFiles() and _file_handle == null; defer { diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index a43c4b280e..1e534d4fd1 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -54,9 +54,44 @@ pub const BunxCommand = struct { return new_str; } + const seconds_cache_valid = 60 * 60 * 24; // 1 day + const nanoseconds_cache_valid = seconds_cache_valid * 1000000000; + fn getBinNameFromSubpath(bundler: *bun.Bundler, dir_fd: bun.FileDescriptor, subpath_z: [:0]const u8) ![]const u8 { const target_package_json_fd = try std.os.openatZ(dir_fd.cast(), subpath_z, std.os.O.RDONLY, 0); const target_package_json = std.fs.File{ .handle = target_package_json_fd }; + + const is_stale = is_stale: { + if (Environment.isWindows) { + var io_status_block: std.os.windows.IO_STATUS_BLOCK = undefined; + var info: std.os.windows.FILE_BASIC_INFORMATION = undefined; + const rc = std.os.windows.ntdll.NtQueryInformationFile(target_package_json_fd, &io_status_block, &info, @sizeOf(std.os.windows.FILE_BASIC_INFORMATION), .FileBasicInformation); + switch (rc) { + .SUCCESS => { + const time = std.os.windows.fromSysTime(info.LastWriteTime); + const now = std.time.nanoTimestamp(); + break :is_stale (now - time > nanoseconds_cache_valid); + }, + // treat failures to stat as stale + else => break :is_stale true, + } + } else { + var stat: std.os.Stat = undefined; + const rc = std.c.fstat(target_package_json_fd, &stat); + if (rc != 0) { + break :is_stale true; + } + break :is_stale std.time.timestamp() - stat.mtime().tv_sec > seconds_cache_valid; + } + }; + + if (is_stale) { + target_package_json.close(); + // If delete fails, oh well. Hope installation takes care of it. 
+ dir_fd.asDir().deleteTree(subpath_z) catch {}; + return error.NeedToInstall; + } + defer target_package_json.close(); const package_json_contents = try target_package_json.readToEndAlloc(bundler.allocator, std.math.maxInt(u32)); @@ -158,7 +193,7 @@ pub const BunxCommand = struct { }; } - fn exit_with_usage() noreturn { + fn exitWithUsage() noreturn { Command.Tag.printHelp(.BunxCommand, false); Global.exit(1); } @@ -202,7 +237,7 @@ pub const BunxCommand = struct { // check if package_name_for_update_request is empty string or " " if (package_name_for_update_request[0].len == 0) { - exit_with_usage(); + exitWithUsage(); } const update_requests = bun.PackageManager.UpdateRequest.parse( @@ -214,7 +249,7 @@ pub const BunxCommand = struct { ); if (update_requests.len == 0) { - exit_with_usage(); + exitWithUsage(); } // this shouldn't happen @@ -321,28 +356,56 @@ pub const BunxCommand = struct { ctx.allocator.free(PATH_FOR_BIN_DIRS); } } - if (PATH.len > 0) { - PATH = try std.fmt.allocPrint( + + // The bunx cache path is at the following location + // + // /bunx--/node_modules/.bin/ + // + // Reasoning: + // - Prefix with "bunx" to identify the bunx cache, make it easier to "rm -r" + // - Suffix would not work because scoped packages have a "/" in them, and + // before Bun 1.1 this was practically impossible to clear the cache manually. + // It was easier to just remove the entire temp directory. + // - Use the uid to prevent conflicts between users. If the paths were the same + // across users, you run into permission conflicts + // - If you set permission to 777, you run into a potential attack vector + // where a user can replace the directory with malicious code. + const uid = if (bun.Environment.isPosix) bun.C.getuid() else windowsUserUniqueId(); + PATH = switch (PATH.len > 0) { + inline else => |path_is_nonzero| try std.fmt.allocPrint( ctx.allocator, - bun.pathLiteral("{s}/{s}--bunx/node_modules/.bin:{s}"), - .{ temp_dir, package_fmt, PATH }, - ); - } else { - PATH = try std.fmt.allocPrint( - ctx.allocator, - bun.pathLiteral("{s}/{s}--bunx/node_modules/.bin"), - .{ temp_dir, package_fmt }, - ); - } + bun.pathLiteral("{s}/bunx-{d}-{s}/node_modules/.bin{s}{s}"), + .{ + temp_dir, + uid, + package_fmt, + if (path_is_nonzero) ":" else "", + PATH, + }, + ), + }; + try this_bundler.env.map.put("PATH", PATH); - const bunx_cache_dir = PATH[0 .. temp_dir.len + "/--bunx".len + package_fmt.len]; + const bunx_cache_dir = PATH[0 .. 
temp_dir.len + + "/bunx--".len + + package_fmt.len + + if (Environment.isPosix) + std.fmt.count("{d}", .{uid}) + else + 0]; var absolute_in_cache_dir_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - var absolute_in_cache_dir = std.fmt.bufPrint(&absolute_in_cache_dir_buf, bun.pathLiteral("{s}/node_modules/.bin/{s}"), .{ bunx_cache_dir, initial_bin_name }) catch unreachable; + var absolute_in_cache_dir = std.fmt.bufPrint( + &absolute_in_cache_dir_buf, + bun.pathLiteral("{s}/node_modules/.bin/{s}"), + .{ bunx_cache_dir, initial_bin_name }, + ) catch unreachable; const passthrough = passthrough_list.items; - if (update_request.version.literal.isEmpty() or update_request.version.tag != .dist_tag) { + var do_cache_bust = update_request.version.tag == .dist_tag; + + if (update_request.version.literal.isEmpty() or update_request.version.tag != .dist_tag) try_run_existing: { var destination_: ?[:0]const u8 = null; // Only use the system-installed version if there is no version specified @@ -365,7 +428,48 @@ pub const BunxCommand = struct { absolute_in_cache_dir, )) |destination| { const out = bun.asByteSlice(destination); - _ = try Run.runBinary( + + // If this directory was installed by bunx, we want to perform cache invalidation on it + // this way running `bunx hello` will update hello automatically to the latest version + if (bun.strings.hasPrefix(out, bunx_cache_dir)) { + const is_stale = is_stale: { + if (Environment.isWindows) { + const fd = bun.sys.openat(bun.invalid_fd, destination, std.os.O.RDONLY, 0).unwrap() catch { + // if we cant open this, we probably will just fail when we run it + // and that error message is likely going to be better than the one from `bun add` + break :is_stale false; + }; + defer _ = bun.sys.close(fd); + + var io_status_block: std.os.windows.IO_STATUS_BLOCK = undefined; + var info: std.os.windows.FILE_BASIC_INFORMATION = undefined; + const rc = std.os.windows.ntdll.NtQueryInformationFile(fd.cast(), &io_status_block, &info, @sizeOf(std.os.windows.FILE_BASIC_INFORMATION), .FileBasicInformation); + switch (rc) { + .SUCCESS => { + const time = std.os.windows.fromSysTime(info.LastWriteTime); + const now = std.time.nanoTimestamp(); + break :is_stale (now - time > nanoseconds_cache_valid); + }, + // treat failures to stat as stale + else => break :is_stale true, + } + } else { + var stat: std.os.Stat = undefined; + const rc = std.c.stat(destination, &stat); + if (rc != 0) { + break :is_stale true; + } + break :is_stale std.time.timestamp() - stat.mtime().tv_sec > seconds_cache_valid; + } + }; + + if (is_stale) { + do_cache_bust = true; + break :try_run_existing; + } + } + + try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), this_bundler.fs.top_level_dir, @@ -373,11 +477,12 @@ pub const BunxCommand = struct { passthrough, null, ); - // we are done! - Global.exit(0); + // runBinary is noreturn + comptime unreachable; } // 2. The "bin" is possibly not the same as the package name, so we load the package.json to figure out what "bin" to use + // TODO: root_dir_fd was observed on Windows to be zero, which is incorrect. 
figure out why const root_dir_fd = root_dir_info.getFileDescriptor(); if (root_dir_fd != .zero) { if (getBinName(&this_bundler, root_dir_fd, bunx_cache_dir, initial_bin_name)) |package_name_for_bin| { @@ -402,7 +507,7 @@ pub const BunxCommand = struct { absolute_in_cache_dir, )) |destination| { const out = bun.asByteSlice(destination); - _ = try Run.runBinary( + try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), this_bundler.fs.top_level_dir, @@ -410,8 +515,8 @@ pub const BunxCommand = struct { passthrough, null, ); - // we are done! - Global.exit(0); + // runBinary is noreturn + comptime unreachable; } } } else |err| { @@ -423,15 +528,7 @@ pub const BunxCommand = struct { } } - const bunx_install_dir_path = try std.fmt.allocPrint( - ctx.allocator, - "{s}/{s}--bunx", - .{ temp_dir, package_fmt }, - ); - - // TODO: fix this after zig upgrade - const bunx_install_iterable_dir = try std.fs.cwd().makeOpenPath(bunx_install_dir_path, .{}); - var bunx_install_dir = bunx_install_iterable_dir; + const bunx_install_dir = try std.fs.cwd().makeOpenPath(bunx_cache_dir, .{}); create_package_json: { // create package.json, but only if it doesn't exist @@ -441,18 +538,28 @@ pub const BunxCommand = struct { } var args_buf = [_]string{ - try std.fs.selfExePathAlloc(ctx.allocator), "add", "--no-summary", + try std.fs.selfExePathAlloc(ctx.allocator), + "add", + "--no-summary", package_fmt, + + // the following two args are stripped off if `do_cache_bust` is false + // disable the manifest cache when a tag is specified // so that @latest is fetched from the registry - "--no-cache", + "--no-cache", + // forcefully re-install packages in this mode too + "--force", }; - const argv_to_use: []const string = args_buf[0 .. args_buf.len - @as(usize, @intFromBool(update_request.version.tag != .dist_tag))]; + const argv_to_use: []const string = args_buf[0 .. args_buf.len - 2 * @as(usize, @intFromBool(!do_cache_bust))]; var child_process = std.ChildProcess.init(argv_to_use, default_allocator); - child_process.cwd = bunx_install_dir_path; child_process.cwd_dir = bunx_install_dir; + // https://github.com/ziglang/zig/issues/5190 + if (Environment.isWindows) { + child_process.cwd = bunx_cache_dir; + } const env_map = try this_bundler.env.map.cloneToEnvMap(ctx.allocator); child_process.env_map = &env_map; child_process.stderr_behavior = .Inherit; @@ -473,19 +580,15 @@ pub const BunxCommand = struct { Global.exit(exit_code); } }, - .Signal => |signal| { - Global.exit(@as(u7, @truncate(signal))); + .Signal, .Stopped => |signal| { + Global.raiseIgnoringPanicHandler(signal); }, - .Stopped => |signal| { - Global.exit(@as(u7, @truncate(signal))); - }, - // shouldn't happen - else => { + .Unknown => { Global.exit(1); }, } - absolute_in_cache_dir = std.fmt.bufPrint(&absolute_in_cache_dir_buf, "{s}/node_modules/.bin/{s}", .{ bunx_cache_dir, initial_bin_name }) catch unreachable; + absolute_in_cache_dir = std.fmt.bufPrint(&absolute_in_cache_dir_buf, bun.pathLiteral("{s}/node_modules/.bin/{s}"), .{ bunx_cache_dir, initial_bin_name }) catch unreachable; // Similar to "npx": // @@ -498,7 +601,7 @@ pub const BunxCommand = struct { absolute_in_cache_dir, )) |destination| { const out = bun.asByteSlice(destination); - _ = try Run.runBinary( + try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), this_bundler.fs.top_level_dir, @@ -506,8 +609,8 @@ pub const BunxCommand = struct { passthrough, null, ); - // we are done! 
- Global.exit(0); + // runBinary is noreturn + comptime unreachable; } // 2. The "bin" is possibly not the same as the package name, so we load the package.json to figure out what "bin" to use @@ -522,7 +625,7 @@ pub const BunxCommand = struct { absolute_in_cache_dir, )) |destination| { const out = bun.asByteSlice(destination); - _ = try Run.runBinary( + try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), this_bundler.fs.top_level_dir, @@ -530,8 +633,8 @@ pub const BunxCommand = struct { passthrough, null, ); - // we are done! - Global.exit(0); + // runBinary is noreturn + comptime unreachable; } } } else |_| {} @@ -540,3 +643,25 @@ pub const BunxCommand = struct { Global.exit(1); } }; + +extern fn GetUserNameW( + lpBuffer: bun.windows.LPWSTR, + pcbBuffer: bun.windows.LPDWORD, +) bun.windows.BOOL; + +/// Is not the actual UID of the user, but just a hash of username. +fn windowsUserUniqueId() u32 { + // https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-tsch/165836c1-89d7-4abb-840d-80cf2510aa3e + // UNLEN + 1 + var buf: [257]u16 = undefined; + var size: u32 = buf.len; + if (GetUserNameW(@ptrCast(&buf), &size) == 0) { + if (Environment.isDebug) std.debug.panic("GetUserNameW failed: {}", .{bun.windows.GetLastError()}); + return 0; + } + const name = buf[0..size]; + if (Environment.isWindows) { + Output.scoped(.windowsUserUniqueId, false)("username: {}", .{std.unicode.fmtUtf16le(name)}); + } + return bun.hash32(std.mem.sliceAsBytes(name)); +} diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index bd7cf01124..4a96db87ce 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -413,7 +413,7 @@ pub const RunCommand = struct { env: *DotEnv.Loader, passthrough: []const string, original_script_for_bun_run: ?[]const u8, - ) !bool { + ) !noreturn { var argv_ = [_]string{executable}; var argv: []const string = &argv_; @@ -509,8 +509,6 @@ pub const RunCommand = struct { Global.exit(1); }, } - - return true; } pub fn ls(ctx: Command.Context) !void { diff --git a/src/fd.zig b/src/fd.zig index 28cec3987c..334d62107d 100644 --- a/src/fd.zig +++ b/src/fd.zig @@ -303,13 +303,18 @@ pub const FDImpl = packed struct { } pub fn format(this: FDImpl, comptime fmt: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - if (fmt.len == 1 and fmt[0] == 'd') { - try writer.print("{d}", .{this.system()}); - return; - } - if (fmt.len != 0) { - @compileError("invalid format string for FDImpl.format. must be either '' or 'd'"); + // The reason for this error is because formatting FD as an integer on windows is + // ambiguous and almost certainly a mistake. You probably meant to format fd.cast(). + // + // Remember this formatter will + // - on posix, print the numebr + // - on windows, print if it is a handle or a libuv file descriptor + // - in debug on all platforms, print the path of the file descriptor + // + // Not having this error caused a linux+debug only crash in bun.sys.getFdPath because + // we forgot to change the thing being printed to "fd.cast()" when FDImpl was introduced. + @compileError("invalid format string for FDImpl.format. 
must be empty like '{}'"); } if (!this.isValid()) { diff --git a/src/glob.zig b/src/glob.zig index d7b18edff8..df0d4c8289 100644 --- a/src/glob.zig +++ b/src/glob.zig @@ -339,7 +339,7 @@ pub fn GlobWalker_( }; }; - log("Transition(dirpath={s}, fd={d}, component_idx={d})", .{ dir_path, fd, component_idx }); + log("Transition(dirpath={s}, fd={}, component_idx={d})", .{ dir_path, fd, component_idx }); this.iter_state.directory.fd = fd; const iterator = DirIterator.iterate(fd.asDir(), .u8); diff --git a/src/io/io.zig b/src/io/io.zig index 1aef8995c0..d2d8494504 100644 --- a/src/io/io.zig +++ b/src/io/io.zig @@ -192,7 +192,7 @@ pub const Loop = struct { const current_events: []std.os.linux.epoll_event = events[0..rc]; if (rc != 0) { - log("epoll_wait({d}) = {d}", .{ this.pollfd(), rc }); + log("epoll_wait({}) = {d}", .{ this.pollfd(), rc }); } for (current_events) |event| { @@ -743,7 +743,7 @@ pub const Poll = struct { fd: bun.FileDescriptor, kqueue_event: *std.os.system.kevent64_s, ) void { - log("register({s}, {d})", .{ @tagName(action), fd }); + log("register({s}, {})", .{ @tagName(action), fd }); defer { switch (comptime action) { .readable => poll.flags.insert(Flags.poll_readable), @@ -873,7 +873,7 @@ pub const Poll = struct { pub fn registerForEpoll(this: *Poll, tag: Pollable.Tag, loop: *Loop, comptime flag: Flags, one_shot: bool, fd: bun.FileDescriptor) JSC.Maybe(void) { const watcher_fd = loop.pollfd(); - log("register: {s} ({d})", .{ @tagName(flag), fd }); + log("register: {s} ({})", .{ @tagName(flag), fd }); std.debug.assert(fd != bun.invalid_fd); diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig index 24d4c31442..36cb8a70c6 100644 --- a/src/resolver/resolve_path.zig +++ b/src/resolver/resolve_path.zig @@ -2119,7 +2119,9 @@ pub const PosixToWinNormalizer = struct { const source_root = windowsFilesystemRoot(source_dir); @memcpy(buf[0..source_root.len], source_root); @memcpy(buf[source_root.len..][0 .. maybe_posix_path.len - 1], maybe_posix_path[1..]); - return buf[0 .. source_root.len + maybe_posix_path.len - 1]; + const res = buf[0 .. source_root.len + maybe_posix_path.len - 1]; + std.debug.assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res)); + return res; } } } @@ -2142,7 +2144,9 @@ pub const PosixToWinNormalizer = struct { const source_root = windowsFilesystemRoot(cwd); std.debug.assert(source_root.ptr == source_root.ptr); @memcpy(buf[source_root.len..][0 .. maybe_posix_path.len - 1], maybe_posix_path[1..]); - return buf[0 .. source_root.len + maybe_posix_path.len - 1]; + const res = buf[0 .. source_root.len + maybe_posix_path.len - 1]; + std.debug.assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res)); + return res; } } } @@ -2167,7 +2171,9 @@ pub const PosixToWinNormalizer = struct { std.debug.assert(source_root.ptr == source_root.ptr); @memcpy(buf[source_root.len..][0 .. maybe_posix_path.len - 1], maybe_posix_path[1..]); buf[source_root.len + maybe_posix_path.len - 1] = 0; - return buf[0 .. source_root.len + maybe_posix_path.len - 1 :0]; + const res = buf[0 .. 
source_root.len + maybe_posix_path.len - 1 :0]; + std.debug.assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res)); + return res; } } } diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 15ca1d79a3..a87015bbc7 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -2118,7 +2118,7 @@ pub const Resolver = struct { dir_entries_ptr.fd = bun.toFD(open_dir.fd); } - bun.fs.debug("readdir({d}, {s}) = {d}", .{ bun.toFD(open_dir.fd), dir_path, dir_entries_ptr.data.count() }); + bun.fs.debug("readdir({}, {s}) = {d}", .{ bun.toFD(open_dir.fd), dir_path, dir_entries_ptr.data.count() }); dir_entries_option = rfs.entries.put(&cached_dir_entry_result, .{ .entries = dir_entries_ptr, @@ -2779,7 +2779,7 @@ pub const Resolver = struct { dir_entries_option = try rfs.entries.put(&cached_dir_entry_result, .{ .entries = dir_entries_ptr, }); - bun.fs.debug("readdir({d}, {s}) = {d}", .{ bun.toFD(open_dir.fd), dir_path, dir_entries_ptr.data.count() }); + bun.fs.debug("readdir({}, {s}) = {d}", .{ bun.toFD(open_dir.fd), dir_path, dir_entries_ptr.data.count() }); } // We must initialize it as empty so that the result index is correct. diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index e46e3f0fef..c3458d6c17 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -7698,7 +7698,7 @@ pub fn MaybeChild(comptime T: type) type { pub fn closefd(fd: bun.FileDescriptor) void { if (Syscall.close2(fd)) |err| { _ = err; - log("ERR closefd: {d}\n", .{fd}); + log("ERR closefd: {}\n", .{fd}); // stderr_mutex.lock(); // defer stderr_mutex.unlock(); // const stderr = std.io.getStdErr().writer(); diff --git a/src/string_immutable.zig b/src/string_immutable.zig index b58ad59990..0efe7e6019 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -1827,11 +1827,11 @@ pub fn assertIsValidWindowsPath(comptime T: type, path: []const T) void { if (bun.path.Platform.windows.isAbsoluteT(T, path) and isWindowsAbsolutePathMissingDriveLetter(T, path)) { - std.debug.panic("Do not pass posix paths to windows APIs, was given '{s}' (missing a root like 'C:\\', see PosixToWinNormalizer for why this is an assertion)", .{ + std.debug.panic("Internal Error: Do not pass posix paths to Windows APIs, was given '{s}'" ++ if (Environment.isDebug) " (missing a root like 'C:\\', see PosixToWinNormalizer for why this is an assertion)" else ". Please open an issue on GitHub with a reproduction.", .{ if (T == u8) path else std.unicode.fmtUtf16le(path), }); } - if (hasPrefixComptimeType(T, path, ":/")) { + if (hasPrefixComptimeType(T, path, ":/") and Environment.isDebug) { std.debug.panic("Path passed to windows API '{s}' is almost certainly invalid. 
Where did the drive letter go?", .{ if (T == u8) path else std.unicode.fmtUtf16le(path), }); diff --git a/src/sys.zig b/src/sys.zig index 2a45295483..a7eab5a02e 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -412,7 +412,7 @@ pub fn fstat(fd: bun.FileDescriptor) Maybe(bun.Stat) { const rc = fstatSym(fd.cast(), &stat_); if (comptime Environment.allow_assert) - log("fstat({d}) = {d}", .{ fd, rc }); + log("fstat({}) = {d}", .{ fd, rc }); if (Maybe(bun.Stat).errnoSys(rc, .fstat)) |err| return err; return Maybe(bun.Stat){ .result = stat_ }; @@ -758,12 +758,12 @@ pub fn openFileAtWindowsNtPath( // - access_mask probably needs w.SYNCHRONIZE, // - options probably needs w.FILE_SYNCHRONOUS_IO_NONALERT // - disposition probably needs w.FILE_OPEN - bun.Output.debugWarn("NtCreateFile({d}, {}) = {s} (file) = {d}\nYou are calling this function with the wrong flags!!!", .{ dir, bun.fmt.fmtUTF16(path), @tagName(rc), @intFromPtr(result) }); + bun.Output.debugWarn("NtCreateFile({}, {}) = {s} (file) = {d}\nYou are calling this function with the wrong flags!!!", .{ dir, bun.fmt.fmtUTF16(path), @tagName(rc), @intFromPtr(result) }); } else if (rc == .OBJECT_PATH_SYNTAX_BAD or rc == .OBJECT_NAME_INVALID) { // See above comment. For absolute paths you must have \??\ at the start. - bun.Output.debugWarn("NtCreateFile({d}, {}) = {s} (file) = {d}\nYou are calling this function without normalizing the path correctly!!!", .{ dir, bun.fmt.fmtUTF16(path), @tagName(rc), @intFromPtr(result) }); + bun.Output.debugWarn("NtCreateFile({}, {}) = {s} (file) = {d}\nYou are calling this function without normalizing the path correctly!!!", .{ dir, bun.fmt.fmtUTF16(path), @tagName(rc), @intFromPtr(result) }); } else { - log("NtCreateFile({d}, {}) = {s} (file) = {d}", .{ dir, bun.fmt.fmtUTF16(path), @tagName(rc), @intFromPtr(result) }); + log("NtCreateFile({}, {}) = {s} (file) = {d}", .{ dir, bun.fmt.fmtUTF16(path), @tagName(rc), @intFromPtr(result) }); } } @@ -908,7 +908,7 @@ pub fn openatOSPath(dirfd: bun.FileDescriptor, file_path: bun.OSPathSliceZ, flag // https://opensource.apple.com/source/xnu/xnu-7195.81.3/libsyscall/wrappers/open-base.c const rc = system.@"openat$NOCANCEL"(dirfd.cast(), file_path.ptr, @as(c_uint, @intCast(flags)), @as(c_int, @intCast(perm))); if (comptime Environment.allow_assert) - log("openat({d}, {s}) = {d}", .{ dirfd, bun.sliceTo(file_path, 0), rc }); + log("openat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(file_path, 0), rc }); return Maybe(bun.FileDescriptor).errnoSys(rc, .open) orelse .{ .result = bun.toFD(rc) }; } else if (comptime Environment.isWindows) { @@ -918,7 +918,7 @@ pub fn openatOSPath(dirfd: bun.FileDescriptor, file_path: bun.OSPathSliceZ, flag while (true) { const rc = Syscall.system.openat(dirfd.cast(), file_path, flags, perm); if (comptime Environment.allow_assert) - log("openat({d}, {s}) = {d}", .{ dirfd, bun.sliceTo(file_path, 0), rc }); + log("openat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(file_path, 0), rc }); return switch (Syscall.getErrno(rc)) { .SUCCESS => .{ .result = bun.toFD(rc) }, .INTR => continue, @@ -984,7 +984,7 @@ pub fn close(fd: bun.FileDescriptor) ?Syscall.Error { pub fn close2(fd: bun.FileDescriptor) ?Syscall.Error { if (fd == bun.STDOUT_FD or fd == bun.STDERR_FD or fd == bun.STDIN_FD) { - log("close({d}) SKIPPED", .{fd}); + log("close({}) SKIPPED", .{fd}); return null; } @@ -1008,7 +1008,7 @@ pub fn write(fd: bun.FileDescriptor, bytes: []const u8) Maybe(usize) { return switch (Environment.os) { .mac => { const rc = system.@"write$NOCANCEL"(fd.cast(), bytes.ptr, 
adjusted_len); - log("write({d}, {d}) = {d}", .{ fd, adjusted_len, rc }); + log("write({}, {d}) = {d}", .{ fd, adjusted_len, rc }); if (Maybe(usize).errnoSysFd(rc, .write, fd)) |err| { return err; @@ -1019,7 +1019,7 @@ pub fn write(fd: bun.FileDescriptor, bytes: []const u8) Maybe(usize) { .linux => { while (true) { const rc = sys.write(fd.cast(), bytes.ptr, adjusted_len); - log("write({d}, {d}) = {d}", .{ fd, adjusted_len, rc }); + log("write({}, {d}) = {d}", .{ fd, adjusted_len, rc }); if (Maybe(usize).errnoSysFd(rc, .write, fd)) |err| { if (err.getErrno() == .INTR) continue; @@ -1040,7 +1040,7 @@ pub fn write(fd: bun.FileDescriptor, bytes: []const u8) Maybe(usize) { &bytes_written, null, ); - log("WriteFile({d}, {d}) = {d} (written: {d})", .{ @intFromPtr(fd.cast()), adjusted_len, rc, bytes_written }); + log("WriteFile({}, {d}) = {d} (written: {d})", .{ fd, adjusted_len, rc, bytes_written }); if (rc == 0) { return .{ .err = Syscall.Error{ @@ -1068,7 +1068,7 @@ pub fn writev(fd: bun.FileDescriptor, buffers: []std.os.iovec) Maybe(usize) { if (comptime Environment.isMac) { const rc = writev_sym(fd.cast(), @as([*]std.os.iovec_const, @ptrCast(buffers.ptr)), @as(i32, @intCast(buffers.len))); if (comptime Environment.allow_assert) - log("writev({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("writev({}, {d}) = {d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .writev, fd)) |err| { return err; @@ -1079,7 +1079,7 @@ pub fn writev(fd: bun.FileDescriptor, buffers: []std.os.iovec) Maybe(usize) { while (true) { const rc = writev_sym(fd.cast(), @as([*]std.os.iovec_const, @ptrCast(buffers.ptr)), buffers.len); if (comptime Environment.allow_assert) - log("writev({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("writev({}, {d}) = {d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .writev, fd)) |err| { if (err.getErrno() == .INTR) continue; @@ -1099,7 +1099,7 @@ pub fn pwritev(fd: bun.FileDescriptor, buffers: []const bun.PlatformIOVecConst, if (comptime Environment.isMac) { const rc = pwritev_sym(fd.cast(), buffers.ptr, @as(i32, @intCast(buffers.len)), position); if (comptime Environment.allow_assert) - log("pwritev({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("pwritev({}, {d}) = {d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .pwritev, fd)) |err| { return err; @@ -1110,7 +1110,7 @@ pub fn pwritev(fd: bun.FileDescriptor, buffers: []const bun.PlatformIOVecConst, while (true) { const rc = pwritev_sym(fd.cast(), buffers.ptr, buffers.len, position); if (comptime Environment.allow_assert) - log("pwritev({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("pwritev({}, {d}) = {d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .pwritev, fd)) |err| { if (err.getErrno() == .INTR) continue; @@ -1133,7 +1133,7 @@ pub fn readv(fd: bun.FileDescriptor, buffers: []std.os.iovec) Maybe(usize) { if (comptime Environment.isMac) { const rc = readv_sym(fd.cast(), buffers.ptr, @as(i32, @intCast(buffers.len))); if (comptime Environment.allow_assert) - log("readv({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("readv({}, {d}) = {d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .readv, fd)) |err| { return err; @@ -1144,7 +1144,7 @@ pub fn readv(fd: bun.FileDescriptor, buffers: []std.os.iovec) Maybe(usize) { while (true) { const rc = readv_sym(fd.cast(), buffers.ptr, buffers.len); if (comptime Environment.allow_assert) - log("readv({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("readv({}, {d}) = 
{d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .readv, fd)) |err| { if (err.getErrno() == .INTR) continue; @@ -1167,7 +1167,7 @@ pub fn preadv(fd: bun.FileDescriptor, buffers: []std.os.iovec, position: isize) if (comptime Environment.isMac) { const rc = preadv_sym(fd.cast(), buffers.ptr, @as(i32, @intCast(buffers.len)), position); if (comptime Environment.allow_assert) - log("preadv({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("preadv({}, {d}) = {d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .preadv, fd)) |err| { return err; @@ -1178,7 +1178,7 @@ pub fn preadv(fd: bun.FileDescriptor, buffers: []std.os.iovec, position: isize) while (true) { const rc = preadv_sym(fd.cast(), buffers.ptr, buffers.len, position); if (comptime Environment.allow_assert) - log("preadv({d}, {d}) = {d}", .{ fd, veclen(buffers), rc }); + log("preadv({}, {d}) = {d}", .{ fd, veclen(buffers), rc }); if (Maybe(usize).errnoSysFd(rc, .preadv, fd)) |err| { if (err.getErrno() == .INTR) continue; @@ -1286,7 +1286,7 @@ pub fn read(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { .mac => { const rc = system.@"read$NOCANCEL"(fd.cast(), buf.ptr, adjusted_len); - log("read({d}, {d}) = {d} ({any})", .{ fd, adjusted_len, rc, debug_timer }); + log("read({}, {d}) = {d} ({any})", .{ fd, adjusted_len, rc, debug_timer }); if (Maybe(usize).errnoSysFd(rc, .read, fd)) |err| { return err; @@ -1297,7 +1297,7 @@ pub fn read(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { .linux => { while (true) { const rc = sys.read(fd.cast(), buf.ptr, adjusted_len); - log("read({d}, {d}) = {d} ({any})", .{ fd, adjusted_len, rc, debug_timer }); + log("read({}, {d}) = {d} ({any})", .{ fd, adjusted_len, rc, debug_timer }); if (Maybe(usize).errnoSysFd(rc, .read, fd)) |err| { if (err.getErrno() == .INTR) continue; @@ -1321,7 +1321,7 @@ pub fn recv(fd: bun.FileDescriptor, buf: []u8, flag: u32) Maybe(usize) { if (comptime Environment.isMac) { const rc = system.@"recvfrom$NOCANCEL"(fd.cast(), buf.ptr, adjusted_len, flag, null, null); - log("recv({d}, {d}, {d}) = {d}", .{ fd, adjusted_len, flag, rc }); + log("recv({}, {d}, {d}) = {d}", .{ fd, adjusted_len, flag, rc }); if (Maybe(usize).errnoSys(rc, .recv)) |err| { return err; @@ -1331,7 +1331,7 @@ pub fn recv(fd: bun.FileDescriptor, buf: []u8, flag: u32) Maybe(usize) { } else { while (true) { const rc = linux.recvfrom(fd.cast(), buf.ptr, adjusted_len, flag | os.SOCK.CLOEXEC | linux.MSG.CMSG_CLOEXEC, null, null); - log("recv({d}, {d}, {d}) = {d}", .{ fd, adjusted_len, flag, rc }); + log("recv({}, {d}, {d}) = {d}", .{ fd, adjusted_len, flag, rc }); if (Maybe(usize).errnoSysFd(rc, .recv, fd)) |err| { if (err.getErrno() == .INTR) continue; @@ -1435,11 +1435,11 @@ pub fn renameat(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.Fi if (Maybe(void).errnoSys(sys.renameat(from_dir.cast(), from, to_dir.cast(), to), .rename)) |err| { if (err.getErrno() == .INTR) continue; if (comptime Environment.allow_assert) - log("renameat({d}, {s}, {d}, {s}) = {d}", .{ from_dir, from, to_dir, to, @intFromEnum(err.getErrno()) }); + log("renameat({}, {s}, {}, {s}) = {d}", .{ from_dir, from, to_dir, to, @intFromEnum(err.getErrno()) }); return err; } if (comptime Environment.allow_assert) - log("renameat({d}, {s}, {d}, {s}) = {d}", .{ from_dir, from, to_dir, to, 0 }); + log("renameat({}, {s}, {}, {s}) = {d}", .{ from_dir, from, to_dir, to, 0 }); return Maybe(void).success; } } @@ -1531,11 +1531,11 @@ pub fn unlinkatWithFlags(dirfd: bun.FileDescriptor, to: anytype, flags: 
c_uint) if (Maybe(void).errnoSys(sys.unlinkat(dirfd.cast(), to, flags), .unlink)) |err| { if (err.getErrno() == .INTR) continue; if (comptime Environment.allow_assert) - log("unlinkat({d}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); + log("unlinkat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); return err; } if (comptime Environment.allow_assert) - log("unlinkat({d}, {s}) = 0", .{ dirfd, bun.sliceTo(to, 0) }); + log("unlinkat({}, {s}) = 0", .{ dirfd, bun.sliceTo(to, 0) }); return Maybe(void).success; } unreachable; @@ -1549,11 +1549,11 @@ pub fn unlinkat(dirfd: bun.FileDescriptor, to: anytype) Maybe(void) { if (Maybe(void).errnoSys(sys.unlinkat(dirfd.cast(), to, 0), .unlink)) |err| { if (err.getErrno() == .INTR) continue; if (comptime Environment.allow_assert) - log("unlinkat({d}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); + log("unlinkat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); return err; } if (comptime Environment.allow_assert) - log("unlinkat({d}, {s}) = 0", .{ dirfd, bun.sliceTo(to, 0) }); + log("unlinkat({}, {s}) = 0", .{ dirfd, bun.sliceTo(to, 0) }); return Maybe(void).success; } } @@ -1582,7 +1582,7 @@ pub fn getFdPath(fd: bun.FileDescriptor, out_buffer: *[MAX_PATH_BYTES]u8) Maybe( .linux => { // TODO: alpine linux may not have /proc/self var procfs_buf: ["/proc/self/fd/-2147483648".len:0]u8 = undefined; - const proc_path = std.fmt.bufPrintZ(procfs_buf[0..], "/proc/self/fd/{d}\x00", .{fd}) catch unreachable; + const proc_path = std.fmt.bufPrintZ(procfs_buf[0..], "/proc/self/fd/{d}\x00", .{fd.cast()}) catch unreachable; return switch (readlink(proc_path, out_buffer)) { .err => |err| return .{ .err = err }, @@ -1943,7 +1943,7 @@ pub fn dup(fd: bun.FileDescriptor) Maybe(bun.FileDescriptor) { } const out = std.c.dup(fd.cast()); - log("dup({d}) = {d}", .{ fd.cast(), out }); + log("dup({}) = {d}", .{ fd, out }); return Maybe(bun.FileDescriptor).errnoSysFd(out, .dup, fd) orelse Maybe(bun.FileDescriptor){ .result = bun.toFD(out) }; } From a0be3cb2ff3f609f6d82b4951d886e6824be01f8 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 20 Feb 2024 18:58:33 -0800 Subject: [PATCH 05/21] Slightly reduce code duplication in expect (#9018) Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com> --- src/bun.js/test/expect.zig | 370 ++++++------------------------------- 1 file changed, 54 insertions(+), 316 deletions(-) diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index 031e1ffc4a..990c1eb881 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -402,11 +402,7 @@ pub const Expect = struct { if (not) { const signature = comptime getSignature("pass", "", true); const fmt = signature ++ "\n\n{s}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{msg.slice()}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{msg.slice()}); + globalObject.throwPretty(fmt, .{msg.slice()}); return .zero; } @@ -453,11 +449,7 @@ pub const Expect = struct { const signature = comptime getSignature("fail", "", true); const fmt = signature ++ "\n\n{s}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{msg.slice()}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{msg.slice()}); + globalObject.throwPretty(fmt, .{msg.slice()}); return .zero; } @@ -493,11 +485,7 @@ pub const Expect = struct { if (not) 
{ const signature = comptime getSignature("toBe", "expected", true); const fmt = signature ++ "\n\nExpected: not {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{right.toFmt(globalObject, &formatter)}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{right.toFmt(globalObject, &formatter)}); + globalObject.throwPretty(fmt, .{right.toFmt(globalObject, &formatter)}); return .zero; } @@ -507,11 +495,7 @@ pub const Expect = struct { "\n\nIf this test should pass, replace \"toBe\" with \"toEqual\" or \"toStrictEqual\"" ++ "\n\nExpected: {any}\n" ++ "Received: serializes to the same string\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{right.toFmt(globalObject, &formatter)}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{right.toFmt(globalObject, &formatter)}); + globalObject.throwPretty(fmt, .{right.toFmt(globalObject, &formatter)}); return .zero; } @@ -523,23 +507,12 @@ pub const Expect = struct { .not = not, }; const fmt = comptime signature ++ "\n\n{any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(comptime Output.prettyFmt(fmt, true), .{diff_format}); - return .zero; - } - globalObject.throw(comptime Output.prettyFmt(fmt, false), .{diff_format}); + globalObject.throwPretty(fmt, .{diff_format}); return .zero; } const fmt = signature ++ "\n\nExpected: {any}\nReceived: {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ - right.toFmt(globalObject, &formatter), - left.toFmt(globalObject, &formatter), - }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{ + globalObject.throwPretty(fmt, .{ right.toFmt(globalObject, &formatter), left.toFmt(globalObject, &formatter), }); @@ -610,12 +583,7 @@ pub const Expect = struct { if (not) { const expected_line = "Expected length: not {d}\n"; const fmt = comptime getSignature("toHaveLength", "expected", true) ++ "\n\n" ++ expected_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{expected_length}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{expected_length}); + globalObject.throwPretty(fmt, .{expected_length}); return .zero; } @@ -623,12 +591,8 @@ pub const Expect = struct { const received_line = "Received length: {d}\n"; const fmt = comptime getSignature("toHaveLength", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_length, actual_length }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_length, actual_length }); + globalObject.throwPretty(fmt, .{ expected_length, actual_length }); return .zero; } @@ -726,12 +690,7 @@ pub const Expect = struct { const expected_line = "Expected to contain: {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toContain", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -777,12 +736,7 @@ pub const Expect = struct { const expected_line = "Expected to contain: {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime 
getSignature("toContainKey", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -846,12 +800,7 @@ pub const Expect = struct { const expected_line = "Expected to contain: {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toContainKeys", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -915,12 +864,7 @@ pub const Expect = struct { const expected_line = "Expected to contain: {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toContainAnyKeys", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -1052,23 +996,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeTruthy", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeTruthy", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -1092,23 +1026,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeUndefined", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeUndefined", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -1136,23 +1060,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeNaN", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + 
globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeNaN", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -1175,23 +1089,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeNull", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeNull", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -1214,23 +1118,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeDefined", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeDefined", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -1258,23 +1152,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeFalsy", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeFalsy", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -1351,21 +1235,13 @@ pub const Expect = struct { if (not) { const signature = comptime getSignature("toStrictEqual", "expected", true); const fmt = signature ++ "\n\n{any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{diff_formatter}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{diff_formatter}); + globalObject.throwPretty(fmt, .{diff_formatter}); return .zero; } const signature = comptime getSignature("toStrictEqual", "expected", false); const fmt = signature ++ "\n\n{any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), 
.{diff_formatter}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{diff_formatter}); + globalObject.throwPretty(fmt, .{diff_formatter}); return .zero; } @@ -1421,14 +1297,7 @@ pub const Expect = struct { const signature = comptime getSignature("toHaveProperty", "path, value", true); if (!received_property.isEmpty()) { const fmt = signature ++ "\n\nExpected path: {any}\n\nExpected value: not {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ - expected_property_path.toFmt(globalObject, &formatter), - expected_property.?.toFmt(globalObject, &formatter), - }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, true), .{ + globalObject.throwPretty(fmt, .{ expected_property_path.toFmt(globalObject, &formatter), expected_property.?.toFmt(globalObject, &formatter), }); @@ -1438,14 +1307,7 @@ pub const Expect = struct { const signature = comptime getSignature("toHaveProperty", "path", true); const fmt = signature ++ "\n\nExpected path: not {any}\n\nReceived value: {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ - expected_property_path.toFmt(globalObject, &formatter), - received_property.toFmt(globalObject, &formatter), - }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{ + globalObject.throwPretty(fmt, .{ expected_property_path.toFmt(globalObject, &formatter), received_property.toFmt(globalObject, &formatter), }); @@ -1463,24 +1325,13 @@ pub const Expect = struct { .globalObject = globalObject, }; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{diff_format}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{diff_format}); + globalObject.throwPretty(fmt, .{diff_format}); return .zero; } const fmt = signature ++ "\n\nExpected path: {any}\n\nExpected value: {any}\n\n" ++ "Unable to find property\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ - expected_property_path.toFmt(globalObject, &formatter), - expected_property.?.toFmt(globalObject, &formatter), - }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{ + globalObject.throwPretty(fmt, .{ expected_property_path.toFmt(globalObject, &formatter), expected_property.?.toFmt(globalObject, &formatter), }); @@ -1489,11 +1340,7 @@ pub const Expect = struct { const signature = comptime getSignature("toHaveProperty", "path", false); const fmt = signature ++ "\n\nExpected path: {any}\n\nUnable to find property\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{expected_property_path.toFmt(globalObject, &formatter)}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{expected_property_path.toFmt(globalObject, &formatter)}); + globalObject.throwPretty(fmt, .{expected_property_path.toFmt(globalObject, &formatter)}); return .zero; } @@ -1541,23 +1388,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeEven", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeEven", "", false) ++ "\n\n" ++ received_line; - if 
(Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -1613,12 +1450,8 @@ pub const Expect = struct { const expected_line = "Expected: not \\> {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeGreaterThan", "expected", true) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -1626,12 +1459,8 @@ pub const Expect = struct { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeGreaterThan", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(comptime Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -1687,23 +1516,14 @@ pub const Expect = struct { const expected_line = "Expected: not \\>= {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeGreaterThanOrEqual", "expected", true) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } const expected_line = "Expected: \\>= {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeGreaterThanOrEqual", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(comptime Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - globalObject.throw(comptime Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -1759,23 +1579,14 @@ pub const Expect = struct { const expected_line = "Expected: not \\< {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeLessThan", "expected", true) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } const expected_line = "Expected: \\< {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeLessThan", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(comptime Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - globalObject.throw(comptime Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -1831,23 +1642,14 @@ pub const Expect = struct { const 
expected_line = "Expected: not \\<= {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeLessThanOrEqual", "expected", true) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } const expected_line = "Expected: \\<= {any}\n"; const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeLessThanOrEqual", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(comptime Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - globalObject.throw(comptime Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } @@ -1925,23 +1727,13 @@ pub const Expect = struct { if (not) { const fmt = comptime getSignature("toBeCloseTo", "expected, precision", true) ++ suffix_fmt; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, received_fmt, precision, expected_diff, actual_diff }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, received_fmt, precision, expected_diff, actual_diff }); + globalObject.throwPretty(fmt, .{ expected_fmt, received_fmt, precision, expected_diff, actual_diff }); return .zero; } const fmt = comptime getSignature("toBeCloseTo", "expected, precision", false) ++ suffix_fmt; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, received_fmt, precision, expected_diff, actual_diff }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, received_fmt, precision, expected_diff, actual_diff }); + globalObject.throwPretty(fmt, .{ expected_fmt, received_fmt, precision, expected_diff, actual_diff }); return .zero; } @@ -1987,23 +1779,13 @@ pub const Expect = struct { if (not) { const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeOdd", "", true) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } const received_line = "Received: {any}\n"; const fmt = comptime getSignature("toBeOdd", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{value_fmt}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{value_fmt}); + globalObject.throwPretty(fmt, .{value_fmt}); return .zero; } @@ -2173,11 +1955,7 @@ pub const Expect = struct { expected_value.getClassName(globalObject, &expected_class); const received_message = result.getIfPropertyExistsImpl(globalObject, "message", 7); const fmt = signature ++ "\n\nExpected constructor: not {s}\n\nReceived message: {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_class, received_message.toFmt(globalObject, &formatter) }); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_class, received_message.toFmt(globalObject, &formatter) }); + 
globalObject.throwPretty(fmt, .{ expected_class, received_message.toFmt(globalObject, &formatter) }); return .zero; } @@ -2320,47 +2098,28 @@ pub const Expect = struct { if (expected_value.isEmpty() or expected_value.isUndefined()) { const fmt = comptime getSignature("toThrow", "", false) ++ "\n\n" ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{}); + globalObject.throwPretty(fmt, .{}); return .zero; } if (expected_value.isString()) { const expected_fmt = "\n\nExpected substring: {any}\n\n" ++ received_line; const fmt = signature ++ expected_fmt; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{expected_value.toFmt(globalObject, &formatter)}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{expected_value.toFmt(globalObject, &formatter)}); + globalObject.throwPretty(fmt, .{expected_value.toFmt(globalObject, &formatter)}); return .zero; } if (expected_value.isRegExp()) { const expected_fmt = "\n\nExpected pattern: {any}\n\n" ++ received_line; const fmt = signature ++ expected_fmt; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{expected_value.toFmt(globalObject, &formatter)}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{expected_value.toFmt(globalObject, &formatter)}); + globalObject.throwPretty(fmt, .{expected_value.toFmt(globalObject, &formatter)}); return .zero; } if (expected_value.get(globalObject, "message")) |expected_message| { const expected_fmt = "\n\nExpected message: {any}\n\n" ++ received_line; const fmt = signature ++ expected_fmt; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{expected_message.toFmt(globalObject, &formatter)}); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{expected_message.toFmt(globalObject, &formatter)}); + globalObject.throwPretty(fmt, .{expected_message.toFmt(globalObject, &formatter)}); return .zero; } @@ -2368,11 +2127,7 @@ pub const Expect = struct { var expected_class = ZigString.Empty; expected_value.getClassName(globalObject, &expected_class); const fmt = signature ++ expected_fmt; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{expected_class}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, true), .{expected_class}); + globalObject.throwPretty(fmt, .{expected_class}); return .zero; } @@ -3529,11 +3284,7 @@ pub const Expect = struct { if (not) { const signature = comptime getSignature("toSatisfy", "expected", true); const fmt = signature ++ "\n\nExpected: not {any}\n"; - if (Output.enable_ansi_colors) { - globalThis.throw(Output.prettyFmt(fmt, true), .{predicate.toFmt(globalThis, &formatter)}); - return .zero; - } - globalThis.throw(Output.prettyFmt(fmt, false), .{predicate.toFmt(globalThis, &formatter)}); + globalThis.throwPretty(fmt, .{predicate.toFmt(globalThis, &formatter)}); return .zero; } @@ -3541,15 +3292,7 @@ pub const Expect = struct { const fmt = signature ++ "\n\nExpected: {any}\nReceived: {any}\n"; - if (Output.enable_ansi_colors) { - globalThis.throw(Output.prettyFmt(fmt, true), .{ - predicate.toFmt(globalThis, &formatter), - value.toFmt(globalThis, &formatter), - }); - return .zero; - } - - globalThis.throw(Output.prettyFmt(fmt, false), .{ + globalThis.throwPretty(fmt, .{ predicate.toFmt(globalThis, &formatter), value.toFmt(globalThis, &formatter), 
}); @@ -3703,12 +3446,7 @@ pub const Expect = struct { const expected_line = "Expected constructor: not {any}\n"; const received_line = "Received value: {any}\n"; const fmt = comptime getSignature("toBeInstanceOf", "expected", true) ++ "\n\n" ++ expected_line ++ received_line; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{ expected_fmt, value_fmt }); - return .zero; - } - - globalObject.throw(Output.prettyFmt(fmt, false), .{ expected_fmt, value_fmt }); + globalObject.throwPretty(fmt, .{ expected_fmt, value_fmt }); return .zero; } From 30951d788d0bfc78f4b8b4b630d5d35608d670b3 Mon Sep 17 00:00:00 2001 From: cirospaciari Date: Wed, 21 Feb 2024 17:04:57 -0300 Subject: [PATCH 06/21] actually use the ref count in these places --- src/bun.js/api/server.zig | 5 ++--- src/bun.js/webcore/body.zig | 22 +++++++--------------- src/bun.js/webcore/streams.zig | 26 ++++++++++++-------------- 3 files changed, 21 insertions(+), 32 deletions(-) diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 11a7678f1f..7260371e42 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -2558,7 +2558,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (req.response_ptr) |resp| { if (resp.body.value == .Locked) { - resp.body.value.Locked.readable.?.done(); + resp.body.value.Locked.readable.?.done(req.server.globalThis); resp.body.value = .{ .Used = {} }; } } @@ -2618,7 +2618,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (req.response_ptr) |resp| { if (resp.body.value == .Locked) { - resp.body.value.Locked.readable.?.done(); + resp.body.value.Locked.readable.?.done(req.server.globalThis); resp.body.value = .{ .Used = {} }; } } @@ -2714,7 +2714,6 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp .Bytes => |byte_stream| { std.debug.assert(byte_stream.pipe.ctx == null); std.debug.assert(this.byte_stream == null); - if (this.resp == null) { // we don't have a response, so we can discard the stream stream.detachIfPossible(this.server.globalThis); diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index ef3ef9f78e..4babc8745b 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -167,7 +167,7 @@ pub const Body = struct { if (value.onStartBuffering != null) { if (readable.isDisturbed(globalThis)) { form_data.?.deinit(); - readable.value.unprotect(); + readable.detachIfPossible(globalThis); value.readable = null; value.action = .{ .none = {} }; return JSC.JSPromise.rejectedPromiseValue(globalThis, globalThis.createErrorInstance("ReadableStream is already used", .{})); @@ -191,7 +191,7 @@ pub const Body = struct { else => unreachable, }; value.promise.?.ensureStillAlive(); - readable.value.unprotect(); + readable.detachIfPossible(globalThis); // js now owns the memory value.readable = null; @@ -393,7 +393,7 @@ pub const Body = struct { .global = globalThis, }, }; - this.Locked.readable.?.value.protect(); + this.Locked.readable.?.incrementCount(); return value; }, @@ -580,7 +580,7 @@ pub const Body = struct { } pub fn fromReadableStreamWithoutLockCheck(readable: JSC.WebCore.ReadableStream, globalThis: *JSGlobalObject) Value { - readable.value.protect(); + readable.incrementCount(); return .{ .Locked = .{ .readable = readable, @@ -589,20 +589,12 @@ pub const Body = struct { }; } - pub fn fromReadableStream(readable: JSC.WebCore.ReadableStream, globalThis: *JSGlobalObject) Value { - if 
(readable.isLocked(globalThis)) { - return .{ .Error = ZigString.init("Cannot use a locked ReadableStream").toErrorInstance(globalThis) }; - } - - return fromReadableStreamWithoutLockCheck(readable, globalThis); - } - pub fn resolve(to_resolve: *Value, new: *Value, global: *JSGlobalObject) void { log("resolve", .{}); if (to_resolve.* == .Locked) { var locked = &to_resolve.Locked; if (locked.readable) |readable| { - readable.done(); + readable.done(global); locked.readable = null; } @@ -821,7 +813,7 @@ pub const Body = struct { } if (locked.readable) |readable| { - readable.done(); + readable.done(global); locked.readable = null; } // will be unprotected by body value deinit @@ -862,7 +854,7 @@ pub const Body = struct { this.Locked.deinit = true; if (this.Locked.readable) |*readable| { - readable.done(); + readable.done(this.Locked.global); } } diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index f21364ee16..84e309b970 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -51,6 +51,15 @@ pub const ReadableStream = struct { value: JSValue, ptr: Source, + pub fn incrementCount(this: *const ReadableStream) void { + switch (this.ptr) { + .Blob => |blob| blob.parent().incrementCount(), + .File => |file| file.parent().incrementCount(), + .Bytes => |bytes| bytes.parent().incrementCount(), + else => {}, + } + } + pub const Strong = struct { held: JSC.Strong = .{}, @@ -59,18 +68,7 @@ pub const ReadableStream = struct { } pub fn init(this: ReadableStream, global: *JSGlobalObject) Strong { - switch (this.ptr) { - .Blob => |stream| { - stream.parent().incrementCount(); - }, - .File => |stream| { - stream.parent().incrementCount(); - }, - .Bytes => |stream| { - stream.parent().incrementCount(); - }, - else => {}, - } + this.incrementCount(); return .{ .held = JSC.Strong.create(this.value, global), }; @@ -156,8 +154,8 @@ pub const ReadableStream = struct { return null; } - pub fn done(this: *const ReadableStream) void { - this.value.unprotect(); + pub fn done(this: *const ReadableStream, globalThis: *JSGlobalObject) void { + this.detachIfPossible(globalThis); } pub fn cancel(this: *const ReadableStream, globalThis: *JSGlobalObject) void { From a3293756c575ac9616ad049bb023706bbe018dc2 Mon Sep 17 00:00:00 2001 From: cirospaciari Date: Wed, 21 Feb 2024 17:18:44 -0300 Subject: [PATCH 07/21] make windows compile again --- src/bun.zig | 2 +- src/cli/run_command.zig | 33 +++++++++++++++++---------------- src/shell/interpreter.zig | 15 ++++++++++++++- 3 files changed, 32 insertions(+), 18 deletions(-) diff --git a/src/bun.zig b/src/bun.zig index 97a1ca55b2..8caa4d5cff 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -49,7 +49,7 @@ pub const allocators = @import("./allocators.zig"); pub const shell = struct { pub usingnamespace @import("./shell/shell.zig"); pub const ShellSubprocess = @import("./shell/subproc.zig").ShellSubprocess; - pub const ShellSubprocessMini = @import("./shell/subproc.zig").ShellSubprocessMini; + // pub const ShellSubprocessMini = @import("./shell/subproc.zig").ShellSubprocessMini; }; pub const Output = @import("./output.zig"); diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index df444fa777..8c9003e3ea 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -305,25 +305,26 @@ pub const RunCommand = struct { } if (Environment.isWindows and !use_native_shell) { - if (!silent) { - if (Environment.isDebug) { - Output.prettyError("[bun shell] ", .{}); - } - Output.prettyErrorln("$ {s}", .{combined_script}); - 
Output.flush(); - } + @panic("TODO: Windows shell support"); + // if (!silent) { + // if (Environment.isDebug) { + // Output.prettyError("[bun shell] ", .{}); + // } + // Output.prettyErrorln("$ {s}", .{combined_script}); + // Output.flush(); + // } - const mini = bun.JSC.MiniEventLoop.initGlobal(env); - bun.shell.ShellSubprocessMini.initAndRunFromSource(mini, name, combined_script) catch |err| { - if (!silent) { - Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ name, @errorName(err) }); - } + // const mini = bun.JSC.MiniEventLoop.initGlobal(env); + // bun.shell.ShellSubprocessMini.initAndRunFromSource(mini, name, combined_script) catch |err| { + // if (!silent) { + // Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ name, @errorName(err) }); + // } - Output.flush(); - Global.exit(1); - }; + // Output.flush(); + // Global.exit(1); + // }; - return true; + // return true; } var argv = [_]string{ diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 40773c923c..352ca8e9bf 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -2010,6 +2010,10 @@ pub const Interpreter = struct { .expansion = expansion, .result = std.ArrayList([:0]const u8).init(allocator), }; + if (bun.Environment.isWindows) { + // event loop here is js event loop + @panic("TODO SHELL WINDOWS!"); + } // this.ref.ref(this.event_loop.virtual_machine); this.ref.ref(this.event_loop); @@ -2050,6 +2054,10 @@ pub const Interpreter = struct { pub fn runFromMainThread(this: *This) void { print("runFromJS", .{}); + if (bun.Environment.isWindows) { + // event loop here is js event loop + @panic("TODO SHELL WINDOWS!"); + } this.expansion.onGlobWalkDone(this); // this.ref.unref(this.event_loop.virtual_machine); this.ref.unref(this.event_loop); @@ -7377,7 +7385,7 @@ pub const Interpreter = struct { /// it. 
IT DOES NOT CLOSE FILE DESCRIPTORS pub const BufferedWriter = struct { - writer: Writer = .{ + writer: Writer = if (bun.Environment.isWindows) .{} else .{ .close_fd = false, }, fd: bun.FileDescriptor = bun.invalid_fd, @@ -7689,6 +7697,11 @@ pub fn ShellTask( pub fn schedule(this: *@This()) void { print("schedule", .{}); + + if (bun.Environment.isWindows) { + // event loop here is js event loop + @panic("TODO SHELL WINDOWS!"); + } this.ref.ref(this.event_loop); WorkPool.schedule(&this.task); } From 411c7874f8b676710cad8161ce68cb7d3547f3f2 Mon Sep 17 00:00:00 2001 From: cirospaciari Date: Wed, 21 Feb 2024 17:59:54 -0300 Subject: [PATCH 08/21] more tests passing --- src/bun.js/api/server.zig | 4 +- test/js/bun/http/serve.test.ts | 116 ++++++++++++++++----------------- 2 files changed, 58 insertions(+), 62 deletions(-) diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 7260371e42..e9ae8c8d93 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -2158,7 +2158,6 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp .auto_close = false, .socket_fd = bun.invalid_fd, }; - this.response_buf_owned = .{ .items = result.result.buf, .capacity = result.result.buf.len }; this.resp.?.runCorkedWithType(*RequestContext, renderResponseBufferAndMetadata, this); } @@ -3047,8 +3046,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp var response: *JSC.WebCore.Response = this.response_ptr.?; var status = response.statusCode(); - var needs_content_range = this.flags.needs_content_range and this.sendfile.remain < this.blob.size(); - + var needs_content_range = this.flags.needs_content_range and this.sendfile.remain <= this.blob.size(); const size = if (needs_content_range) this.sendfile.remain else diff --git a/test/js/bun/http/serve.test.ts b/test/js/bun/http/serve.test.ts index 0a6c07e44a..551650a76e 100644 --- a/test/js/bun/http/serve.test.ts +++ b/test/js/bun/http/serve.test.ts @@ -59,7 +59,7 @@ afterAll(() => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.status).toBe(Number(statusCode)); expect(await response.text()).toBe("Foo Bar"); }, @@ -81,7 +81,7 @@ afterAll(() => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.status).toBe(500); expect(await response.text()).toBe("Error!"); }, @@ -98,7 +98,7 @@ it("should display a welcome message when the response value type is incorrect", }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); const text = await response.text(); expect(text).toContain("Welcome to Bun!"); }, @@ -122,7 +122,7 @@ it("request.signal works in trivial case", async () => { }, async server => { try { - await fetch(`http://${server.hostname}:${server.port}`, { signal: aborty.signal }); + await fetch(server.url.origin, { signal: aborty.signal }); throw new Error("Expected fetch to throw"); } catch (e: any) { expect(e.name).toBe("AbortError"); @@ -152,9 +152,7 @@ it("request.signal works in leaky case", async () => { }, }, async server => { - expect(async () => fetch(`http://${server.hostname}:${server.port}`, { signal: aborty.signal })).toThrow( - "The operation was aborted.", - ); + expect(async () => fetch(server.url.origin, { signal: aborty.signal 
})).toThrow("The operation was aborted."); await Bun.sleep(1); @@ -173,7 +171,7 @@ it("should work for a file", async () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -210,7 +208,7 @@ it("request.url should be based on the Host header", async () => { }, }, async server => { - const expected = `http://${server.hostname}:${server.port}/helloooo`; + const expected = `${server.url.origin}/helloooo`; const response = await fetch(expected, { headers: { Host: "example.com", @@ -250,7 +248,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.status).toBe(402); expect(response.headers.get("I-AM")).toBe("A-TEAPOT"); expect(await response.text()).toBe(""); @@ -286,7 +284,7 @@ describe("streaming", () => { }, async server => { console.log("async server() => {}"); - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); // connection terminated expect(await response.text()).toBe(""); expect(response.status).toBe(options.status ?? 200); @@ -344,7 +342,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); const text = await response.text(); expect(text.length).toBe(textToExpect.length); expect(text).toBe(textToExpect); @@ -369,7 +367,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -396,7 +394,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.status).toBe(200); expect(await response.text()).toBe("Test Passed"); }, @@ -427,7 +425,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.status).toBe(500); }, ); @@ -454,7 +452,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.status).toBe(500); expect(await response.text()).toBe("Fail"); expect(pass).toBe(true); @@ -485,7 +483,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -508,7 +506,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); const text = await response.text(); expect(text).toBe(textToExpect); }, @@ -536,7 +534,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -572,7 +570,7 @@ 
describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); count++; }, @@ -601,7 +599,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -631,7 +629,7 @@ describe("streaming", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -646,7 +644,7 @@ it("should work for a hello world", async () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe("Hello, world!"); }, ); @@ -662,7 +660,7 @@ it("should work for a blob", async () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -678,7 +676,7 @@ it("should work for a blob stream", async () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -694,7 +692,7 @@ it("should work for a file stream", async () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); }, ); @@ -714,7 +712,7 @@ it("fetch should work with headers", async () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`, { + const response = await fetch(server.url.origin, { headers: { "X-Foo": "bar", }, @@ -736,7 +734,7 @@ it(`should work for a file ${count} times serial`, async () => { }, async server => { for (let i = 0; i < count; i++) { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); } }, @@ -753,7 +751,7 @@ it(`should work for ArrayBuffer ${count} times serial`, async () => { }, async server => { for (let i = 0; i < count; i++) { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe(textToExpect); } }, @@ -772,11 +770,11 @@ describe("parallel", () => { async server => { for (let i = 0; i < count; ) { let responses = await Promise.all([ - fetch(`http://${server.hostname}:${server.port}`), - fetch(`http://${server.hostname}:${server.port}`), - fetch(`http://${server.hostname}:${server.port}`), - fetch(`http://${server.hostname}:${server.port}`), - fetch(`http://${server.hostname}:${server.port}`), + fetch(server.url.origin), + fetch(server.url.origin), + fetch(server.url.origin), + fetch(server.url.origin), + fetch(server.url.origin), ]); for (let response of responses) { @@ -798,11 +796,11 @@ describe("parallel", () => { async server => { for (let i = 0; i < count; ) { let responses = await Promise.all([ - fetch(`http://${server.hostname}:${server.port}`), - 
fetch(`http://${server.hostname}:${server.port}`), - fetch(`http://${server.hostname}:${server.port}`), - fetch(`http://${server.hostname}:${server.port}`), - fetch(`http://${server.hostname}:${server.port}`), + fetch(server.url.origin), + fetch(server.url.origin), + fetch(server.url.origin), + fetch(server.url.origin), + fetch(server.url.origin), ]); for (let response of responses) { @@ -823,10 +821,10 @@ it("should support reloading", async () => { fetch: first, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(await response.text()).toBe("first"); server.reload({ fetch: second }); - const response2 = await fetch(`http://${server.hostname}:${server.port}`); + const response2 = await fetch(server.url.origin); expect(await response2.text()).toBe("second"); }, ); @@ -904,7 +902,7 @@ describe("status code text", () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.status).toBe(parseInt(code)); expect(response.statusText).toBe(fixture[code]); }, @@ -927,7 +925,7 @@ it("should support multiple Set-Cookie headers", async () => { }, }, async server => { - const response = await fetch(`http://${server.hostname}:${server.port}`); + const response = await fetch(server.url.origin); expect(response.headers.getAll("Set-Cookie")).toEqual(["foo=bar", "baz=qux"]); expect(response.headers.get("Set-Cookie")).toEqual("foo=bar, baz=qux"); @@ -995,7 +993,7 @@ describe("should support Content-Range with Bun.file()", () => { for (const [start, end] of good) { it(`good range: ${start} - ${end}`, async () => { await getServer(async server => { - const response = await fetch(`http://${server.hostname}:${server.port}/?start=${start}&end=${end}`, { + const response = await fetch(`${server.url.origin}/?start=${start}&end=${end}`, { verbose: true, }); expect(await response.arrayBuffer()).toEqual(full.buffer.slice(start, end)); @@ -1007,7 +1005,7 @@ describe("should support Content-Range with Bun.file()", () => { for (const [start, end] of good) { it(`good range with size: ${start} - ${end}`, async () => { await getServerWithSize(async server => { - const response = await fetch(`http://${server.hostname}:${server.port}/?start=${start}&end=${end}`, { + const response = await fetch(`${server.url.origin}/?start=${start}&end=${end}`, { verbose: true, }); expect(parseInt(response.headers.get("Content-Range")?.split("/")[1])).toEqual(full.byteLength); @@ -1032,7 +1030,7 @@ describe("should support Content-Range with Bun.file()", () => { for (const [start, end] of emptyRanges) { it(`empty range: ${start} - ${end}`, async () => { await getServer(async server => { - const response = await fetch(`http://${server.hostname}:${server.port}/?start=${start}&end=${end}`); + const response = await fetch(`${server.url.origin}/?start=${start}&end=${end}`); const out = await response.arrayBuffer(); expect(out).toEqual(new ArrayBuffer(0)); expect(response.status).toBe(206); @@ -1054,7 +1052,7 @@ describe("should support Content-Range with Bun.file()", () => { for (const [start, end] of badRanges) { it(`bad range: ${start} - ${end}`, async () => { await getServer(async server => { - const response = await fetch(`http://${server.hostname}:${server.port}/?start=${start}&end=${end}`); + const response = await fetch(`${server.url.origin}/?start=${start}&end=${end}`); const out = await response.arrayBuffer(); 
expect(out).toEqual(new ArrayBuffer(0)); expect(response.status).toBe(206); @@ -1097,7 +1095,7 @@ it("request body and signal life cycle", async () => { const requests = []; for (let j = 0; j < 10; j++) { for (let i = 0; i < 250; i++) { - requests.push(fetch(`http://${server.hostname}:${server.port}`)); + requests.push(fetch(server.url.origin)); } await Promise.all(requests); @@ -1130,7 +1128,7 @@ it("propagates content-type from a Bun.file()'s file path in fetch()", async () }); // @ts-ignore - const reqBody = new Request(`http://${server.hostname}:${server.port}`, { + const reqBody = new Request(server.url.origin, { body, method: "POST", }); @@ -1155,7 +1153,7 @@ it("does propagate type for Blob", async () => { const body = new Blob(["hey"], { type: "text/plain;charset=utf-8" }); // @ts-ignore - const res = await fetch(`http://${server.hostname}:${server.port}`, { + const res = await fetch(server.url.origin, { body, method: "POST", }); @@ -1221,7 +1219,7 @@ it("#5859 text", async () => { }, }); - const response = await fetch(`http://${server.hostname}:${server.port}`, { + const response = await fetch(server.url.origin, { method: "POST", body: new Uint8Array([0xfd]), }); @@ -1244,7 +1242,7 @@ it("#5859 json", async () => { }, }); - const response = await fetch(`http://${server.hostname}:${server.port}`, { + const response = await fetch(server.url.origin, { method: "POST", body: new Uint8Array([0xfd]), }); @@ -1268,7 +1266,7 @@ it("server.requestIP (v4)", async () => { hostname: "127.0.0.1", }); - const response = await fetch(`http://${server.hostname}:${server.port}`).then(x => x.json()); + const response = await fetch(server.url.origin).then(x => x.json()); expect(response).toEqual({ address: "127.0.0.1", family: "IPv4", @@ -1333,7 +1331,7 @@ it("should response with HTTP 413 when request body is larger than maxRequestBod }); { - const resp = await fetch(`http://${server.hostname}:${server.port}`, { + const resp = await fetch(server.url.origin, { method: "POST", body: "A".repeat(10), }); @@ -1341,7 +1339,7 @@ it("should response with HTTP 413 when request body is larger than maxRequestBod expect(await resp.text()).toBe("OK"); } { - const resp = await fetch(`http://${server.hostname}:${server.port}`, { + const resp = await fetch(server.url.origin, { method: "POST", body: "A".repeat(11), }); @@ -1379,24 +1377,24 @@ it("should support promise returned from error", async () => { }); { - const resp = await fetch(`http://${server.hostname}:${server.port}/async-fulfilled`); + const resp = await fetch(`${server.url.origin}/async-fulfilled`); expect(resp.status).toBe(200); expect(await resp.text()).toBe("OK"); } { - const resp = await fetch(`http://${server.hostname}:${server.port}/async-pending`); + const resp = await fetch(`${server.url.origin}/async-pending`); expect(resp.status).toBe(200); expect(await resp.text()).toBe("OK"); } { - const resp = await fetch(`http://${server.hostname}:${server.port}/async-rejected`); + const resp = await fetch(`${server.url.origin}/async-rejected`); expect(resp.status).toBe(500); } { - const resp = await fetch(`http://${server.hostname}:${server.port}/async-rejected-pending`); + const resp = await fetch(`${server.url.origin}/async-rejected-pending`); expect(resp.status).toBe(500); } From 61845426829ccbea072a417df45dd8b3d5d822d3 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 21 Feb 2024 14:13:43 -0800 Subject: [PATCH 09/21] Add `BUN_DEBUG` flag to control where debug logs go (#9019) * Add `BUN_DEBUG` flag to control where debug logs go * Update all the 
actions * Configure temp * use spawn instead of rm * Use CLOSE_RANGE_CLOEXEC * Make some tests more reproducible * Update hot.test.ts * Detect file descriptor leaks and wait for stdout * Update runner.node.mjs * Update preload.ts --------- Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com> --- .github/workflows/bun-linux-aarch64.yml | 12 +- .github/workflows/bun-linux-build.yml | 19 +- .github/workflows/bun-mac-aarch64.yml | 31 +- .github/workflows/bun-mac-x64-baseline.yml | 33 +- .github/workflows/bun-mac-x64.yml | 33 +- .github/workflows/bun-release.yml | 18 +- .github/workflows/bun-types-tests.yml | 2 +- .github/workflows/bun-windows.yml | 37 +- bunfig.toml | 1 + docs/cli/install.md | 2 +- docs/guides/runtime/cicd.md | 2 +- .../bun-internal-test/src/runner.node.mjs | 116 ++++- packages/bun-uws/.github/workflows/codeql.yml | 2 +- packages/bun-uws/.github/workflows/cpp.yml | 2 +- src/bun.js/bindings/bun-spawn.cpp | 17 +- src/bun.js/bindings/c-bindings.cpp | 2 +- src/output.zig | 54 ++- test/cli/hot/hot.test.ts | 443 ++++++++++-------- test/js/node/watch/fs.watchFile.test.ts | 2 +- test/preload.ts | 10 + 20 files changed, 521 insertions(+), 317 deletions(-) create mode 100644 test/preload.ts diff --git a/.github/workflows/bun-linux-aarch64.yml b/.github/workflows/bun-linux-aarch64.yml index 292f58e02c..3ba8f9dab9 100644 --- a/.github/workflows/bun-linux-aarch64.yml +++ b/.github/workflows/bun-linux-aarch64.yml @@ -51,14 +51,14 @@ jobs: runner: linux-arm64 build_machine_arch: aarch64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: false ref: ${{github.sha}} clean: true - run: | bash ./scripts/update-submodules.sh - - uses: docker/setup-buildx-action@v2 + - uses: docker/setup-buildx-action@v3 id: buildx with: install: true @@ -66,7 +66,7 @@ jobs: run: | rm -rf ${{runner.temp}}/release - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -74,7 +74,7 @@ jobs: - run: | mkdir -p /tmp/.buildx-cache-${{matrix.tag}} - name: Build and push - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: . push: false @@ -113,11 +113,11 @@ jobs: zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: bun-${{matrix.tag}}-profile path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: bun-${{matrix.tag}} path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip diff --git a/.github/workflows/bun-linux-build.yml b/.github/workflows/bun-linux-build.yml index bee8cee6ac..6e8543902c 100644 --- a/.github/workflows/bun-linux-build.yml +++ b/.github/workflows/bun-linux-build.yml @@ -86,7 +86,7 @@ jobs: submodules: recursive ref: ${{github.sha}} clean: true - - uses: docker/setup-buildx-action@v2 + - uses: docker/setup-buildx-action@v3 id: buildx with: install: true @@ -94,7 +94,7 @@ jobs: run: | rm -rf ${{runner.temp}}/release - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -102,7 +102,7 @@ jobs: - run: | mkdir -p /tmp/.buildx-cache-${{matrix.tag}} - name: Build and push - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: . 
push: false @@ -154,19 +154,19 @@ jobs: zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: bun-${{matrix.tag}}-profile path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: bun-${{matrix.tag}} path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: bun-obj-${{matrix.tag}} path: ${{runner.temp}}/release/bun-obj - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.tag}}-dependencies path: ${{runner.temp}}/release/bun-dependencies @@ -234,7 +234,7 @@ jobs: clean: true - id: download name: Download - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: bun-${{matrix.tag}} path: ${{runner.temp}}/release @@ -275,6 +275,7 @@ jobs: name: Test (node runner) env: SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }} + TMPDIR: ${{runner.temp}} TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }} TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }} # if: ${{github.event.inputs.use_bun == 'false'}} @@ -283,7 +284,7 @@ jobs: ulimit -c node packages/bun-internal-test/src/runner.node.mjs || true - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: steps.test.outputs.failing_tests != '' with: name: cores diff --git a/.github/workflows/bun-mac-aarch64.yml b/.github/workflows/bun-mac-aarch64.yml index 8f312e4f49..bcb01918fa 100644 --- a/.github/workflows/bun-mac-aarch64.yml +++ b/.github/workflows/bun-mac-aarch64.yml @@ -51,20 +51,20 @@ jobs: # run: git submodule update --init --recursive --depth=1 --progress --force - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 id: buildx with: install: true - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Compile Zig Object - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 if: runner.arch == 'X64' with: context: . 
@@ -84,7 +84,7 @@ jobs: outputs: type=local,dest=${{runner.temp}}/release - name: Upload Zig Object - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }} path: ${{runner.temp}}/release/bun-zig.o @@ -141,7 +141,7 @@ jobs: - name: Cache submodule dependencies id: cache-deps-restore - uses: actions/cache/restore@v3 + uses: actions/cache/restore@v4 with: path: ${{runner.temp}}/bun-deps key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }} @@ -159,13 +159,13 @@ jobs: - name: Cache submodule dependencies if: ${{ !steps.cache-deps-restore.outputs.cache-hit }} id: cache-deps-save - uses: actions/cache/save@v3 + uses: actions/cache/save@v4 with: path: ${{runner.temp}}/bun-deps key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }} - name: Upload submodule dependencies - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }}-deps path: ${{runner.temp}}/bun-deps @@ -235,7 +235,7 @@ jobs: bash compile-cpp-only.sh -v - name: Upload C++ - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }}-cpp path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a @@ -285,19 +285,19 @@ jobs: echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH - name: Download C++ - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.tag }}-cpp path: ${{ runner.temp }}/bun-cpp-obj - name: Download Zig Object - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.obj }} path: ${{ runner.temp }}/release - name: Downloaded submodule dependencies - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.tag }}-deps path: ${{runner.temp}}/bun-deps @@ -330,11 +330,11 @@ jobs: zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile zip -r ${{matrix.tag}}.zip ${{matrix.tag}} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.tag}}-profile path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.tag}} path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip @@ -394,12 +394,12 @@ jobs: steps: - id: checkout name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: false - id: download name: Download - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{matrix.tag}} path: ${{runner.temp}}/release @@ -426,6 +426,7 @@ jobs: name: Test (node runner) env: SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }} + TMPDIR: ${{runner.temp}} TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }} TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }} # if: ${{github.event.inputs.use_bun == 'false'}} diff --git a/.github/workflows/bun-mac-x64-baseline.yml b/.github/workflows/bun-mac-x64-baseline.yml index 3e1493f2b9..ce7518ebae 100644 --- a/.github/workflows/bun-mac-x64-baseline.yml +++ b/.github/workflows/bun-mac-x64-baseline.yml @@ -55,20 +55,20 @@ jobs: - uses: actions/checkout@v4 - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 id: buildx with: install: true - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Compile Zig Object - uses: 
docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: . push: false @@ -97,7 +97,7 @@ jobs: outputs: type=local,dest=${{runner.temp}}/release - name: Upload Zig Object - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }} path: ${{runner.temp}}/release/bun-zig.o @@ -146,7 +146,7 @@ jobs: - name: Cache submodule dependencies id: cache-deps-restore - uses: actions/cache/restore@v3 + uses: actions/cache/restore@v4 with: path: ${{runner.temp}}/bun-deps key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }} @@ -164,13 +164,13 @@ jobs: - name: Cache submodule dependencies if: ${{ !steps.cache-deps-restore.outputs.cache-hit }} id: cache-deps-save - uses: actions/cache/save@v3 + uses: actions/cache/save@v4 with: path: ${{runner.temp}}/bun-deps key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }} - name: Upload submodule dependencies - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }}-deps path: ${{runner.temp}}/bun-deps @@ -240,7 +240,7 @@ jobs: bash compile-cpp-only.sh -v - name: Upload C++ - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }}-cpp path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a @@ -262,7 +262,7 @@ jobs: runner: macos-12-large artifact: bun-obj-darwin-x64-baseline steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Checkout submodules run: git submodule update --init --recursive --depth=1 --progress --force @@ -286,19 +286,19 @@ jobs: echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH - name: Download C++ - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.tag }}-cpp path: ${{ runner.temp }}/bun-cpp-obj - name: Download Zig Object - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.obj }} path: ${{ runner.temp }}/release - name: Downloaded submodule dependencies - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.tag }}-deps path: ${{runner.temp}}/bun-deps @@ -331,11 +331,11 @@ jobs: zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile zip -r ${{matrix.tag}}.zip ${{matrix.tag}} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.tag}}-profile path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.tag}} path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip @@ -396,12 +396,12 @@ jobs: steps: - id: checkout name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: false - id: download name: Download - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{matrix.tag}} path: ${{runner.temp}}/release @@ -428,6 +428,7 @@ jobs: name: Test (node runner) env: SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }} + TMPDIR: ${{runner.temp}} TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }} TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }} # if: ${{github.event.inputs.use_bun == 'false'}} diff --git a/.github/workflows/bun-mac-x64.yml b/.github/workflows/bun-mac-x64.yml index affdc7228c..753a391f22 100644 --- a/.github/workflows/bun-mac-x64.yml +++ b/.github/workflows/bun-mac-x64.yml @@ -52,20 +52,20 @@ jobs: - uses: actions/checkout@v4 - name: Setup Docker Buildx - uses: 
docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 id: buildx with: install: true - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Compile Zig Object - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: . push: false @@ -94,7 +94,7 @@ jobs: outputs: type=local,dest=${{runner.temp}}/release - name: Upload Zig Object - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }} path: ${{runner.temp}}/release/bun-zig.o @@ -144,7 +144,7 @@ jobs: - name: Cache submodule dependencies id: cache-deps-restore - uses: actions/cache/restore@v3 + uses: actions/cache/restore@v4 with: path: ${{runner.temp}}/bun-deps key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }} @@ -162,13 +162,13 @@ jobs: - name: Cache submodule dependencies if: ${{ !steps.cache-deps-restore.outputs.cache-hit }} id: cache-deps-save - uses: actions/cache/save@v3 + uses: actions/cache/save@v4 with: path: ${{runner.temp}}/bun-deps key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }} - name: Upload submodule dependencies - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }}-deps path: ${{runner.temp}}/bun-deps @@ -238,7 +238,7 @@ jobs: bash compile-cpp-only.sh -v - name: Upload C++ - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.tag }}-cpp path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a @@ -260,7 +260,7 @@ jobs: runner: macos-12-large artifact: bun-obj-darwin-x64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Checkout submodules run: git submodule update --init --recursive --depth=1 --progress --force @@ -284,19 +284,19 @@ jobs: echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH - name: Download C++ - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.tag }}-cpp path: ${{ runner.temp }}/bun-cpp-obj - name: Download Zig Object - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.obj }} path: ${{ runner.temp }}/release - name: Downloaded submodule dependencies - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.tag }}-deps path: ${{runner.temp}}/bun-deps @@ -329,11 +329,11 @@ jobs: zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile zip -r ${{matrix.tag}}.zip ${{matrix.tag}} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.tag}}-profile path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.tag}} path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip @@ -393,12 +393,12 @@ jobs: steps: - id: checkout name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: false - id: download name: Download - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{matrix.tag}} path: ${{runner.temp}}/release @@ -426,6 +426,7 @@ jobs: env: SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }} TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }} + TMPDIR: ${{runner.temp}} TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }} # if: ${{github.event.inputs.use_bun == 'false'}} run: | 
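[editor's note, not part of the patch] The macOS workflow diffs above add TMPDIR: ${{runner.temp}} to the test jobs' environment, presumably so that test scratch files land in the runner's per-job temp directory (which hosted runners clean between jobs) rather than the shared system /tmp. A minimal TypeScript sketch of the effect — the helper name makeScratchDir is invented for this note — relying on the fact that os.tmpdir() resolves TMPDIR first on POSIX:

    // Sketch only: shows where scratch dirs end up once TMPDIR points at runner.temp.
    import { mkdtempSync } from "node:fs";
    import { tmpdir } from "node:os";
    import { join } from "node:path";

    function makeScratchDir(prefix = "bun-test-"): string {
      // On POSIX, os.tmpdir() honors TMPDIR, so with TMPDIR=${{runner.temp}}
      // this creates a directory inside the runner's per-job temp folder.
      return mkdtempSync(join(tmpdir(), prefix));
    }

    console.log(makeScratchDir()); // e.g. <runner.temp>/bun-test-XXXXXX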
diff --git a/.github/workflows/bun-release.yml b/.github/workflows/bun-release.yml index f56a14b65b..89ca850737 100644 --- a/.github/workflows/bun-release.yml +++ b/.github/workflows/bun-release.yml @@ -51,7 +51,7 @@ jobs: working-directory: packages/bun-release steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup GPG uses: crazy-max/ghaction-import-gpg@v5 with: @@ -81,7 +81,7 @@ jobs: working-directory: packages/bun-release steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Bun uses: oven-sh/setup-bun@v1 with: @@ -105,7 +105,7 @@ jobs: working-directory: packages/bun-types steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Node.js uses: actions/setup-node@v3 with: @@ -170,12 +170,12 @@ jobs: suffix: -distroless steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Docker emulator uses: docker/setup-qemu-action@v2 - id: buildx name: Setup Docker buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: platforms: linux/amd64,linux/arm64 - id: metadata @@ -192,12 +192,12 @@ jobs: type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }} type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }} - name: Login to Docker - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Push to Docker - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: ./dockerhub/${{ matrix.dir || matrix.variant }} platforms: linux/amd64,linux/arm64 @@ -216,7 +216,7 @@ jobs: if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: oven-sh/homebrew-bun token: ${{ secrets.ROBOBUN_TOKEN }} @@ -252,7 +252,7 @@ jobs: working-directory: packages/bun-release steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Bun uses: oven-sh/setup-bun@v1 with: diff --git a/.github/workflows/bun-types-tests.yml b/.github/workflows/bun-types-tests.yml index bf3f591aa6..6fe222e71c 100644 --- a/.github/workflows/bun-types-tests.yml +++ b/.github/workflows/bun-types-tests.yml @@ -18,7 +18,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install bun uses: oven-sh/setup-bun@v1 diff --git a/.github/workflows/bun-windows.yml b/.github/workflows/bun-windows.yml index e74f2843c8..33c6a6e38b 100644 --- a/.github/workflows/bun-windows.yml +++ b/.github/workflows/bun-windows.yml @@ -60,13 +60,13 @@ jobs: - run: git config --global core.autocrlf false && git config --global core.eol lf - uses: actions/checkout@v4 - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 id: buildx with: install: true - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -79,7 +79,7 @@ jobs: echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" >> $GITHUB_OUTPUT - name: Compile Zig Object - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 if: runner.arch == 'X64' with: context: . 
@@ -102,7 +102,7 @@ jobs: outputs: type=local,dest=${{runner.temp}}/release - name: Upload Zig Object - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }} path: ${{runner.temp}}/release/bun-zig.o @@ -138,7 +138,7 @@ jobs: - name: Try fetch dependencies id: cache-deps-restore - uses: actions/cache/restore@v3 + uses: actions/cache/restore@v4 with: path: bun-deps key: bun-deps-${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-${{ steps.submodule-versions.outputs.sha }} @@ -165,7 +165,7 @@ jobs: .\scripts\all-dependencies.ps1 - name: Upload Dependencies - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }} path: bun-deps/ @@ -173,7 +173,7 @@ jobs: - name: Cache Dependencies if: ${{ !steps.cache-deps-restore.outputs.cache-hit }} id: cache-deps-save - uses: actions/cache/save@v3 + uses: actions/cache/save@v4 with: path: bun-deps key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }} @@ -204,7 +204,7 @@ jobs: if: ${{ env.canary == 'true' }} run: | echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen path: build-codegen-win32-x64/ @@ -228,7 +228,7 @@ jobs: version: ${{ env.LLVM_VERSION }} - run: choco install -y ninja - name: Download Codegen - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen path: build @@ -263,7 +263,7 @@ jobs: if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" } .\compile-cpp-only.ps1 -v if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" } - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }} path: build/bun-cpp-objects.a @@ -288,22 +288,22 @@ jobs: version: ${{ env.LLVM_VERSION }} - run: choco install -y ninja - name: Download Codegen - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen path: build - name: Download Dependencies - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }} path: bun-deps - name: Download Zig Object - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }} path: bun-zig - name: Download C++ Objects - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }} path: bun-cpp @@ -336,11 +336,11 @@ jobs: cp -r build\bun.pdb "$Dist\bun.pdb" Compress-Archive "$Dist" "$Dist.zip" - - uses: 
actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }} path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip @@ -398,12 +398,12 @@ jobs: - run: git config --global core.autocrlf false && git config --global core.eol lf - id: checkout name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: false - id: download name: Download Release - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile path: ${{runner.temp}}/release @@ -431,6 +431,7 @@ jobs: name: Run tests env: SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }} + TMPDIR: ${{runner.temp}} TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }} TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }} run: | diff --git a/bunfig.toml b/bunfig.toml index 99838d3ce6..fa1e4511ab 100644 --- a/bunfig.toml +++ b/bunfig.toml @@ -6,3 +6,4 @@ # # Instead, we can only scan the test directory for Bun's runtime tests root = "test" +preload = "./test/preload.ts" diff --git a/docs/cli/install.md b/docs/cli/install.md index 932f05a574..8c245dd5f7 100644 --- a/docs/cli/install.md +++ b/docs/cli/install.md @@ -195,7 +195,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install bun uses: oven-sh/setup-bun@v1 - name: Install dependencies diff --git a/docs/guides/runtime/cicd.md b/docs/guides/runtime/cicd.md index 862dcff2c4..c6d6a36a3b 100644 --- a/docs/guides/runtime/cicd.md +++ b/docs/guides/runtime/cicd.md @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: # ... 
- - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + - uses: oven-sh/setup-bun@v1 # run any `bun` or `bunx` command diff --git a/packages/bun-internal-test/src/runner.node.mjs b/packages/bun-internal-test/src/runner.node.mjs index cec84753bf..14177e865d 100644 --- a/packages/bun-internal-test/src/runner.node.mjs +++ b/packages/bun-internal-test/src/runner.node.mjs @@ -1,10 +1,11 @@ import * as action from "@actions/core"; import { spawn, spawnSync } from "child_process"; -import { rmSync, writeFileSync, readFileSync } from "fs"; +import { rmSync, writeFileSync, readFileSync, mkdirSync, openSync, close, closeSync } from "fs"; import { readFile } from "fs/promises"; import { readdirSync } from "node:fs"; import { resolve, basename } from "node:path"; -import { cpus, hostname, totalmem, userInfo } from "os"; +import { cpus, hostname, tmpdir, totalmem, userInfo } from "os"; +import { join } from "path"; import { fileURLToPath } from "url"; const run_start = new Date(); @@ -24,6 +25,20 @@ process.chdir(cwd); const ci = !!process.env["GITHUB_ACTIONS"]; const enableProgressBar = !ci; +var prevTmpdir = ""; +function maketemp() { + if (prevTmpdir && !windows) { + spawn("rm", ["-rf", prevTmpdir], { stdio: "inherit", detached: true }).unref(); + } + + prevTmpdir = join( + tmpdir(), + "bun-test-tmp-" + (Date.now() | 0).toString() + "_" + ((Math.random() * 100_000_0) | 0).toString(36), + ); + mkdirSync(prevTmpdir, { recursive: true }); + return prevTmpdir; +} + function defaultConcurrency() { // Concurrency causes more flaky tests, only enable it by default on windows // See https://github.com/oven-sh/bun/issues/8071 @@ -40,10 +55,19 @@ const extensions = [".js", ".ts", ".jsx", ".tsx"]; const git_sha = process.env["GITHUB_SHA"] ?? spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf-8" }).stdout.trim(); +const TEST_FILTER = process.env.BUN_TEST_FILTER; + function isTest(path) { if (!basename(path).includes(".test.") || !extensions.some(ext => path.endsWith(ext))) { return false; } + + if (TEST_FILTER) { + if (!path.includes(TEST_FILTER)) { + return false; + } + } + return true; } @@ -100,6 +124,33 @@ const failing_tests = []; const passing_tests = []; const fixes = []; const regressions = []; +let maxFd = -1; +function getMaxFileDescriptor(path) { + if (process.platform === "win32") { + return -1; + } + + hasInitialMaxFD = true; + + if (process.platform === "linux") { + try { + readdirSync("/proc/self/fd").forEach(name => { + const fd = parseInt(name.trim(), 10); + if (Number.isSafeInteger(fd) && fd >= 0) { + maxFd = Math.max(maxFd, fd); + } + }); + + return maxFd; + } catch {} + } + + const devnullfd = openSync("/dev/null", "r"); + closeSync(devnullfd); + maxFd = devnullfd + 1; + return maxFd; +} +let hasInitialMaxFD = false; async function runTest(path) { const name = path.replace(cwd, "").slice(1); @@ -107,14 +158,16 @@ async function runTest(path) { const expected_crash_reason = windows ? await readFile(resolve(path), "utf-8").then(data => { - const match = data.match(/@known-failing-on-windows:(.*)\n/); - return match ? match[1].trim() : null; - }) + const match = data.match(/@known-failing-on-windows:(.*)\n/); + return match ? 
match[1].trim() : null; + }) : null; const start = Date.now(); - await new Promise((done, reject) => { + await new Promise((finish, reject) => { + const chunks = []; + const proc = spawn(bunExe, ["test", resolve(path)], { stdio: ["ignore", "pipe", "pipe"], timeout: 1000 * 60 * 3, @@ -127,10 +180,26 @@ async function runTest(path) { // reproduce CI results locally GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true", BUN_DEBUG_QUIET_LOGS: "1", + TMPDIR: maketemp(), }, }); + proc.stdout.once("end", () => { + done(); + }); + + let doneCalls = 0; + let done = () => { + // TODO: wait for stderr as well + // spawn.test currently causes it to hang + if (doneCalls++ == 1) { + actuallyDone(); + } + }; + function actuallyDone() { + output = Buffer.concat(chunks).toString(); + finish(); + } - const chunks = []; proc.stdout.on("data", chunk => { chunks.push(chunk); if (run_concurrency === 1) process.stdout.write(chunk); @@ -140,18 +209,32 @@ async function runTest(path) { if (run_concurrency === 1) process.stderr.write(chunk); }); - proc.on("exit", (code_, signal_) => { + proc.once("exit", (code_, signal_) => { exitCode = code_; signal = signal_; - output = Buffer.concat(chunks).toString(); done(); }); - proc.on("error", err_ => { + proc.once("error", err_ => { err = err_; - done(); + done = () => {}; + actuallyDone(); }); }); + if (!hasInitialMaxFD) { + getMaxFileDescriptor(); + } else if (maxFd > 0) { + const prevMaxFd = maxFd; + maxFd = getMaxFileDescriptor(); + if (maxFd > prevMaxFd) { + process.stderr.write( + `\n\x1b[31mewarn\x1b[0;2m:\x1b[0m file descriptor leak in ${name}, delta: ${ + maxFd - prevMaxFd + }, current: ${maxFd}, previous: ${prevMaxFd}\n`, + ); + } + } + const passed = exitCode === 0 && !err && !signal; let reason = ""; @@ -195,7 +278,8 @@ async function runTest(path) { } console.log( - `\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖" + `\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${ + passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖" } ${name}\x1b[0m${reason ? ` (${reason})` : ""}`, ); @@ -319,9 +403,10 @@ console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n" console.log(header); console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n"); -let report = `# bun test on ${process.env["GITHUB_REF"] ?? +let report = `# bun test on ${ + process.env["GITHUB_REF"] ?? spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { encoding: "utf-8" }).stdout.trim() - } +} \`\`\` ${header} @@ -345,7 +430,8 @@ if (regressions.length > 0) { report += regressions .map( ({ path, reason, expected_crash_reason }) => - `- [\`${path}\`](${sectionLink(path)}) ${reason}${expected_crash_reason ? ` (expected: ${expected_crash_reason})` : "" + `- [\`${path}\`](${sectionLink(path)}) ${reason}${ + expected_crash_reason ? ` (expected: ${expected_crash_reason})` : "" }`, ) .join("\n"); diff --git a/packages/bun-uws/.github/workflows/codeql.yml b/packages/bun-uws/.github/workflows/codeql.yml index abafedce26..8da1909db7 100644 --- a/packages/bun-uws/.github/workflows/codeql.yml +++ b/packages/bun-uws/.github/workflows/codeql.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/packages/bun-uws/.github/workflows/cpp.yml b/packages/bun-uws/.github/workflows/cpp.yml index f6c761de53..44c68bb2e7 100644 --- a/packages/bun-uws/.github/workflows/cpp.yml +++ b/packages/bun-uws/.github/workflows/cpp.yml @@ -20,7 +20,7 @@ jobs: language: c++ fuzz-seconds: 600 - name: Upload crash - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() && steps.build.outcome == 'success' with: name: artifacts diff --git a/src/bun.js/bindings/bun-spawn.cpp b/src/bun.js/bindings/bun-spawn.cpp index 407452ee38..ff83c5a57a 100644 --- a/src/bun.js/bindings/bun-spawn.cpp +++ b/src/bun.js/bindings/bun-spawn.cpp @@ -12,13 +12,14 @@ #include #include -static int close_range(unsigned int first) -{ - return syscall(__NR_close_range, first, ~0U, 0); -} - extern char** environ; +#ifndef CLOSE_RANGE_CLOEXEC +#define CLOSE_RANGE_CLOEXEC (1U << 2) +#endif + +extern "C" ssize_t bun_close_range(unsigned int start, unsigned int end, unsigned int flags); + enum FileActionType : uint8_t { None, Close, @@ -70,7 +71,7 @@ extern "C" ssize_t posix_spawn_bun( const auto childFailed = [&]() -> ssize_t { res = errno; status = res; - close_range(0); + bun_close_range(0, ~0U, 0); _exit(127); // should never be reached @@ -151,7 +152,9 @@ extern "C" ssize_t posix_spawn_bun( if (!envp) envp = environ; - close_range(current_max_fd + 1); + if (bun_close_range(current_max_fd + 1, ~0U, CLOSE_RANGE_CLOEXEC) != 0) { + bun_close_range(current_max_fd + 1, ~0U, 0); + } execve(path, argv, envp); _exit(127); diff --git a/src/bun.js/bindings/c-bindings.cpp b/src/bun.js/bindings/c-bindings.cpp index f2109b0da3..27eb3cb350 100644 --- a/src/bun.js/bindings/c-bindings.cpp +++ b/src/bun.js/bindings/c-bindings.cpp @@ -171,7 +171,7 @@ extern "C" int clock_gettime_monotonic(int64_t* tv_sec, int64_t* tv_nsec) #endif // close_range is glibc > 2.33, which is very new -static ssize_t bun_close_range(unsigned int start, unsigned int end, unsigned int flags) +extern "C" ssize_t bun_close_range(unsigned int start, unsigned int end, unsigned int flags) { return syscall(__NR_close_range, start, end, flags); } diff --git a/src/output.zig b/src/output.zig index a29cd80c9f..78df0b8c9c 100644 --- a/src/output.zig +++ b/src/output.zig @@ -486,7 +486,7 @@ pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_f if (!out_set) { buffered_writer = .{ - .unbuffered_writer = writer(), + .unbuffered_writer = scopedWriter(), }; out = buffered_writer.writer(); out_set = true; @@ -495,7 +495,7 @@ pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_f lock.lock(); defer lock.unlock(); - if (Output.enable_ansi_colors_stderr) { + if (Output.enable_ansi_colors_stdout and buffered_writer.unbuffered_writer.context.handle == writer().context.handle) { out.print(comptime prettyFmt("[" ++ @tagName(tag) ++ "] " ++ fmt, true), args) catch { really_disable = true; return; @@ -804,6 +804,56 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype) } } +fn scopedWriter() std.fs.File.Writer { + if (comptime !Environment.isDebug) { + @compileError("scopedWriter() should only be called in debug mode"); + } + + const Scoped = struct { + pub var loaded_env: ?bool = null; + pub var scoped_file_writer: std.fs.File.Writer = undefined; + pub var scoped_file_writer_lock: bun.Lock = bun.Lock.init(); + }; + std.debug.assert(source_set); + Scoped.scoped_file_writer_lock.lock(); + defer Scoped.scoped_file_writer_lock.unlock(); + const use_env = 
Scoped.loaded_env orelse brk: { + if (bun.getenvZ("BUN_DEBUG")) |path| { + if (path.len > 0 and !strings.eql(path, "0") and !strings.eql(path, "false")) { + if (std.fs.path.dirname(path)) |dir| { + std.fs.cwd().makePath(dir) catch {}; + } + + // do not use libuv through this code path, since it might not be initialized yet. + const fd = std.os.openat( + std.fs.cwd().fd, + path, + std.os.O.TRUNC | std.os.O.CREAT | std.os.O.WRONLY, + 0o644, + ) catch |err_| { + // Ensure we don't panic inside panic + Scoped.loaded_env = false; + Scoped.scoped_file_writer_lock.unlock(); + Output.panic("Failed to open file for debug output: {s} ({s})", .{ @errorName(err_), path }); + }; + Scoped.scoped_file_writer = bun.toFD(fd).asFile().writer(); + Scoped.loaded_env = true; + break :brk true; + } + } + + Scoped.loaded_env = false; + + break :brk false; + }; + + if (use_env) { + return Scoped.scoped_file_writer; + } + + return source.stream.writer(); +} + /// Print a red error message with "error: " as the prefix. For custom prefixes see `err()` pub inline fn errGeneric(comptime fmt: []const u8, args: anytype) void { prettyErrorln("error: " ++ fmt, args); diff --git a/test/cli/hot/hot.test.ts b/test/cli/hot/hot.test.ts index 422f87eb67..37674913eb 100644 --- a/test/cli/hot/hot.test.ts +++ b/test/cli/hot/hot.test.ts @@ -1,258 +1,307 @@ import { spawn } from "bun"; -import { expect, it } from "bun:test"; +import { beforeAll, beforeEach, expect, it } from "bun:test"; import { bunExe, bunEnv, tempDirWithFiles, bunRun, bunRunAsScript } from "harness"; -import { readFileSync, renameSync, rmSync, unlinkSync, writeFileSync, copyFileSync } from "fs"; +import { cpSync, readFileSync, renameSync, rmSync, unlinkSync, writeFileSync, copyFileSync } from "fs"; import { join } from "path"; +import { tmpdir } from "os"; -const hotRunnerRoot = join(import.meta.dir, "hot-runner-root.js"); +let hotRunnerRoot: string = "", + cwd = ""; +beforeEach(() => { + const hotPath = join(tmpdir(), "bun-hot-test-" + (Date.now() | 0) + "_" + Math.random().toString(36).slice(2)); + hotRunnerRoot = join(hotPath, "hot-runner-root.js"); + rmSync(hotPath, { recursive: true, force: true }); + cpSync(import.meta.dir, hotPath, { recursive: true, force: true }); + cwd = hotPath; +}); it("should hot reload when file is overwritten", async () => { const root = hotRunnerRoot; - const runner = spawn({ - cmd: [bunExe(), "--hot", "run", root], - env: bunEnv, - stdout: "pipe", - stderr: "inherit", - stdin: "ignore", - }); + try { + var runner = spawn({ + cmd: [bunExe(), "--hot", "run", root], + env: bunEnv, + cwd, + stdout: "pipe", + stderr: "inherit", + stdin: "ignore", + }); - var reloadCounter = 0; + var reloadCounter = 0; - async function onReload() { - writeFileSync(root, readFileSync(root, "utf-8")); - } - - for await (const line of runner.stdout) { - var str = new TextDecoder().decode(line); - var any = false; - for (let line of str.split("\n")) { - if (!line.includes("[#!root]")) continue; - reloadCounter++; - - if (reloadCounter === 3) { - runner.unref(); - runner.kill(); - break; - } - - expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); - any = true; + async function onReload() { + writeFileSync(root, readFileSync(root, "utf-8")); } - if (any) await onReload(); - } + for await (const line of runner.stdout) { + var str = new TextDecoder().decode(line); + var any = false; + for (let line of str.split("\n")) { + if (!line.includes("[#!root]")) continue; + reloadCounter++; - expect(reloadCounter).toBe(3); + if (reloadCounter === 3) { + 
runner.unref(); + runner.kill(); + break; + } + + expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); + any = true; + } + + if (any) await onReload(); + } + + expect(reloadCounter).toBe(3); + } finally { + // @ts-ignore + runner?.unref?.(); + // @ts-ignore + runner?.kill?.(9); + } }); it("should recover from errors", async () => { const root = hotRunnerRoot; - const runner = spawn({ - cmd: [bunExe(), "--hot", "run", root], - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - stdin: "ignore", - }); + try { + var runner = spawn({ + cmd: [bunExe(), "--hot", "run", root], + env: bunEnv, + cwd, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); - let reloadCounter = 0; - const input = readFileSync(root, "utf-8"); - function onReloadGood() { - writeFileSync(root, input); - } - - function onReloadError() { - writeFileSync(root, "throw new Error('error');\n"); - } - - var queue = [onReloadError, onReloadGood, onReloadError, onReloadGood]; - var errors: string[] = []; - var onError: (...args: any[]) => void; - (async () => { - for await (let line of runner.stderr) { - var str = new TextDecoder().decode(line); - errors.push(str); - // @ts-ignore - onError && onError(str); + let reloadCounter = 0; + const input = readFileSync(root, "utf-8"); + function onReloadGood() { + writeFileSync(root, input); } - })(); - for await (const line of runner.stdout) { - var str = new TextDecoder().decode(line); - var any = false; - for (let line of str.split("\n")) { - if (!line.includes("[#!root]")) continue; - reloadCounter++; + function onReloadError() { + writeFileSync(root, "throw new Error('error');\n"); + } - if (reloadCounter === 3) { - runner.unref(); - runner.kill(); - break; + var queue = [onReloadError, onReloadGood, onReloadError, onReloadGood]; + var errors: string[] = []; + var onError: (...args: any[]) => void; + (async () => { + for await (let line of runner.stderr) { + var str = new TextDecoder().decode(line); + errors.push(str); + // @ts-ignore + onError && onError(str); } + })(); - expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); - any = true; - } + for await (const line of runner.stdout) { + var str = new TextDecoder().decode(line); + var any = false; + for (let line of str.split("\n")) { + if (!line.includes("[#!root]")) continue; + reloadCounter++; - if (any) { - queue.shift()!(); - await new Promise((resolve, reject) => { - if (errors.length > 0) { - errors.length = 0; - resolve(); - return; + if (reloadCounter === 3) { + runner.unref(); + runner.kill(); + break; } - onError = resolve; - }); + expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); + any = true; + } - queue.shift()!(); + if (any) { + queue.shift()!(); + await new Promise((resolve, reject) => { + if (errors.length > 0) { + errors.length = 0; + resolve(); + return; + } + + onError = resolve; + }); + + queue.shift()!(); + } } - } - expect(reloadCounter).toBe(3); + expect(reloadCounter).toBe(3); + } finally { + // @ts-ignore + runner?.unref?.(); + // @ts-ignore + runner?.kill?.(9); + } }); it("should not hot reload when a random file is written", async () => { const root = hotRunnerRoot; - const runner = spawn({ - cmd: [bunExe(), "--hot", "run", root], - env: bunEnv, - stdout: "pipe", - stderr: "inherit", - stdin: "ignore", - }); + try { + var runner = spawn({ + cmd: [bunExe(), "--hot", "run", root], + env: bunEnv, + cwd, + stdout: "pipe", + stderr: "inherit", + stdin: "ignore", + }); - let reloadCounter = 0; - const code = readFileSync(root, "utf-8"); - async function onReload() { - 
writeFileSync(root + ".another.yet.js", code); - unlinkSync(root + ".another.yet.js"); - } - var waiter = new Promise((resolve, reject) => { - setTimeout(async () => { - resolve(); - }, 50); - }); - var finished = false; - await Promise.race([ - waiter, - (async () => { - if (finished) { - return; - } - for await (const line of runner.stdout) { + let reloadCounter = 0; + const code = readFileSync(root, "utf-8"); + async function onReload() { + writeFileSync(root + ".another.yet.js", code); + unlinkSync(root + ".another.yet.js"); + } + var waiter = new Promise((resolve, reject) => { + setTimeout(async () => { + resolve(); + }, 50); + }); + var finished = false; + await Promise.race([ + waiter, + (async () => { if (finished) { return; } - - var str = new TextDecoder().decode(line); - for (let line of str.split("\n")) { - if (!line.includes("[#!root]")) continue; + for await (const line of runner.stdout) { if (finished) { return; } - await onReload(); - reloadCounter++; - expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); + var str = new TextDecoder().decode(line); + for (let line of str.split("\n")) { + if (!line.includes("[#!root]")) continue; + if (finished) { + return; + } + await onReload(); + + reloadCounter++; + expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); + } } - } - })(), - ]); - finished = true; - runner.kill(0); - runner.unref(); + })(), + ]); + finished = true; + runner.kill(0); + runner.unref(); - expect(reloadCounter).toBe(1); + expect(reloadCounter).toBe(1); + } finally { + // @ts-ignore + runner?.unref?.(); + // @ts-ignore + runner?.kill?.(9); + } }); it("should hot reload when a file is deleted and rewritten", async () => { - const root = hotRunnerRoot + ".tmp.js"; - copyFileSync(hotRunnerRoot, root); - const runner = spawn({ - cmd: [bunExe(), "--hot", "run", root], - env: bunEnv, - stdout: "pipe", - stderr: "inherit", - stdin: "ignore", - }); + try { + const root = hotRunnerRoot + ".tmp.js"; + copyFileSync(hotRunnerRoot, root); + var runner = spawn({ + cmd: [bunExe(), "--hot", "run", root], + env: bunEnv, + cwd, + stdout: "pipe", + stderr: "inherit", + stdin: "ignore", + }); - var reloadCounter = 0; + var reloadCounter = 0; - async function onReload() { - const contents = readFileSync(root, "utf-8"); - rmSync(root); - writeFileSync(root, contents); - } - - for await (const line of runner.stdout) { - var str = new TextDecoder().decode(line); - var any = false; - for (let line of str.split("\n")) { - if (!line.includes("[#!root]")) continue; - reloadCounter++; - - if (reloadCounter === 3) { - runner.unref(); - runner.kill(); - break; - } - - expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); - any = true; + async function onReload() { + const contents = readFileSync(root, "utf-8"); + rmSync(root); + writeFileSync(root, contents); } - if (any) await onReload(); + for await (const line of runner.stdout) { + var str = new TextDecoder().decode(line); + var any = false; + for (let line of str.split("\n")) { + if (!line.includes("[#!root]")) continue; + reloadCounter++; + + if (reloadCounter === 3) { + runner.unref(); + runner.kill(); + break; + } + + expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); + any = true; + } + + if (any) await onReload(); + } + rmSync(root); + expect(reloadCounter).toBe(3); + } finally { + // @ts-ignore + runner?.unref?.(); + // @ts-ignore + runner?.kill?.(9); } - rmSync(root); - expect(reloadCounter).toBe(3); }); it("should hot reload when a file is renamed() into place", async () => { const root = 
hotRunnerRoot + ".tmp.js"; copyFileSync(hotRunnerRoot, root); - const runner = spawn({ - cmd: [bunExe(), "--hot", "run", root], - env: bunEnv, - stdout: "pipe", - stderr: "inherit", - stdin: "ignore", - }); + try { + var runner = spawn({ + cmd: [bunExe(), "--hot", "run", root], + env: bunEnv, + cwd, + stdout: "pipe", + stderr: "inherit", + stdin: "ignore", + }); - var reloadCounter = 0; + var reloadCounter = 0; - async function onReload() { - const contents = readFileSync(root, "utf-8"); - rmSync(root + ".tmpfile", { force: true }); - await 1; - writeFileSync(root + ".tmpfile", contents); - await 1; - rmSync(root); - await 1; - renameSync(root + ".tmpfile", root); - await 1; - } - - for await (const line of runner.stdout) { - var str = new TextDecoder().decode(line); - var any = false; - for (let line of str.split("\n")) { - if (!line.includes("[#!root]")) continue; - reloadCounter++; - - if (reloadCounter === 3) { - runner.unref(); - runner.kill(); - break; - } - - expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); - any = true; + async function onReload() { + const contents = readFileSync(root, "utf-8"); + rmSync(root + ".tmpfile", { force: true }); + await 1; + writeFileSync(root + ".tmpfile", contents); + await 1; + rmSync(root); + await 1; + renameSync(root + ".tmpfile", root); + await 1; } - if (any) await onReload(); + for await (const line of runner.stdout) { + var str = new TextDecoder().decode(line); + var any = false; + for (let line of str.split("\n")) { + if (!line.includes("[#!root]")) continue; + reloadCounter++; + + if (reloadCounter === 3) { + runner.unref(); + runner.kill(); + break; + } + + expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`); + any = true; + } + + if (any) await onReload(); + } + rmSync(root); + expect(reloadCounter).toBe(3); + } finally { + // @ts-ignore + runner?.unref?.(); + // @ts-ignore + runner?.kill?.(9); } - rmSync(root); - expect(reloadCounter).toBe(3); }); diff --git a/test/js/node/watch/fs.watchFile.test.ts b/test/js/node/watch/fs.watchFile.test.ts index ff84cd1841..6a3905b6a7 100644 --- a/test/js/node/watch/fs.watchFile.test.ts +++ b/test/js/node/watch/fs.watchFile.test.ts @@ -7,7 +7,7 @@ import { describe, expect, test } from "bun:test"; // before it is actually watching, we need to repeat the operation to avoid // a race condition. 
function repeat(fn: any) { - const interval = setInterval(fn, 20); + const interval = setInterval(fn, 20).unref(); return interval; } const encodingFileName = `新建文夹件.txt`; diff --git a/test/preload.ts b/test/preload.ts new file mode 100644 index 0000000000..9216ac179d --- /dev/null +++ b/test/preload.ts @@ -0,0 +1,10 @@ +import * as harness from "./harness"; + +// We make Bun.env read-only +// so process.env = {} causes them to be out of sync and we assume Bun.env is +for (let key in process.env) { + if (key === "TZ") continue; + delete process.env[key]; +} + +Bun.$.env(Object.assign(process.env, harness.bunEnv)); From bdb70d5bc29763bc5ece019366ec566c588f4f37 Mon Sep 17 00:00:00 2001 From: Cameron Haley <42698419+camero2734@users.noreply.github.com> Date: Wed, 21 Feb 2024 23:19:43 +0100 Subject: [PATCH 10/21] Account for initial_thread_count in napi threadsafe_function logic (#9035) --- src/napi/napi.zig | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/src/napi/napi.zig b/src/napi/napi.zig index a3eb87860e..6af48627e4 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -1279,7 +1279,7 @@ pub const ThreadSafeFunction = struct { /// prevent it from being destroyed. poll_ref: Async.KeepAlive, - owning_threads: std.AutoArrayHashMapUnmanaged(u64, void) = .{}, + thread_count: usize = 0, owning_thread_lock: Lock = Lock.init(), event_loop: *JSC.EventLoop, @@ -1422,24 +1422,33 @@ pub const ThreadSafeFunction = struct { defer this.owning_thread_lock.unlock(); if (this.channel.isClosed()) return error.Closed; - _ = this.owning_threads.getOrPut(bun.default_allocator, std.Thread.getCurrentId()) catch unreachable; + this.thread_count += 1; } - pub fn release(this: *ThreadSafeFunction, mode: napi_threadsafe_function_release_mode) void { + pub fn release(this: *ThreadSafeFunction, mode: napi_threadsafe_function_release_mode) napi_status { this.owning_thread_lock.lock(); defer this.owning_thread_lock.unlock(); - if (!this.owning_threads.swapRemove(std.Thread.getCurrentId())) - return; + + if (this.thread_count == 0) { + return invalidArg(); + } + + this.thread_count -= 1; + + if (this.channel.isClosed()) { + return .ok; + } if (mode == .abort) { this.channel.close(); } - if (this.owning_threads.count() == 0) { + if (mode == .abort or this.thread_count == 0) { this.finalizer_task = JSC.AnyTask{ .ctx = this, .callback = finalize }; this.event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(this, finalize)); - return; } + + return .ok; } }; @@ -1479,10 +1488,10 @@ pub export fn napi_create_threadsafe_function( }, .ctx = context, .channel = ThreadSafeFunction.Queue.init(max_queue_size, bun.default_allocator), - .owning_threads = .{}, + .thread_count = initial_thread_count, .poll_ref = Async.KeepAlive.init(), }; - function.owning_threads.ensureTotalCapacity(bun.default_allocator, initial_thread_count) catch return genericFailure(); + function.finalizer = .{ .ctx = thread_finalize_data, .fun = thread_finalize_cb }; result.* = function; return .ok; @@ -1512,8 +1521,7 @@ pub export fn napi_acquire_threadsafe_function(func: napi_threadsafe_function) n } pub export fn napi_release_threadsafe_function(func: napi_threadsafe_function, mode: napi_threadsafe_function_release_mode) napi_status { log("napi_release_threadsafe_function", .{}); - func.release(mode); - return .ok; + return func.release(mode); } pub export fn napi_unref_threadsafe_function(env: napi_env, func: napi_threadsafe_function) napi_status { log("napi_unref_threadsafe_function", .{}); 
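[editor's note, not part of the patch] Patch 10 above replaces the per-thread-id hash map with a plain counter seeded from initial_thread_count, and makes napi_release_threadsafe_function report an error when the counter is already zero. The TypeScript below is only an illustrative model of that counting logic, not Bun's code; the class and method names are invented for this note, and the authoritative behavior is the Zig diff above.

    // Model of the acquire/release accounting added in patch 10 (names invented).
    type ReleaseMode = "release" | "abort";
    type Status = "ok" | "invalid_arg";

    class ThreadSafeFunctionModel {
      private closed = false;
      private finalized = false;
      constructor(private threadCount: number) {} // seeded from initial_thread_count

      acquire(): Status {
        if (this.closed) return "invalid_arg"; // mirrors error.Closed in the Zig code
        this.threadCount += 1;
        return "ok";
      }

      release(mode: ReleaseMode): Status {
        if (this.threadCount === 0) return "invalid_arg";
        this.threadCount -= 1;
        if (this.closed) return "ok";
        if (mode === "abort") this.closed = true;
        if (mode === "abort" || this.threadCount === 0) this.finalized = true; // finalizer enqueued
        return "ok";
      }
    }

    // With initial_thread_count = 1, a single release() now finalizes the function,
    // whereas the old set-based logic ignored threads that never called acquire().
    const tsfn = new ThreadSafeFunctionModel(1);
    console.log(tsfn.release("release")); // "ok" — finalizer runs
    console.log(tsfn.release("release")); // "invalid_arg"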
From 20275aa0407cf4120077b18d332417ad7a4ee416 Mon Sep 17 00:00:00 2001 From: Kenta Iwasaki <63115601+lithdew@users.noreply.github.com> Date: Thu, 22 Feb 2024 06:31:57 +0800 Subject: [PATCH 11/21] fix(ws/client): handle short reads on payload frame length (#9027) * fix(ws/client): handle short reads on payload frame length In the WebSocket specification, control frames may not be fragmented. However, the frame parser should handle fragmented control frames nonetheless. Whether or not the frame parser is given a set of fragmented bytes to parse is subject to the strategy in which the client buffers received bytes. All stages of the frame parser currently supports parsing frames fragmented across multiple TCP segments except for the payload frame length parsing stage. This commit implements buffering the bytes of a frame's payload length into a client instance so that the websocket client is able to properly parse payload frame lengths despite there being a short read over incoming TCP data. A test is added to test/js/web/websocket/websocket-client-short-read.test.ts which creates a make-shift WebSocket server that performs short writes over a single WebSocket frame. The test passes with this commit. * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/http/websocket_http_client.zig | 36 +++++-- .../websocket-client-short-read.test.ts | 100 ++++++++++++++++++ .../web/websocket/websocket-server-echo.mjs | 2 +- 3 files changed, 129 insertions(+), 9 deletions(-) create mode 100644 test/js/web/websocket/websocket-client-short-read.test.ts diff --git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig index 0b5eb1b125..c04ed4ce70 100644 --- a/src/http/websocket_http_client.zig +++ b/src/http/websocket_http_client.zig @@ -939,6 +939,9 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { header_fragment: ?u8 = null, + payload_length_frame_bytes: [8]u8 = [_]u8{0} ** 8, + payload_length_frame_len: u8 = 0, + initial_data_handler: ?*InitialDataHandler = null, event_loop: *JSC.EventLoop = undefined, @@ -1193,6 +1196,12 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { } var header_bytes: [@sizeOf(usize)]u8 = [_]u8{0} ** @sizeOf(usize); + + // In the WebSocket specification, control frames may not be fragmented. + // However, the frame parser should handle fragmented control frames nonetheless. + // Whether or not the frame parser is given a set of fragmented bytes to parse is subject + // to the strategy in which the client buffers and coalesces received bytes. 
+ while (true) { log("onData ({s})", .{@tagName(receive_state)}); @@ -1323,22 +1332,33 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { .extended_payload_length_16 => @as(usize, 2), else => unreachable, }; - // we need to wait for more data - if (data.len == 0) return; - if (data.len < byte_size) { - this.terminate(ErrorCode.control_frame_is_fragmented); - terminated = true; + // we need to wait for more data + if (data.len == 0) { + break; + } + + // copy available payload length bytes to a buffer held on this client instance + const total_received = @min(byte_size - this.payload_length_frame_len, data.len); + @memcpy(this.payload_length_frame_bytes[this.payload_length_frame_len..][0..total_received], data[0..total_received]); + this.payload_length_frame_len += @intCast(total_received); + data = data[total_received..]; + + // short read on payload length - we need to wait for more data + // whatever bytes were returned from the short read are kept in `payload_length_frame_bytes` + if (this.payload_length_frame_len < byte_size) { break; } // Multibyte length quantities are expressed in network byte order receive_body_remain = switch (byte_size) { - 8 => @as(usize, std.mem.readInt(u64, data[0..8], .big)), - 2 => @as(usize, std.mem.readInt(u16, data[0..2], .big)), + 8 => @as(usize, std.mem.readInt(u64, this.payload_length_frame_bytes[0..8], .big)), + 2 => @as(usize, std.mem.readInt(u16, this.payload_length_frame_bytes[0..2], .big)), else => unreachable, }; - data = data[byte_size..]; + + this.payload_length_frame_len = 0; + receive_state = .need_body; if (receive_body_remain == 0) { diff --git a/test/js/web/websocket/websocket-client-short-read.test.ts b/test/js/web/websocket/websocket-client-short-read.test.ts new file mode 100644 index 0000000000..22b3807dbe --- /dev/null +++ b/test/js/web/websocket/websocket-client-short-read.test.ts @@ -0,0 +1,100 @@ +import { TCPSocketListener } from "bun"; +import { describe, test, expect } from "bun:test"; +import { WebSocket } from "ws"; + +const hostname = process.env.HOST || "127.0.0.1"; +const port = parseInt(process.env.PORT || "0"); + +describe("WebSocket", () => { + test("short read on payload length", async () => { + let server: TCPSocketListener | undefined; + let client: WebSocket | undefined; + let init = false; + + try { + server = Bun.listen({ + socket: { + data(socket, data) { + if (init) { + return; + } + + init = true; + + const frame = data.toString("utf-8"); + if (!frame.startsWith("GET")) { + throw new Error("Invalid handshake"); + } + + const magic = /Sec-WebSocket-Key: (.*)\r\n/.exec(frame); + if (!magic) { + throw new Error("Missing Sec-WebSocket-Key"); + } + + const hasher = new Bun.CryptoHasher("sha1"); + hasher.update(magic[1]); + hasher.update("258EAFA5-E914-47DA-95CA-C5AB0DC85B11"); + const accept = hasher.digest("base64"); + + // Respond with a websocket handshake. + socket.write( + "HTTP/1.1 101 Switching Protocols\r\n" + + "Upgrade: websocket\r\n" + + "Connection: Upgrade\r\n" + + `Sec-WebSocket-Accept: ${accept}\r\n` + + "\r\n", + ); + socket.flush(); + + // Partially write a websocket text frame with an incomplete big-endian u16 length. + socket.write(Uint8Array.from([129, 126, 0])); + socket.flush(); + + // Write the remainder of the websocket text frame. 
+ setTimeout(() => { + socket.write( + Uint8Array.from([ + 253, 123, 34, 106, 115, 111, 110, 114, 112, 99, 34, 58, 34, 50, 46, 48, 34, 44, 34, 109, 101, 116, + 104, 111, 100, 34, 58, 34, 116, 114, 97, 110, 115, 97, 99, 116, 105, 111, 110, 78, 111, 116, 105, 102, + 105, 99, 97, 116, 105, 111, 110, 34, 44, 34, 112, 97, 114, 97, 109, 115, 34, 58, 123, 34, 114, 101, + 115, 117, 108, 116, 34, 58, 123, 34, 99, 111, 110, 116, 101, 120, 116, 34, 58, 123, 34, 115, 108, 111, + 116, 34, 58, 50, 52, 57, 54, 48, 50, 49, 55, 57, 125, 44, 34, 118, 97, 108, 117, 101, 34, 58, 123, 34, + 115, 105, 103, 110, 97, 116, 117, 114, 101, 34, 58, 34, 50, 80, 50, 120, 102, 51, 109, 85, 49, 118, + 114, 110, 89, 99, 100, 49, 76, 105, 99, 104, 56, 69, 76, 104, 104, 88, 120, 55, 50, 111, 67, 105, 110, + 77, 97, 81, 88, 101, 113, 106, 118, 68, 55, 111, 52, 101, 75, 77, 53, 70, 66, 51, 78, 76, 97, 104, 86, + 55, 68, 87, 101, 81, 106, 105, 102, 98, 107, 53, 56, 75, 121, 104, 66, 119, 98, 119, 88, 49, 104, 103, + 119, 103, 112, 112, 102, 118, 77, 71, 34, 44, 34, 115, 108, 111, 116, 34, 58, 50, 52, 57, 54, 48, 50, + 49, 55, 57, 125, 125, 44, 34, 115, 117, 98, 115, 99, 114, 105, 112, 116, 105, 111, 110, 34, 58, 52, + 48, 50, 56, 125, 125, + ]), + ); + socket.flush(); + }, 0); + }, + }, + hostname, + port, + }); + + const { promise, resolve } = Promise.withResolvers(); + + client = new WebSocket(`ws://${server.hostname}:${server.port}`); + client.addEventListener("error", err => { + throw new Error(err.message); + }); + client.addEventListener("close", err => { + if (!err.wasClean) { + throw new Error(err.reason); + } + }); + client.addEventListener("message", event => resolve(event.data.toString("utf-8"))); + + expect(await promise).toEqual( + `{"jsonrpc":"2.0","method":"transactionNotification","params":{"result":{"context":{"slot":249602179},"value":{"signature":"2P2xf3mU1vrnYcd1Lich8ELhhXx72oCinMaQXeqjvD7o4eKM5FB3NLahV7DWeQjifbk58KyhBwbwX1hgwgppfvMG","slot":249602179}},"subscription":4028}}`, + ); + } finally { + client?.close(); + server?.stop(true); + } + }); +}); diff --git a/test/js/web/websocket/websocket-server-echo.mjs b/test/js/web/websocket/websocket-server-echo.mjs index 7ce1b205b4..4962c1127d 100644 --- a/test/js/web/websocket/websocket-server-echo.mjs +++ b/test/js/web/websocket/websocket-server-echo.mjs @@ -90,5 +90,5 @@ process.on("exit", exitCode => { }); const hostname = process.env.HOST || "127.0.0.1"; -const port = parseInt(process.env.PORT) || 0; +const port = parseInt(process.env.PORT || "0"); server.listen(port, hostname); From 9ee39cac8b237f79a5aeb23dca9b16fe179f8695 Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:47:43 -0800 Subject: [PATCH 12/21] fix(install): semver prerelease bugfix (#9026) * make sure prereleases match correctly * add the file * few more tests * make sure pre is in query, not group --- src/install/semver.zig | 117 ++++++++++++++++++++++---------- test/cli/install/semver.test.ts | 10 +++ 2 files changed, 93 insertions(+), 34 deletions(-) diff --git a/src/install/semver.zig b/src/install/semver.zig index 7b47619f3b..25c3485f9b 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -1394,7 +1394,6 @@ pub const Range = struct { version: Version, comparator_buf: string, version_buf: string, - include_pre: bool, ) bool { const order = version.orderWithoutBuild(comparator.version, version_buf, comparator_buf); @@ -1404,11 +1403,11 @@ pub const Range = struct { else => false, }, .gt => switch 
(comparator.op) { - .gt, .gte => if (!include_pre) false else true, + .gt, .gte => true, else => false, }, .lt => switch (comparator.op) { - .lt, .lte => if (!include_pre) false else true, + .lt, .lte => true, else => false, }, }; @@ -1423,39 +1422,46 @@ pub const Range = struct { return true; } - // When the boundaries of a range do not include a pre-release tag on either side, - // we should not consider that '7.0.0-rc2' < "7.0.0" - // ``` - // > semver.satisfies("7.0.0-rc2", "<=7.0.0") - // false - // > semver.satisfies("7.0.0-rc2", ">=7.0.0") - // false - // > semver.satisfies("7.0.0-rc2", "<=7.0.0-rc2") - // true - // > semver.satisfies("7.0.0-rc2", ">=7.0.0-rc2") - // true - // ``` - // - // - https://github.com/npm/node-semver#prerelease-tags - // - https://github.com/npm/node-semver/blob/cce61804ba6f997225a1267135c06676fe0524d2/classes/range.js#L505-L539 - var include_pre = true; - if (version.tag.hasPre()) { - if (!has_right) { - if (!range.left.version.tag.hasPre()) { - include_pre = false; - } - } else { - if (!range.left.version.tag.hasPre() and !range.right.version.tag.hasPre()) { - include_pre = false; - } - } - } - - if (!range.left.satisfies(version, range_buf, version_buf, include_pre)) { + if (!range.left.satisfies(version, range_buf, version_buf)) { return false; } - if (has_right and !range.right.satisfies(version, range_buf, version_buf, include_pre)) { + if (has_right and !range.right.satisfies(version, range_buf, version_buf)) { + return false; + } + + return true; + } + + pub fn satisfiesPre(range: Range, version: Version, range_buf: string, version_buf: string, pre_matched: *bool) bool { + if (comptime Environment.allow_assert) { + std.debug.assert(version.tag.hasPre()); + } + const has_left = range.hasLeft(); + const has_right = range.hasRight(); + + if (!has_left) { + return true; + } + + // If left has prerelease check if major,minor,patch matches with left. If + // not, check the same with right if right exists and has prerelease. + pre_matched.* = pre_matched.* or + (range.left.version.tag.hasPre() and + version.patch == range.left.version.patch and + version.minor == range.left.version.minor and + version.major == range.left.version.major) or + (has_right and + range.right.version.tag.hasPre() and + version.patch == range.right.version.patch and + version.minor == range.right.version.minor and + version.major == range.right.version.major); + + if (!range.left.satisfies(version, range_buf, version_buf)) { + return false; + } + + if (has_right and !range.right.satisfies(version, range_buf, version_buf)) { return false; } @@ -1502,6 +1508,29 @@ pub const Query = struct { ); } + pub fn satisfiesPre(list: *const List, version: Version, list_buf: string, version_buf: string) bool { + if (comptime Environment.allow_assert) { + std.debug.assert(version.tag.hasPre()); + } + + // `version` has a prerelease tag: + // - needs to satisfy each comparator in the query ( AND AND ...) 
like normal comparison + // - if it does, also needs to match major, minor, patch with at least one of the other versions + // with a prerelease + // https://github.com/npm/node-semver/blob/ac9b35769ab0ddfefd5a3af4a3ecaf3da2012352/classes/range.js#L505 + var pre_matched = false; + return (list.head.satisfiesPre( + version, + list_buf, + version_buf, + &pre_matched, + ) and pre_matched) or (list.next orelse return false).satisfiesPre( + version, + list_buf, + version_buf, + ); + } + pub fn eql(lhs: *const List, rhs: *const List) bool { if (!lhs.head.eql(&rhs.head)) return false; @@ -1647,7 +1676,10 @@ pub const Query = struct { group_buf: string, version_buf: string, ) bool { - return group.head.satisfies(version, group_buf, version_buf); + return if (version.tag.hasPre()) + group.head.satisfiesPre(version, group_buf, version_buf) + else + group.head.satisfies(version, group_buf, version_buf); } }; @@ -1672,6 +1704,23 @@ pub const Query = struct { ); } + pub fn satisfiesPre(query: *const Query, version: Version, query_buf: string, version_buf: string, pre_matched: *bool) bool { + if (comptime Environment.allow_assert) { + std.debug.assert(version.tag.hasPre()); + } + return query.range.satisfiesPre( + version, + query_buf, + version_buf, + pre_matched, + ) and (query.next orelse return true).satisfiesPre( + version, + query_buf, + version_buf, + pre_matched, + ); + } + const Token = struct { tag: Tag = Tag.none, wildcard: Wildcard = Wildcard.none, diff --git a/test/cli/install/semver.test.ts b/test/cli/install/semver.test.ts index 8c955a092f..c9c5efc9ef 100644 --- a/test/cli/install/semver.test.ts +++ b/test/cli/install/semver.test.ts @@ -340,6 +340,16 @@ describe("Bun.semver.satisfies()", () => { testSatisfies("5.0 || 1.2 - 1.3", "5.0.2", true); testSatisfies("5.0 || 1.2 - 1.3 || >8", "9.0.2", true); + testSatisfies(">=0.34.0-next.3 <1.0.0", "0.34.0-next.8", true); + testSatisfies("<1.0.0", "0.34.0-next.8", false); + + testSatisfies("<=7.0.0", "7.0.0-rc2", false); + testSatisfies(">=7.0.0", "7.0.0-rc2", false); + testSatisfies("<=7.0.0-rc2", "7.0.0-rc2", true); + testSatisfies(">=7.0.0-rc2", "7.0.0-rc2", true); + + testSatisfies("^1.2.3-pr.1 || >=1.2.4-alpha", "1.2.4-alpha.notready", true); + const notPassing = [ "0.1.0", "0.10.0", From 2c6cd2439331c840b6bd653e25840a3140ca763b Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 21 Feb 2024 18:31:14 -0800 Subject: [PATCH 13/21] Implement expect().toBeOneOf(), fix small memory leaks in expect matchers (#9043) * Add .toBeOneOf * Fix memory leaks in .toContain(), .toInclude(), toContainKeys(), toBeTypeOf(), toEqualIgnoringWhitespace * Handle exception * Ignore non-bool * Propagate errors when the message callback throws * fixups * Update preload.ts * Update jest-extended.test.js * Update expect.zig * comments --------- Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com> --- packages/bun-types/test.d.ts | 13 ++ src/bun.js/bindings/bindings.zig | 10 +- src/bun.js/test/expect.zig | 196 +++++++++++++++++----- src/bun.js/test/jest.classes.ts | 4 + src/bun.js/webcore/encoding.zig | 6 +- src/bun.js/webcore/streams.zig | 16 +- test/js/bun/test/expect-extend.test.js | 28 +++- test/js/bun/test/expect-extend.types.d.ts | 1 + test/js/bun/test/jest-extended.test.js | 34 ++++ test/js/node/process/process.test.js | 7 +- test/preload.ts | 11 +- 11 files changed, 267 insertions(+), 59 deletions(-) diff --git a/packages/bun-types/test.d.ts b/packages/bun-types/test.d.ts index 4e26cf8919..33e8c0d8d6 100644 --- 
a/packages/bun-types/test.d.ts +++ b/packages/bun-types/test.d.ts @@ -873,6 +873,19 @@ declare module "bun:test" { * @param expected the expected value */ toStrictEqual(expected: T): void; + /** + * Asserts that the value is deep equal to an element in the expected array. + * + * The value must be an array or iterable, which includes strings. + * + * @example + * expect(1).toBeOneOf([1,2,3]); + * expect("foo").toBeOneOf(["foo", "bar"]); + * expect(true).toBeOneOf(new Set([true])); + * + * @param expected the expected value + */ + toBeOneOf(expected: Array | Iterable): void; /** * Asserts that a value contains what is expected. * diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index f4ffdc5a32..26bcd5d7f3 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -4094,6 +4094,11 @@ pub const JSValue = enum(JSValueReprInt) { }); } + pub fn hasOwnPropertyValue(this: JSValue, globalThis: *JSGlobalObject, value: JSC.JSValue) bool { + // TODO: add a binding for this + return hasOwnProperty(this, globalThis, value.getZigString(globalThis)); + } + pub fn hasOwnProperty(this: JSValue, globalThis: *JSGlobalObject, key: ZigString) bool { return cppFn("hasOwnProperty", .{ this, globalThis, key }); } @@ -4489,8 +4494,9 @@ pub const JSValue = enum(JSValueReprInt) { /// Call `toString()` on the JSValue and clone the result. /// On exception, this returns null. pub fn toSliceOrNull(this: JSValue, globalThis: *JSGlobalObject) ?ZigString.Slice { - var str = this.toStringOrNull(globalThis) orelse return null; - return str.toSlice(globalThis, globalThis.allocator()); + const str = bun.String.tryFromJS(this, globalThis) orelse return null; + defer str.deref(); + return str.toUTF8(bun.default_allocator); } /// Call `toString()` on the JSValue and clone the result. 
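Not part of the patch itself: a minimal usage sketch of the new `toBeOneOf()` matcher declared above, assuming the deep-equality, any-iterable behavior described in its JSDoc and in the `jest-extended.test.js` cases later in this series.

```js
import { test, expect } from "bun:test";

test("toBeOneOf() usage sketch", () => {
  // Any array or iterable of candidates is accepted.
  expect(2).toBeOneOf([1, 2, 3]);
  expect("foo").toBeOneOf(new Set(["foo", "bar"]));

  // Candidates are matched with deep equality, not reference identity.
  expect({ a: 1 }).toBeOneOf([{ a: 1 }, { b: 2 }]);

  // `.not` inverts the check as usual.
  expect(4).not.toBeOneOf([1, 2, 3]);
});
```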
diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index 990c1eb881..85e7f70fba 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -596,6 +596,92 @@ pub const Expect = struct { return .zero; } + pub fn toBeOneOf( + this: *Expect, + globalObject: *JSC.JSGlobalObject, + callFrame: *JSC.CallFrame, + ) callconv(.C) JSC.JSValue { + defer this.postMatch(globalObject); + const thisValue = callFrame.this(); + const arguments_ = callFrame.arguments(1); + const arguments = arguments_.ptr[0..arguments_.len]; + + if (arguments.len < 1) { + globalObject.throwInvalidArguments("toBeOneOf() takes 1 argument", .{}); + return .zero; + } + + incrementExpectCallCounter(); + + const expected = this.getValue(globalObject, thisValue, "toBeOneOf", "expected") orelse return .zero; + const list_value: JSValue = arguments[0]; + + const not = this.flags.not; + var pass = false; + + const ExpectedEntry = struct { + globalObject: *JSC.JSGlobalObject, + expected: JSValue, + pass: *bool, + }; + + if (list_value.jsTypeLoose().isArrayLike()) { + var itr = list_value.arrayIterator(globalObject); + while (itr.next()) |item| { + // Confusingly, jest-extended uses `deepEqual`, instead of `toBe` + if (item.jestDeepEquals(expected, globalObject)) { + pass = true; + break; + } + } + } else if (list_value.isIterable(globalObject)) { + var expected_entry = ExpectedEntry{ + .globalObject = globalObject, + .expected = expected, + .pass = &pass, + }; + list_value.forEach(globalObject, &expected_entry, struct { + pub fn sameValueIterator( + _: *JSC.VM, + _: *JSGlobalObject, + entry_: ?*anyopaque, + item: JSValue, + ) callconv(.C) void { + const entry = bun.cast(*ExpectedEntry, entry_.?); + // Confusingly, jest-extended uses `deepEqual`, instead of `toBe` + if (item.jestDeepEquals(entry.expected, entry.globalObject)) { + entry.pass.* = true; + // TODO(perf): break out of the `forEach` when a match is found + } + } + }.sameValueIterator); + } else { + globalObject.throw("Received value must be an array type, or both received and expected values must be strings.", .{}); + return .zero; + } + + if (not) pass = !pass; + if (pass) return .undefined; + + // handle failure + var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalObject, .quote_strings = true }; + const value_fmt = list_value.toFmt(globalObject, &formatter); + const expected_fmt = expected.toFmt(globalObject, &formatter); + if (not) { + const received_fmt = list_value.toFmt(globalObject, &formatter); + const expected_line = "Expected to not be one of: {any}\nReceived: {any}\n"; + const fmt = comptime getSignature("toBeOneOf", "expected", true) ++ "\n\n" ++ expected_line; + globalObject.throwPretty(fmt, .{ received_fmt, expected_fmt }); + return .zero; + } + + const expected_line = "Expected to be one of: {any}\n"; + const received_line = "Received: {any}\n"; + const fmt = comptime getSignature("toBeOneOf", "expected", false) ++ "\n\n" ++ expected_line ++ received_line; + globalObject.throwPretty(fmt, .{ value_fmt, expected_fmt }); + return .zero; + } + pub fn toContain( this: *Expect, globalObject: *JSC.JSGlobalObject, @@ -635,9 +721,9 @@ pub const Expect = struct { } } } else if (value.isStringLiteral() and expected.isStringLiteral()) { - const value_string = value.toString(globalObject).toSlice(globalObject, default_allocator); + const value_string = value.toSlice(globalObject, default_allocator); defer value_string.deinit(); - const expected_string = expected.toString(globalObject).toSlice(globalObject, default_allocator); 
+ const expected_string = expected.toSlice(globalObject, default_allocator); defer expected_string.deinit(); if (expected_string.len == 0) { // edge case empty string is always contained @@ -681,7 +767,7 @@ pub const Expect = struct { const expected_fmt = expected.toFmt(globalObject, &formatter); if (not) { const received_fmt = value.toFmt(globalObject, &formatter); - const expected_line = "Expected to not contain: {any}\n\nReceived: {any}\n"; + const expected_line = "Expected to not contain: {any}\nReceived: {any}\n"; const fmt = comptime getSignature("toContain", "expected", true) ++ "\n\n" ++ expected_line; globalObject.throwPretty(fmt, .{ expected_fmt, received_fmt }); return .zero; @@ -716,7 +802,7 @@ pub const Expect = struct { const value: JSValue = this.getValue(globalObject, thisValue, "toContainKey", "expected") orelse return .zero; const not = this.flags.not; - var pass = value.hasOwnProperty(globalObject, expected.toString(globalObject).getZigString(globalObject)); + var pass = value.hasOwnPropertyValue(globalObject, expected); if (not) pass = !pass; if (pass) return thisValue; @@ -727,7 +813,7 @@ pub const Expect = struct { const expected_fmt = expected.toFmt(globalObject, &formatter); if (not) { const received_fmt = value.toFmt(globalObject, &formatter); - const expected_line = "Expected to not contain: {any}\n\nReceived: {any}\n"; + const expected_line = "Expected to not contain: {any}\nReceived: {any}\n"; const fmt = comptime getSignature("toContainKey", "expected", true) ++ "\n\n" ++ expected_line; globalObject.throwPretty(fmt, .{ expected_fmt, received_fmt }); return .zero; @@ -776,7 +862,7 @@ pub const Expect = struct { while (i < count) : (i += 1) { const key = expected.getIndex(globalObject, i); - if (!value.hasOwnProperty(globalObject, key.toString(globalObject).getZigString(globalObject))) { + if (!value.hasOwnPropertyValue(globalObject, key)) { pass = false; break; } @@ -791,7 +877,7 @@ pub const Expect = struct { const expected_fmt = expected.toFmt(globalObject, &formatter); if (not) { const received_fmt = value.toFmt(globalObject, &formatter); - const expected_line = "Expected to not contain: {any}\n\nReceived: {any}\n"; + const expected_line = "Expected to not contain: {any}\nReceived: {any}\n"; const fmt = comptime getSignature("toContainKeys", "expected", true) ++ "\n\n" ++ expected_line; globalObject.throwPretty(fmt, .{ expected_fmt, received_fmt }); return .zero; @@ -840,7 +926,7 @@ pub const Expect = struct { while (i < count) : (i += 1) { const key = expected.getIndex(globalObject, i); - if (value.hasOwnProperty(globalObject, key.toString(globalObject).getZigString(globalObject))) { + if (value.hasOwnPropertyValue(globalObject, key)) { pass = true; break; } @@ -855,7 +941,7 @@ pub const Expect = struct { const expected_fmt = expected.toFmt(globalObject, &formatter); if (not) { const received_fmt = value.toFmt(globalObject, &formatter); - const expected_line = "Expected to not contain: {any}\n\nReceived: {any}\n"; + const expected_line = "Expected to not contain: {any}\nReceived: {any}\n"; const fmt = comptime getSignature("toContainAnyKeys", "expected", true) ++ "\n\n" ++ expected_line; globalObject.throwPretty(fmt, .{ expected_fmt, received_fmt }); return .zero; @@ -911,9 +997,9 @@ pub const Expect = struct { } } else if (value_type.isStringLike() and expected_type.isStringLike()) { if (expected_type.isStringObjectLike() and value_type.isString()) pass = false else { - const value_string = value.toString(globalObject).toSlice(globalObject, 
default_allocator); + const value_string = value.toSliceOrNull(globalObject) orelse return .zero; defer value_string.deinit(); - const expected_string = expected.toString(globalObject).toSlice(globalObject, default_allocator); + const expected_string = expected.toSliceOrNull(globalObject) orelse return .zero; defer expected_string.deinit(); // jest does not have a `typeof === "string"` check for `toContainEqual`. @@ -2498,18 +2584,19 @@ pub const Expect = struct { const expected = arguments[0]; expected.ensureStillAlive(); - const expectedAsStr = expected.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - incrementExpectCallCounter(); - if (!expected.isString()) { globalThis.throwInvalidArguments("toBeTypeOf() requires a string argument", .{}); return .zero; } - if (!JSTypeOfMap.has(expectedAsStr)) { + const expected_type = expected.toBunString(globalThis); + defer expected_type.deref(); + incrementExpectCallCounter(); + + const typeof = expected_type.inMap(JSTypeOfMap) orelse { globalThis.throwInvalidArguments("toBeTypeOf() requires a valid type string argument ('function', 'object', 'bigint', 'boolean', 'number', 'string', 'symbol', 'undefined')", .{}); return .zero; - } + }; const not = this.flags.not; var pass = false; @@ -2537,7 +2624,7 @@ pub const Expect = struct { return .zero; } - pass = strings.eql(expectedAsStr, whatIsTheType); + pass = strings.eql(typeof, whatIsTheType); if (not) pass = !pass; if (pass) return .undefined; @@ -2880,19 +2967,24 @@ pub const Expect = struct { var pass = value.isString() and expected.isString(); if (pass) { - const valueStr = value.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - const expectedStr = expected.toString(globalThis).toSlice(globalThis, default_allocator).slice(); + const value_slice = value.toSlice(globalThis, default_allocator); + defer value_slice.deinit(); + const expected_slice = expected.toSlice(globalThis, default_allocator); + defer expected_slice.deinit(); + + const value_utf8 = value_slice.slice(); + const expected_utf8 = expected_slice.slice(); var left: usize = 0; var right: usize = 0; // Skip leading whitespaces - while (left < valueStr.len and std.ascii.isWhitespace(valueStr[left])) left += 1; - while (right < expectedStr.len and std.ascii.isWhitespace(expectedStr[right])) right += 1; + while (left < value_utf8.len and std.ascii.isWhitespace(value_utf8[left])) left += 1; + while (right < expected_utf8.len and std.ascii.isWhitespace(expected_utf8[right])) right += 1; - while (left < valueStr.len and right < expectedStr.len) { - const left_char = valueStr[left]; - const right_char = expectedStr[right]; + while (left < value_utf8.len and right < expected_utf8.len) { + const left_char = value_utf8[left]; + const right_char = expected_utf8[right]; if (left_char != right_char) { pass = false; @@ -2903,11 +2995,11 @@ pub const Expect = struct { right += 1; // Skip trailing whitespaces - while (left < valueStr.len and std.ascii.isWhitespace(valueStr[left])) left += 1; - while (right < expectedStr.len and std.ascii.isWhitespace(expectedStr[right])) right += 1; + while (left < value_utf8.len and std.ascii.isWhitespace(value_utf8[left])) left += 1; + while (right < expected_utf8.len and std.ascii.isWhitespace(expected_utf8[right])) right += 1; } - if (left < valueStr.len or right < expectedStr.len) { + if (left < value_utf8.len or right < expected_utf8.len) { pass = false; } } @@ -3093,9 +3185,11 @@ pub const Expect = struct { var pass = value.isString(); if (pass) { - const value_string = 
value.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - const expected_string = expected.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - pass = strings.contains(value_string, expected_string) or expected_string.len == 0; + const value_string = value.toSliceOrNull(globalThis) orelse return .zero; + defer value_string.deinit(); + const expected_string = expected.toSliceOrNull(globalThis) orelse return .zero; + defer expected_string.deinit(); + pass = strings.contains(value_string.slice(), expected_string.slice()) or expected_string.len == 0; } const not = this.flags.not; @@ -3326,9 +3420,11 @@ pub const Expect = struct { var pass = value.isString(); if (pass) { - const value_string = value.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - const expected_string = expected.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - pass = strings.startsWith(value_string, expected_string) or expected_string.len == 0; + const value_string = value.toSliceOrNull(globalThis) orelse return .zero; + defer value_string.deinit(); + const expected_string = expected.toSliceOrNull(globalThis) orelse return .zero; + defer expected_string.deinit(); + pass = strings.startsWith(value_string.slice(), expected_string.slice()) or expected_string.len == 0; } const not = this.flags.not; @@ -3381,9 +3477,11 @@ pub const Expect = struct { var pass = value.isString(); if (pass) { - const value_string = value.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - const expected_string = expected.toString(globalThis).toSlice(globalThis, default_allocator).slice(); - pass = strings.endsWith(value_string, expected_string) or expected_string.len == 0; + const value_string = value.toSliceOrNull(globalThis) orelse return .zero; + defer value_string.deinit(); + const expected_string = expected.toSliceOrNull(globalThis) orelse return .zero; + defer expected_string.deinit(); + pass = strings.endsWith(value_string.slice(), expected_string.slice()) or expected_string.len == 0; } const not = this.flags.not; @@ -4089,7 +4187,8 @@ pub const Expect = struct { const is_valid = valid: { if (result.isObject()) { if (result.get(globalObject, "pass")) |pass_value| { - pass = pass_value.toBoolean(); + pass = pass_value.toBooleanSlow(globalObject); + if (globalObject.hasException()) return false; if (result.get(globalObject, "message")) |message_value| { if (!message_value.isString() and !message_value.isCallable(globalObject.vm())) { @@ -4120,16 +4219,28 @@ pub const Expect = struct { message_text = bun.String.static("No message was specified for this matcher."); } else if (message.isString()) { message_text = message.toBunString(globalObject); - } else { // callable + } else { + if (comptime Environment.allow_assert) + std.debug.assert(message.isCallable(globalObject.vm())); // checked above + var message_result = message.callWithGlobalThis(globalObject, &[_]JSValue{}); std.debug.assert(!message_result.isEmpty()); if (message_result.toError()) |err| { globalObject.throwValue(err); return false; } - if (message_result.toStringOrNull(globalObject)) |str| { - message_text = bun.String.init(str.getZigString(globalObject)); + if (bun.String.tryFromJS(message_result, globalObject)) |str| { + message_text = str; } else { + if (globalObject.hasException()) return false; + var formatter = JSC.ConsoleObject.Formatter{ + .globalThis = globalObject, + .quote_strings = true, + }; + globalObject.throw( + "Expected custom matcher message to return a string, but 
got: {}", + .{message_result.toFmt(globalObject, &formatter)}, + ); return false; } } @@ -4900,7 +5011,8 @@ pub const ExpectMatcherUtils = struct { globalObject.throw("matcherHint: the first argument (matcher name) must be a string", .{}); return .zero; } - const matcher_name = bun.String.init(arguments[0].toString(globalObject).getZigString(globalObject)); + const matcher_name = arguments[0].toBunString(globalObject); + defer matcher_name.deref(); const received = if (arguments.len > 1) arguments[1] else bun.String.static("received").toJS(globalObject); const expected = if (arguments.len > 2) arguments[2] else bun.String.static("expected").toJS(globalObject); @@ -4921,7 +5033,7 @@ pub const ExpectMatcherUtils = struct { return .zero; } if (options.get(globalObject, "isNot")) |val| { - is_not = val.toBoolean(); + is_not = val.coerce(bool, globalObject); } if (options.get(globalObject, "comment")) |val| { comment = val.toStringOrNull(globalObject); diff --git a/src/bun.js/test/jest.classes.ts b/src/bun.js/test/jest.classes.ts index 9d5a5b8b14..62905a5a5a 100644 --- a/src/bun.js/test/jest.classes.ts +++ b/src/bun.js/test/jest.classes.ts @@ -470,6 +470,10 @@ export default [ fn: "toThrowErrorMatchingInlineSnapshot", length: 1, }, + toBeOneOf: { + fn: "toBeOneOf", + length: 1, + }, not: { getter: "getNot", this: true, diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig index 50b1737953..74d9731c62 100644 --- a/src/bun.js/webcore/encoding.zig +++ b/src/bun.js/webcore/encoding.zig @@ -600,9 +600,13 @@ pub const TextDecoder = struct { if (arguments.len > 1 and arguments[1].isObject()) { if (arguments[1].get(globalThis, "stream")) |stream| { - if (stream.toBoolean()) { + if (stream.coerce(bool, globalThis)) { return this.decodeSlice(globalThis, array_buffer.slice(), true); } + + if (globalThis.hasException()) { + return JSValue.zero; + } } } diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 24fe68d0da..2b56ac26c8 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -500,14 +500,18 @@ pub const StreamStart = union(Tag) { var chunk_size: JSC.WebCore.Blob.SizeType = 0; var empty = true; - if (value.get(globalThis, "asUint8Array")) |as_array| { - as_uint8array = as_array.toBoolean(); - empty = false; + if (value.get(globalThis, "asUint8Array")) |val| { + if (val.isBoolean()) { + as_uint8array = val.toBoolean(); + empty = false; + } } - if (value.get(globalThis, "stream")) |as_array| { - stream = as_array.toBoolean(); - empty = false; + if (value.get(globalThis, "stream")) |val| { + if (val.isBoolean()) { + stream = val.toBoolean(); + empty = false; + } } if (value.get(globalThis, "highWaterMark")) |chunkSize| { diff --git a/test/js/bun/test/expect-extend.test.js b/test/js/bun/test/expect-extend.test.js index 14a2acd158..1ef792e2fa 100644 --- a/test/js/bun/test/expect-extend.test.js +++ b/test/js/bun/test/expect-extend.test.js @@ -7,12 +7,23 @@ * `NODE_OPTIONS=--experimental-vm-modules npx jest test/js/bun/test/expect-extend.test.js` */ +import { withoutAggressiveGC } from "harness"; import test_interop from "./test-interop.js"; var { isBun, expect, describe, test, it } = await test_interop(); //expect.addSnapshotSerializer(alignedAnsiStyleSerializer); expect.extend({ + // @ts-expect-error + _toHaveMessageThatThrows(actual, expected) { + const message = () => ({ + [Symbol.toPrimitive]: () => { + throw new Error("i have successfully propagated the error message!"); + }, + }); + + return { message, pass: 42 }; + }, 
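  // Notes (not part of this patch): the matcher above returns `pass: 42` on
  // purpose, since this series coerces a non-boolean `pass` to a boolean rather
  // than rejecting it, and its `message` callback returns an object whose
  // Symbol.toPrimitive throws, which the "propagate errors" test at the bottom
  // of this file expects to surface as the assertion error.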
_toBeDivisibleBy(actual, expected) { const pass = typeof actual === "number" && actual % expected === 0; const message = pass @@ -308,8 +319,17 @@ describe("async support", () => { }); it("should not crash under intensive usage", () => { - for (let i = 0; i < 10000; ++i) { - expect(i)._toBeDivisibleBy(1); - expect(i).toEqual(expect._toBeDivisibleBy(1)); - } + withoutAggressiveGC(() => { + for (let i = 0; i < 10000; ++i) { + expect(i)._toBeDivisibleBy(1); + expect(i).toEqual(expect._toBeDivisibleBy(1)); + } + }); + Bun.gc(true); +}); + +it("should propagate errors from calling .toString() on the message callback value", () => { + expect(() => expect("abc").not._toHaveMessageThatThrows("def")).toThrow( + "i have successfully propagated the error message!", + ); }); diff --git a/test/js/bun/test/expect-extend.types.d.ts b/test/js/bun/test/expect-extend.types.d.ts index 50b4ea5a82..e197eb8d5c 100644 --- a/test/js/bun/test/expect-extend.types.d.ts +++ b/test/js/bun/test/expect-extend.types.d.ts @@ -16,6 +16,7 @@ interface CustomMatchersForTest { _toCustomB(): any; _toThrowErrorMatchingSnapshot(): any; // TODO: remove when implemented + _toHaveMessageThatThrows(a: any): any; } declare module "bun:test" { diff --git a/test/js/bun/test/jest-extended.test.js b/test/js/bun/test/jest-extended.test.js index 8b0614c13a..0cc9c42aac 100644 --- a/test/js/bun/test/jest-extended.test.js +++ b/test/js/bun/test/jest-extended.test.js @@ -99,6 +99,40 @@ describe("jest-extended", () => { // toBeOneOf('toSatisfy()') + test("toBeOneOf()", () => { + expect(1).toBeOneOf([1, 2, 3]); + expect(2).toBeOneOf([1, 2, 3]); + expect(3).toBeOneOf([1, 2, 3]); + expect(4).not.toBeOneOf([1, 2, 3]); + expect("a").toBeOneOf(["a", "b", "c"]); + expect("b").toBeOneOf(["a", "b", "c"]); + expect("c").toBeOneOf(["a", "b", "c"]); + expect("d").not.toBeOneOf(["a", "b", "c"]); + expect(true).toBeOneOf([true, false]); + expect(false).toBeOneOf([true, false]); + expect(null).toBeOneOf([null, undefined]); + expect(undefined).toBeOneOf([null, undefined]); + const abc = { c: 1 }; + expect({}).not.toBeOneOf([{ b: 1 }, []]); + expect(abc).toBeOneOf([abc, {}]); + expect({}).not.toBeOneOf([abc, { a: 1 }]); + try { + expect(0).toBeOneOf([1, 2]); + expect.unreachable(); + } catch (e) { + expect(e.message).not.toContain("unreachable"); + if (typeof Bun === "object") expect(Bun.inspect(e)).not.toBeEmpty(); // verify that logging it doesn't cause a crash + } + + try { + expect(1).not.toBeOneOf([1, 2]); + expect.unreachable(); + } catch (e) { + expect(e.message).not.toContain("unreachable"); + if (typeof Bun === "object") expect(Bun.inspect(e)).not.toBeEmpty(); // verify that logging it doesn't cause a crash + } + }); + test("toBeNil()", () => { expect(null).toBeNil(); expect(undefined).toBeNil(); diff --git a/test/js/node/process/process.test.js b/test/js/node/process/process.test.js index 62646daefe..e81891e27c 100644 --- a/test/js/node/process/process.test.js +++ b/test/js/node/process/process.test.js @@ -70,9 +70,10 @@ it("process.release", () => { expect(process.release.name).toBe("node"); const platform = process.platform == "win32" ? 
"windows" : process.platform; const arch = { arm64: "aarch64", x64: "x64" }[process.arch] || process.arch; - expect(process.release.sourceUrl).toEqual( - `https://github.com/oven-sh/bun/releases/download/bun-v${process.versions.bun}/bun-${platform}-${arch}.zip`, - ); + const nonbaseline = `https://github.com/oven-sh/bun/releases/download/bun-v${process.versions.bun}/bun-${platform}-${arch}.zip`; + const baseline = `https://github.com/oven-sh/bun/releases/download/bun-v${process.versions.bun}/bun-${platform}-${arch}-baseline.zip`; + + expect(process.release.sourceUrl).toBeOneOf([nonbaseline, baseline]); }); it("process.env", () => { diff --git a/test/preload.ts b/test/preload.ts index 9216ac179d..5e472661a6 100644 --- a/test/preload.ts +++ b/test/preload.ts @@ -7,4 +7,13 @@ for (let key in process.env) { delete process.env[key]; } -Bun.$.env(Object.assign(process.env, harness.bunEnv)); +for (let key in harness.bunEnv) { + if (key === "TZ") continue; + if (harness.bunEnv[key] === undefined) { + continue; + } + + process.env[key] = harness.bunEnv[key] + ""; +} + +Bun.$.env(process.env); From 048ae7c7b8dbb7d0accb8acfbcf9e3c6cbbc0c85 Mon Sep 17 00:00:00 2001 From: Zack Radisic <56137411+zackradisic@users.noreply.github.com> Date: Wed, 21 Feb 2024 18:32:42 -0800 Subject: [PATCH 14/21] shell: Fix latin-1 template literal strings (#9040) * Fix latin-1 * Move utf8 check above 8bit check --- src/shell/shell.zig | 31 ++++++++++++++++++++---------- test/js/bun/shell/bunshell.test.ts | 9 +++++++++ 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/src/shell/shell.zig b/src/shell/shell.zig index ca70a12e50..f713808224 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -1960,13 +1960,25 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { fn appendStringToStrPool(self: *@This(), bunstr: bun.String) !void { const start = self.strpool.items.len; - if (bunstr.is8Bit() or bunstr.isUTF8()) { - try self.strpool.appendSlice(bunstr.byteSlice()); - } else { + if (bunstr.isUTF16()) { const utf16 = bunstr.utf16(); const additional = bun.simdutf.simdutf__utf8_length_from_utf16le(utf16.ptr, utf16.len); try self.strpool.ensureUnusedCapacity(additional); try bun.strings.convertUTF16ToUTF8Append(&self.strpool, bunstr.utf16()); + } else if (bunstr.isUTF8()) { + try self.strpool.appendSlice(bunstr.byteSlice()); + } else if (bunstr.is8Bit()) { + if (isAllAscii(bunstr.byteSlice())) { + try self.strpool.appendSlice(bunstr.byteSlice()); + } else { + const bytes = bunstr.byteSlice(); + const non_ascii_idx = bun.strings.firstNonASCII(bytes) orelse 0; + + if (non_ascii_idx > 0) { + try self.strpool.appendSlice(bytes[0..non_ascii_idx]); + } + self.strpool = try bun.strings.allocateLatin1IntoUTF8WithList(self.strpool, self.strpool.items.len, []const u8, bytes[non_ascii_idx..]); + } } const end = self.strpool.items.len; self.j += @intCast(end - start); @@ -2899,16 +2911,15 @@ const SPECIAL_CHARS = [_]u8{ '$', '>', '&', '|', '=', ';', '\n', '{', '}', ',', const BACKSLASHABLE_CHARS = [_]u8{ '$', '`', '"', '\\' }; pub fn escapeBunStr(bunstr: bun.String, outbuf: *std.ArrayList(u8), comptime add_quotes: bool) !bool { - // latin-1 or ascii - if (bunstr.is8Bit()) { - try escape8Bit(bunstr.byteSlice(), outbuf, add_quotes); - return true; - } if (bunstr.isUTF16()) { return try escapeUtf16(bunstr.utf16(), outbuf, add_quotes); } - // Otherwise is utf-8 - try escapeWTF8(bunstr.byteSlice(), outbuf, add_quotes); + if (bunstr.isUTF8()) { + try escapeWTF8(bunstr.byteSlice(), outbuf, add_quotes); + return true; + } + 
// otherwise should be latin-1 or ascii + try escape8Bit(bunstr.byteSlice(), outbuf, add_quotes); return true; } diff --git a/test/js/bun/shell/bunshell.test.ts b/test/js/bun/shell/bunshell.test.ts index 5e0e168253..6a08a85573 100644 --- a/test/js/bun/shell/bunshell.test.ts +++ b/test/js/bun/shell/bunshell.test.ts @@ -269,6 +269,15 @@ describe("bunshell", () => { // }); }); + describe("latin-1", async () => { + test("basic", async () => { + await TestBuilder.command`echo ${"à"}`.stdout("à\n").run(); + await TestBuilder.command`echo ${" à"}`.stdout(" à\n").run(); + await TestBuilder.command`echo ${"à¿"}`.stdout("à¿\n").run(); + await TestBuilder.command`echo ${'"à¿"'}`.stdout('"à¿"\n').run(); + }); + }); + test("redirect Uint8Array", async () => { const buffer = new Uint8Array(1 << 20); const result = await $`cat ${import.meta.path} > ${buffer}`; From 44f7ddd2ffec6ead05f955dc71172d12e4be25ed Mon Sep 17 00:00:00 2001 From: dave caruso Date: Wed, 21 Feb 2024 18:33:54 -0800 Subject: [PATCH 15/21] fix: ConsoleObject handles proxy better (#9042) * fix: ConsoleObject handles proxy better * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/bun.js/ConsoleObject.zig | 79 +++++++++----------- src/bun.js/bindings/bindings.cpp | 6 ++ src/bun.js/bindings/bindings.zig | 12 +++ src/js/builtins/ProcessObjectInternals.ts | 8 ++ test/js/web/console/console-log.expected.txt | 5 ++ test/js/web/console/console-log.js | 31 ++++++++ 6 files changed, 97 insertions(+), 44 deletions(-) diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index e775e351d8..bf3c8b9067 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -915,7 +915,6 @@ pub const Formatter = struct { JSON, toJSON, NativeCode, - ArrayBuffer, JSX, Event, @@ -923,6 +922,9 @@ pub const Formatter = struct { GetterSetter, CustomGetterSetter, + Proxy, + RevokedProxy, + pub fn isPrimitive(this: Tag) bool { return switch (this) { .String, @@ -971,15 +973,20 @@ pub const Formatter = struct { JSON: void, toJSON: void, NativeCode: void, - ArrayBuffer: void, JSX: void, Event: void, GetterSetter: void, CustomGetterSetter: void, + Proxy: void, + RevokedProxy: void, pub fn isPrimitive(this: @This()) bool { return @as(Tag, this).isPrimitive(); } + + pub fn tag(this: @This()) Tag { + return @as(Tag, this); + } }, cell: JSValue.JSType = JSValue.JSType.Cell, }; @@ -1126,10 +1133,17 @@ pub const Formatter = struct { .Object, .FinalObject, - .ProxyObject, .ModuleNamespaceObject, => .Object, + .ProxyObject => tag: { + const handler = value.getProxyInternalField(.handler); + if (handler == .zero or handler == .undefined or handler == .null) { + break :tag .RevokedProxy; + } + break :tag .Proxy; + }, + .GlobalObject => if (!opts.hide_global) .Object else @@ -2877,7 +2891,20 @@ pub const Formatter = struct { writer.writeAll(" ]"); }, - else => {}, + .RevokedProxy => { + this.addForNewLine("".len); + writer.print(comptime Output.prettyFmt("\\", enable_ansi_colors), .{}); + }, + .Proxy => { + const target = value.getProxyInternalField(.target); + if (Environment.allow_assert) { + // Proxy does not allow non-objects here. + std.debug.assert(target.isCell()); + } + // TODO: if (options.showProxy), print like `Proxy { target: ..., handlers: ... }` + // this is default off so it is not used. 
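// Note (not part of this diff): because the target is read straight from the
// proxy's internal field and then formatted directly, console.log never invokes
// the proxy's get/set traps; the console-log.js test below relies on that by
// logging "FAILED: GET"/"FAILED: SET" from its traps.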
+ this.format(ConsoleObject.Formatter.Tag.get(target, this.globalThis), Writer, writer_, target, this.globalThis, enable_ansi_colors); + }, } } @@ -2908,9 +2935,6 @@ pub const Formatter = struct { } pub fn format(this: *ConsoleObject.Formatter, result: Tag.Result, comptime Writer: type, writer: Writer, value: JSValue, globalThis: *JSGlobalObject, comptime enable_ansi_colors: bool) void { - if (comptime is_bindgen) { - return; - } const prevGlobalThis = this.globalThis; defer this.globalThis = prevGlobalThis; this.globalThis = globalThis; @@ -2919,44 +2943,11 @@ pub const Formatter = struct { // comptime var so we have to repeat it here. The rationale there is // it _should_ limit the stack usage because each version of the // function will be relatively small - switch (result.tag) { - .StringPossiblyFormatted => this.printAs(.StringPossiblyFormatted, Writer, writer, value, result.cell, enable_ansi_colors), - .String => this.printAs(.String, Writer, writer, value, result.cell, enable_ansi_colors), - .Undefined => this.printAs(.Undefined, Writer, writer, value, result.cell, enable_ansi_colors), - .Double => this.printAs(.Double, Writer, writer, value, result.cell, enable_ansi_colors), - .Integer => this.printAs(.Integer, Writer, writer, value, result.cell, enable_ansi_colors), - .Null => this.printAs(.Null, Writer, writer, value, result.cell, enable_ansi_colors), - .Boolean => this.printAs(.Boolean, Writer, writer, value, result.cell, enable_ansi_colors), - .Array => this.printAs(.Array, Writer, writer, value, result.cell, enable_ansi_colors), - .Object => this.printAs(.Object, Writer, writer, value, result.cell, enable_ansi_colors), - .Function => this.printAs(.Function, Writer, writer, value, result.cell, enable_ansi_colors), - .Class => this.printAs(.Class, Writer, writer, value, result.cell, enable_ansi_colors), - .Error => this.printAs(.Error, Writer, writer, value, result.cell, enable_ansi_colors), - .ArrayBuffer, .TypedArray => this.printAs(.TypedArray, Writer, writer, value, result.cell, enable_ansi_colors), - .Map => this.printAs(.Map, Writer, writer, value, result.cell, enable_ansi_colors), - .MapIterator => this.printAs(.MapIterator, Writer, writer, value, result.cell, enable_ansi_colors), - .SetIterator => this.printAs(.SetIterator, Writer, writer, value, result.cell, enable_ansi_colors), - .Set => this.printAs(.Set, Writer, writer, value, result.cell, enable_ansi_colors), - .Symbol => this.printAs(.Symbol, Writer, writer, value, result.cell, enable_ansi_colors), - .BigInt => this.printAs(.BigInt, Writer, writer, value, result.cell, enable_ansi_colors), - .GlobalObject => this.printAs(.GlobalObject, Writer, writer, value, result.cell, enable_ansi_colors), - .Private => this.printAs(.Private, Writer, writer, value, result.cell, enable_ansi_colors), - .Promise => this.printAs(.Promise, Writer, writer, value, result.cell, enable_ansi_colors), + switch (result.tag.tag()) { + inline else => |tag| this.printAs(tag, Writer, writer, value, result.cell, enable_ansi_colors), - // Call JSON.stringify on the value - .JSON => this.printAs(.JSON, Writer, writer, value, result.cell, enable_ansi_colors), - - // Call value.toJSON() and print as an object - .toJSON => this.printAs(.toJSON, Writer, writer, value, result.cell, enable_ansi_colors), - - .NativeCode => this.printAs(.NativeCode, Writer, writer, value, result.cell, enable_ansi_colors), - .JSX => this.printAs(.JSX, Writer, writer, value, result.cell, enable_ansi_colors), - .Event => this.printAs(.Event, Writer, writer, value, result.cell, 
enable_ansi_colors), - .GetterSetter => this.printAs(.GetterSetter, Writer, writer, value, result.cell, enable_ansi_colors), - .CustomGetterSetter => this.printAs(.CustomGetterSetter, Writer, writer, value, result.cell, enable_ansi_colors), - - .CustomFormattedObject => |callback| { - this.custom_formatted_object = callback; + .CustomFormattedObject => { + this.custom_formatted_object = result.tag.CustomFormattedObject; this.printAs(.CustomFormattedObject, Writer, writer, value, result.cell, enable_ansi_colors); }, } diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index da91128e75..47dcde7be7 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -32,6 +32,7 @@ #include "JavaScriptCore/JSObject.h" #include "JavaScriptCore/JSSet.h" #include "JavaScriptCore/JSString.h" +#include "JavaScriptCore/ProxyObject.h" #include "JavaScriptCore/Microtask.h" #include "JavaScriptCore/ObjectConstructor.h" #include "JavaScriptCore/ParserError.h" @@ -5433,3 +5434,8 @@ CPP_DECL bool JSC__CustomGetterSetter__isSetterNull(JSC__CustomGetterSetter* get { return gettersetter->setter() == nullptr; } + +CPP_DECL JSC__JSValue Bun__ProxyObject__getInternalField(JSC__JSValue value, uint32_t id) +{ + return JSValue::encode(jsCast(JSValue::decode(value))->internalField((ProxyObject::Field)id).get()); +} \ No newline at end of file diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index 26bcd5d7f3..45a1428636 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -5310,6 +5310,18 @@ pub const JSValue = enum(JSValueReprInt) { else null; } + + extern fn Bun__ProxyObject__getInternalField(this: JSValue, field: ProxyInternalField) JSValue; + + const ProxyInternalField = enum(u32) { + target = 0, + handler = 1, + }; + + /// Asserts `this` is a proxy + pub fn getProxyInternalField(this: JSValue, field: ProxyInternalField) JSValue { + return Bun__ProxyObject__getInternalField(this, field); + } }; extern "c" fn AsyncContextFrame__withAsyncContextIfNeeded(global: *JSGlobalObject, callback: JSValue) JSValue; diff --git a/src/js/builtins/ProcessObjectInternals.ts b/src/js/builtins/ProcessObjectInternals.ts index 691f018df3..16676e2285 100644 --- a/src/js/builtins/ProcessObjectInternals.ts +++ b/src/js/builtins/ProcessObjectInternals.ts @@ -353,6 +353,14 @@ export function windowsEnv(internalEnv: InternalEnvMap, envMapList: Array { + let o = {}; + for (let k of envMapList) { + o[k] = internalEnv[k.toUpperCase()]; + } + return o; + }; + return new Proxy(internalEnv, { get(_, p) { return typeof p === "string" ? Reflect.get(internalEnv, p.toUpperCase()) : undefined; diff --git a/test/js/web/console/console-log.expected.txt b/test/js/web/console/console-log.expected.txt index 97ab621065..bcb85c590f 100644 --- a/test/js/web/console/console-log.expected.txt +++ b/test/js/web/console/console-log.expected.txt @@ -244,3 +244,8 @@ myCustomName { { "": "", } +{ + hello: 2, +} + +custom inspect diff --git a/test/js/web/console/console-log.js b/test/js/web/console/console-log.js index a45e4af242..34a744eee4 100644 --- a/test/js/web/console/console-log.js +++ b/test/js/web/console/console-log.js @@ -173,3 +173,34 @@ console.log(hole([1, 2, 3], 0)); // It appears to not be set and I don't know why. 
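// Note (not part of the patch): the proxy cases added below check that logging a
// Proxy prints its target (the revocable proxy over { hello: 2 } prints like a
// plain object), that a revoked proxy prints a short placeholder rather than
// throwing, and that a Bun.inspect.custom method on the target is still honored.
// The get/set traps log "FAILED: ..." so any accidental trap invocation would
// show up in the expected output.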
console.log({ "": "" }); + +{ + // proxy + const proxy = Proxy.revocable( + { hello: 2 }, + { + get(target, prop, receiver) { + console.log("FAILED: GET", prop); + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + console.log("FAILED: SET", prop, value); + return Reflect.set(target, prop, value, receiver); + }, + }, + ); + console.log(proxy.proxy); + proxy.revoke(); + console.log(proxy.proxy); +} + +{ + // proxy custom inspect + const proxy = new Proxy( + { + [Bun.inspect.custom]: () => "custom inspect", + }, + {}, + ); + console.log(proxy); +} From 53739f8a535d611b591bfc0250a666e5b5d5e007 Mon Sep 17 00:00:00 2001 From: argosphil Date: Thu, 22 Feb 2024 02:34:18 +0000 Subject: [PATCH 16/21] fix: modify bcrypt to be able to verify passwords directly (#9010) Fixes #9009. This would make the "bcrypt" algorithm (actually a variation of it) easier to use. --- src/bun.js/api/BunObject.zig | 14 +++++++++++++- test/js/bun/util/password.test.ts | 12 ++++-------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index a0266f8505..2e687edb58 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -1952,7 +1952,19 @@ pub const Crypto = struct { return true; }, .bcrypt => { - pwhash.bcrypt.strVerify(previous_hash, password, .{ .allocator = allocator }) catch |err| { + var password_to_use = password; + var outbuf: [bun.sha.SHA512.digest]u8 = undefined; + + // bcrypt silently truncates passwords longer than 72 bytes + // we use SHA512 to hash the password if it's longer than 72 bytes + if (password.len > 72) { + var sha_512 = bun.sha.SHA512.init(); + defer sha_512.deinit(); + sha_512.update(password); + sha_512.final(&outbuf); + password_to_use = &outbuf; + } + pwhash.bcrypt.strVerify(previous_hash, password_to_use, .{ .allocator = allocator }) catch |err| { if (err == error.PasswordVerificationFailed) { return false; } diff --git a/test/js/bun/util/password.test.ts b/test/js/bun/util/password.test.ts index 0f916ca447..b84a10e454 100644 --- a/test/js/bun/util/password.test.ts +++ b/test/js/bun/util/password.test.ts @@ -174,16 +174,12 @@ describe("verify", () => { }); }); -test("bcrypt longer than 72 characters is the SHA-512", async () => { +test("bcrypt uses the SHA-512 of passwords longer than 72 characters", async () => { const boop = Buffer.from("hey".repeat(100)); const hashed = await password.hash(boop, "bcrypt"); - expect(await password.verify(Bun.SHA512.hash(boop), hashed, "bcrypt")).toBeTrue(); -}); - -test("bcrypt shorter than 72 characters is NOT the SHA-512", async () => { - const boop = Buffer.from("hey".repeat(3)); - const hashed = await password.hash(boop, "bcrypt"); - expect(await password.verify(Bun.SHA512.hash(boop), hashed, "bcrypt")).toBeFalse(); + expect(await password.verify(boop, hashed, "bcrypt")).toBeTrue(); + const boop2 = Buffer.from("hey".repeat(24)); + expect(await password.verify(boop2, hashed, "bcrypt")).toBeFalse(); }); const defaultAlgorithm = "argon2id"; From 2605722891f32edf52efc53a66ff1bdca5e9f69f Mon Sep 17 00:00:00 2001 From: Zack Radisic <56137411+zackradisic@users.noreply.github.com> Date: Wed, 21 Feb 2024 18:45:44 -0800 Subject: [PATCH 17/21] shell: Allow duplicating output fds (e.g. 
`2>&1`) (#9004) * Open with proper perms when redirecting file to stdin * Add test for redirecting file to stdin * Extract redirect flags -> bun.Mode logic to function * Remove dead code * Support duplicating output file descriptors * Clean up * fix merge fuck up * Add comment documenting weird hack to get around ordering of posix spawn actions * Update docs * Delete dead code * Update docs --- docs/runtime/shell.md | 64 +++- src/shell/interpreter.zig | 40 ++- src/shell/shell.zig | 135 ++++++-- src/shell/subproc.zig | 7 +- src/shell/util.zig | 26 +- test/js/bun/shell/bunshell.test.ts | 18 + test/js/bun/shell/lex.test.ts | 77 ++++- test/js/bun/shell/parse.test.ts | 505 +++++++++++++++-------------- test/js/bun/shell/test_builder.ts | 13 +- test/js/bun/shell/util.ts | 1 + 10 files changed, 593 insertions(+), 293 deletions(-) diff --git a/docs/runtime/shell.md b/docs/runtime/shell.md index 7b2d53c0d9..fe0a379861 100644 --- a/docs/runtime/shell.md +++ b/docs/runtime/shell.md @@ -67,9 +67,20 @@ console.log(exitCode); // 0 ## Redirection -Bun Shell supports redirection with `<`, `>`, and `|` operators. +A command's _input_ or _output_ may be _redirected_ using the typical Bash operators: +- `<` redirect stdin +- `>` or `1>` redirect stdout +- `2>` redirect stderr +- `&>` redirect both stdout and stderr +- `>>` or `1>>` redirect stdout, _appending_ to the destination, instead of overwriting +- `2>>` redirect stderr, _appending_ to the destination, instead of overwriting +- `&>>` redirect both stdout and stderr, _appending_ to the destination, instead of overwriting +- `1>&2` redirect stdout to stderr (all writes to stdout will instead be in stderr) +- `2>&1` redirect stderr to stdout (all writes to stderr will instead be in stdout) -### To JavaScript objects (`>`) +Bun Shell also supports redirecting from and to JavaScript objects. + +### Example: Redirect output to JavaScript objects (`>`) To redirect stdout to a JavaScript object, use the `>` operator: @@ -88,7 +99,7 @@ The following JavaScript objects are supported for redirection to: - `Buffer`, `Uint8Array`, `Uint16Array`, `Uint32Array`, `Int8Array`, `Int16Array`, `Int32Array`, `Float32Array`, `Float64Array`, `ArrayBuffer`, `SharedArrayBuffer` (writes to the underlying buffer) - `Bun.file(path)`, `Bun.file(fd)` (writes to the file) -### From JavaScript objects (`<`) +### Example: Redirect input from JavaScript objects (`<`) To redirect the output from JavaScript objects to stdin, use the `<` operator: @@ -108,7 +119,52 @@ The following JavaScript objects are supported for redirection from: - `Bun.file(path)`, `Bun.file(fd)` (reads from the file) - `Response` (reads from the body) -### Piping (`|`) +### Example: Redirect stdin -> file + +```js +import { $ } from "bun" + +await $`cat < myfile.txt` +``` + +### Example: Redirect stdout -> file + +```js +import { $ } from "bun" + +await $`echo bun! 
> greeting.txt` +``` + +### Example: Redirect stderr -> file + +```js +import { $ } from "bun" + +await $`bun run index.ts 2> errors.txt` +``` + + +### Example: Redirect stdout -> stderr + +```js +import { $ } from "bun" + +// redirects stderr to stdout, so all output +// will be available on stdout +await $`bun run ./index.ts 2>&1` +``` + +### Example: Redirect stderr -> stdout + +```js +import { $ } from "bun" + +// redirects stdout to stderr, so all output +// will be available on stderr +await $`bun run ./index.ts 1>&2` +``` + +## Piping (`|`) Like in bash, you can pipe the output of one command to another: diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index c3458d6c17..46a9a2596e 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -158,7 +158,7 @@ pub const IO = struct { std: struct { captured: ?*bun.ByteList = null }, /// Write/Read to/from file descriptor fd: bun.FileDescriptor, - /// Buffers the output + /// Buffers the output (handled in Cmd.BufferedIoClosed.close()) pipe, /// Discards output ignore, @@ -2919,6 +2919,7 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { exec: Exec = .none, exit_code: ?ExitCode = null, io: IO, + // duplicate_out: enum { none, stdout, stderr } = .none, freed: bool = false, state: union(enum) { @@ -3004,7 +3005,7 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { const readable = io.stdout; // If the shell state is piped (inside a cmd substitution) aggregate the output of this command - if (cmd.base.shell.io.stdout == .pipe and cmd.io.stdout == .pipe and !cmd.node.redirect.stdout) { + if (cmd.base.shell.io.stdout == .pipe and cmd.io.stdout == .pipe and !cmd.node.redirect.redirectsElsewhere(.stdout)) { cmd.base.shell.buffered_stdout().append(bun.default_allocator, readable.pipe.buffer.internal_buffer.slice()) catch bun.outOfMemory(); } @@ -3017,7 +3018,7 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { const readable = io.stderr; // If the shell state is piped (inside a cmd substitution) aggregate the output of this command - if (cmd.base.shell.io.stderr == .pipe and cmd.io.stderr == .pipe and !cmd.node.redirect.stdout) { + if (cmd.base.shell.io.stderr == .pipe and cmd.io.stderr == .pipe and !cmd.node.redirect.redirectsElsewhere(.stderr)) { cmd.base.shell.buffered_stderr().append(bun.default_allocator, readable.pipe.buffer.internal_buffer.slice()) catch bun.outOfMemory(); } @@ -3489,6 +3490,14 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { setStdioFromRedirect(&spawn_args.stdio, this.node.redirect, .{ .fd = redirfd }); }, } + } else if (this.node.redirect.duplicate_out) { + if (this.node.redirect.stdout) { + spawn_args.stdio[stderr_no] = .{ .dup2 = .{ .out = .stderr, .to = .stdout } }; + } + + if (this.node.redirect.stderr) { + spawn_args.stdio[stdout_no] = .{ .dup2 = .{ .out = .stdout, .to = .stderr } }; + } } const buffered_closed = BufferedIoClosed.fromStdio(&spawn_args.stdio); @@ -3519,12 +3528,17 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { stdio.*[stdin_no] = val; } - if (flags.stdout) { + if (flags.duplicate_out) { stdio.*[stdout_no] = val; - } - - if (flags.stderr) { stdio.*[stderr_no] = val; + } else { + if (flags.stdout) { + stdio.*[stdout_no] = val; + } + + if (flags.stderr) { + stdio.*[stderr_no] = val; + } } } @@ -3649,7 +3663,7 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { std.debug.assert(this.exec == .subproc); } log("cmd ({x}) close 
buffered stdout", .{@intFromPtr(this)}); - if (this.io.stdout == .std and this.io.stdout.std.captured != null and !this.node.redirect.stdout) { + if (this.io.stdout == .std and this.io.stdout.std.captured != null and !this.node.redirect.redirectsElsewhere(.stdout)) { var buf = this.io.stdout.std.captured.?; buf.append(bun.default_allocator, this.exec.subproc.child.stdout.pipe.buffer.internal_buffer.slice()) catch bun.outOfMemory(); } @@ -3662,7 +3676,7 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { std.debug.assert(this.exec == .subproc); } log("cmd ({x}) close buffered stderr", .{@intFromPtr(this)}); - if (this.io.stderr == .std and this.io.stderr.std.captured != null and !this.node.redirect.stderr) { + if (this.io.stderr == .std and this.io.stderr.std.captured != null and !this.node.redirect.redirectsElsewhere(.stderr)) { var buf = this.io.stderr.std.captured.?; buf.append(bun.default_allocator, this.exec.subproc.child.stderr.pipe.buffer.internal_buffer.slice()) catch bun.outOfMemory(); } @@ -4073,6 +4087,14 @@ pub fn NewInterpreter(comptime EventLoopKind: JSC.EventLoopKind) type { } }, } + } else if (node.redirect.duplicate_out) { + if (node.redirect.stdout) { + cmd.exec.bltn.stderr = cmd.exec.bltn.stdout; + } + + if (node.redirect.stderr) { + cmd.exec.bltn.stdout = cmd.exec.bltn.stderr; + } } return .cont; diff --git a/src/shell/shell.zig b/src/shell/shell.zig index f713808224..5873c4dce4 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -442,7 +442,26 @@ pub const AST = struct { stdout: bool = false, stderr: bool = false, append: bool = false, - __unused: u4 = 0, + /// 1>&2 === stdout=true and duplicate_out=true + /// 2>&1 === stderr=true and duplicate_out=true + duplicate_out: bool = false, + __unused: u3 = 0, + + pub fn redirectsElsewhere(this: RedirectFlags, io_kind: enum { stdin, stdout, stderr }) bool { + return switch (io_kind) { + .stdin => this.stdin, + .stdout => if (this.duplicate_out) !this.stdout else this.stdout, + .stderr => if (this.duplicate_out) !this.stderr else this.stderr, + }; + } + + pub fn @"2>&1"() RedirectFlags { + return .{ .stderr = true, .duplicate = true }; + } + + pub fn @"1>&2"() RedirectFlags { + return .{ .stdout = true, .duplicate = true }; + } pub fn toFlags(this: RedirectFlags) bun.Mode { const read_write_flags: bun.Mode = if (this.stdin) std.os.O.RDONLY else std.os.O.WRONLY | std.os.O.CREAT; @@ -792,6 +811,7 @@ pub const Parser = struct { } const redirect_file = try self.parse_atom() orelse { + if (redirect.duplicate_out) break :redirect_file null; try self.add_error("Redirection with no file", .{}); return ParseError.Expected; }; @@ -809,6 +829,8 @@ pub const Parser = struct { } }; } + const ParsedRedirect = struct { flags: AST.Cmd.RedirectFlags, redirect: AST.Cmd.Redirect }; + /// Try to parse an assignment. 
If no assignment could be parsed then return /// null and backtrack the parser state fn parse_assign(self: *Parser) !?AST.Assign { @@ -1812,9 +1834,67 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { return false; } + // TODO Arbitrary file descriptor redirect fn eat_redirect(self: *@This(), first: InputChar) ?AST.Cmd.RedirectFlags { var flags: AST.Cmd.RedirectFlags = .{}; switch (first.char) { + '0' => flags.stdin = true, + '1' => flags.stdout = true, + '2' => flags.stderr = true, + // Just allow the std file descriptors for now + else => return null, + } + var dir: RedirectDirection = .out; + if (self.peek()) |input| { + if (input.escaped) return null; + switch (input.char) { + '>' => { + _ = self.eat(); + dir = .out; + const is_double = self.eat_simple_redirect_operator(dir); + if (is_double) flags.append = true; + if (self.peek()) |peeked| { + if (!peeked.escaped and peeked.char == '&') { + _ = self.eat(); + if (self.peek()) |peeked2| { + switch (peeked2.char) { + '1' => { + _ = self.eat(); + if (!flags.stdout and flags.stderr) { + flags.duplicate_out = true; + flags.stdout = true; + flags.stderr = false; + } else return null; + }, + '2' => { + _ = self.eat(); + if (!flags.stderr and flags.stdout) { + flags.duplicate_out = true; + flags.stderr = true; + flags.stdout = false; + } else return null; + }, + else => return null, + } + } + } + } + return flags; + }, + '<' => { + dir = .in; + const is_double = self.eat_simple_redirect_operator(dir); + if (is_double) flags.append = true; + return flags; + }, + else => return null, + } + } else return null; + } + + fn eat_redirect_old(self: *@This(), first: InputChar) ?AST.Cmd.RedirectFlags { + var flags: AST.Cmd.RedirectFlags = .{}; + if (self.matchesAsciiLiteral("2>&1")) {} else if (self.matchesAsciiLiteral("1>&2")) {} else switch (first.char) { '0'...'9' => { // Codepoint int casts are safe here because the digits are in the ASCII range var count: usize = 1; @@ -2000,10 +2080,33 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { return std.mem.eql(u8, bytes[0 .. LEX_JS_STRING_PREFIX.len - 1], LEX_JS_STRING_PREFIX[1..]); } - fn eatJSSubstitutionIdx(self: *@This(), comptime literal: []const u8, comptime name: []const u8, comptime validate: *const fn (*@This(), usize) bool) ?usize { + fn bumpCursorAscii(self: *@This(), new_idx: usize, prev_ascii_char: ?u7, cur_ascii_char: u7) void { + if (comptime encoding == .ascii) { + self.chars.src.i = new_idx; + if (prev_ascii_char) |pc| self.chars.prev = .{ .char = pc }; + self.chars.current = .{ .char = cur_ascii_char }; + return; + } + self.chars.src.cursor = CodepointIterator.Cursor{ + .i = @intCast(new_idx), + .c = cur_ascii_char, + .width = 1, + }; + self.chars.src.next_cursor = self.chars.src.cursor; + SrcUnicode.nextCursor(&self.chars.src.iter, &self.chars.src.next_cursor); + if (prev_ascii_char) |pc| self.chars.prev = .{ .char = pc }; + self.chars.current = .{ .char = cur_ascii_char }; + } + + fn matchesAsciiLiteral(self: *@This(), literal: []const u8) bool { const bytes = self.chars.srcBytesAtCursor(); - if (literal.len - 1 >= bytes.len) return null; - if (std.mem.eql(u8, bytes[0 .. 
literal.len - 1], literal[1..])) { + if (literal.len >= bytes.len) return false; + return std.mem.eql(u8, bytes[0..literal.len], literal[0..]); + } + + fn eatJSSubstitutionIdx(self: *@This(), comptime literal: []const u8, comptime name: []const u8, comptime validate: *const fn (*@This(), usize) bool) ?usize { + if (self.matchesAsciiLiteral(literal[1..literal.len])) { + const bytes = self.chars.srcBytesAtCursor(); var i: usize = 0; var digit_buf: [32]u8 = undefined; var digit_buf_count: u8 = 0; @@ -2044,26 +2147,10 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { // } // Bump the cursor - brk: { - const new_idx = self.chars.cursorPos() + i; - const prev_ascii_char: ?u7 = if (digit_buf_count == 1) null else @truncate(digit_buf[digit_buf_count - 2]); - const cur_ascii_char: u7 = @truncate(digit_buf[digit_buf_count - 1]); - if (comptime encoding == .ascii) { - self.chars.src.i = new_idx; - if (prev_ascii_char) |pc| self.chars.prev = .{ .char = pc }; - self.chars.current = .{ .char = cur_ascii_char }; - break :brk; - } - self.chars.src.cursor = CodepointIterator.Cursor{ - .i = @intCast(new_idx), - .c = cur_ascii_char, - .width = 1, - }; - self.chars.src.next_cursor = self.chars.src.cursor; - SrcUnicode.nextCursor(&self.chars.src.iter, &self.chars.src.next_cursor); - if (prev_ascii_char) |pc| self.chars.prev = .{ .char = pc }; - self.chars.current = .{ .char = cur_ascii_char }; - } + const new_idx = self.chars.cursorPos() + i; + const prev_ascii_char: ?u7 = if (digit_buf_count == 1) null else @truncate(digit_buf[digit_buf_count - 2]); + const cur_ascii_char: u7 = @truncate(digit_buf[digit_buf_count - 1]); + self.bumpCursorAscii(new_idx, prev_ascii_char, cur_ascii_char); // return self.string_refs[idx]; return idx; diff --git a/src/shell/subproc.zig b/src/shell/subproc.zig index 2fc4550808..af7673bc03 100644 --- a/src/shell/subproc.zig +++ b/src/shell/subproc.zig @@ -211,7 +211,7 @@ pub fn NewShellSubprocess(comptime EventLoopKind: JSC.EventLoopKind, comptime Sh return Writable{ .pipe = sink }; }, - .array_buffer, .blob => { + .dup2, .array_buffer, .blob => { var buffered_input: BufferedInput = .{ .fd = fd, .source = undefined, .subproc = subproc }; switch (stdio) { .array_buffer => |array_buffer| { @@ -383,7 +383,7 @@ pub fn NewShellSubprocess(comptime EventLoopKind: JSC.EventLoopKind, comptime Sh return Readable{ .inherit = {} }; }, .path => Readable{ .ignore = {} }, - .blob, .fd => Readable{ .fd = fd }, + .dup2, .blob, .fd => Readable{ .fd = fd }, .array_buffer => { var subproc_readable_ptr = subproc.getIO(kind); subproc_readable_ptr.* = Readable{ @@ -1447,6 +1447,7 @@ pub fn NewShellSubprocess(comptime EventLoopKind: JSC.EventLoopKind, comptime Sh spawn_args.stdio[0].setUpChildIoPosixSpawn( &actions, stdin_pipe, + stderr_pipe, bun.STDIN_FD, ) catch |err| { return .{ .err = globalThis.handleError(err, "in configuring child stdin") }; @@ -1455,6 +1456,7 @@ pub fn NewShellSubprocess(comptime EventLoopKind: JSC.EventLoopKind, comptime Sh spawn_args.stdio[1].setUpChildIoPosixSpawn( &actions, stdout_pipe, + stderr_pipe, bun.STDOUT_FD, ) catch |err| { return .{ .err = globalThis.handleError(err, "in configuring child stdout") }; @@ -1463,6 +1465,7 @@ pub fn NewShellSubprocess(comptime EventLoopKind: JSC.EventLoopKind, comptime Sh spawn_args.stdio[2].setUpChildIoPosixSpawn( &actions, stderr_pipe, + stderr_pipe, bun.STDERR_FD, ) catch |err| { return .{ .err = globalThis.handleError(err, "in configuring child stderr") }; diff --git a/src/shell/util.zig b/src/shell/util.zig index 
4d788860c0..ff15942c55 100644 --- a/src/shell/util.zig +++ b/src/shell/util.zig @@ -14,13 +14,23 @@ const Output = @import("root").bun.Output; const PosixSpawn = @import("../bun.js/api/bun/spawn.zig").PosixSpawn; const os = std.os; -pub const OutKind = enum { stdout, stderr }; +pub const OutKind = enum { + stdout, + stderr, + pub fn toFd(this: OutKind) bun.FileDescriptor { + return switch (this) { + .stdout => bun.STDOUT_FD, + .stderr => bun.STDERR_FD, + }; + } +}; pub const Stdio = union(enum) { /// When set to true, it means to capture the output inherit: struct { captured: ?*bun.ByteList = null }, ignore: void, fd: bun.FileDescriptor, + dup2: struct { out: OutKind, to: OutKind }, path: JSC.Node.PathLike, blob: JSC.WebCore.AnyBlob, pipe: ?JSC.WebCore.ReadableStream, @@ -38,9 +48,23 @@ pub const Stdio = union(enum) { stdio: @This(), actions: *PosixSpawn.Actions, pipe_fd: [2]bun.FileDescriptor, + stderr_pipe_fds: [2]bun.FileDescriptor, comptime std_fileno: bun.FileDescriptor, ) !void { switch (stdio) { + .dup2 => { + // This is a hack to get around the ordering of the spawn actions. + // If stdout is set so that it redirects to stderr, the order of actions will be like this: + // 0. dup2(stderr, stdout) - this makes stdout point to stderr + // 1. setup stderr (will make stderr point to write end of `stderr_pipe_fds`) + // This is actually wrong, 0 will execute before 1 so stdout ends up writing to stderr instead of the pipe + // So we have to instead do `dup2(stderr_pipe_fd[1], stdout)` + // Right now we only allow one output redirection so it's okay. + if (comptime std_fileno == bun.STDOUT_FD) { + const idx: usize = if (std_fileno == bun.STDIN_FD) 0 else 1; + try actions.dup2(stderr_pipe_fds[idx], stdio.dup2.out.toFd()); + } else try actions.dup2(stdio.dup2.to.toFd(), stdio.dup2.out.toFd()); + }, .array_buffer, .blob, .pipe => { std.debug.assert(!(stdio == .blob and stdio.blob.needsToReadFile())); const idx: usize = if (std_fileno == bun.STDIN_FD) 0 else 1; diff --git a/test/js/bun/shell/bunshell.test.ts b/test/js/bun/shell/bunshell.test.ts index 6a08a85573..2351f6f998 100644 --- a/test/js/bun/shell/bunshell.test.ts +++ b/test/js/bun/shell/bunshell.test.ts @@ -689,6 +689,24 @@ describe("deno_task", () => { await TestBuilder.command`echo 1 > $EMPTY`.stderr("bun: ambiguous redirect: at `echo`\n").exitCode(1).run(); await TestBuilder.command`echo foo bar > file.txt; cat < file.txt`.ensureTempDir().stdout("foo bar\n").run(); + + await TestBuilder.command`BUN_DEBUG_QUIET_LOGS=1 ${BUN} -e ${"console.log('Stdout'); console.error('Stderr')"} 2>&1` + .stdout("Stdout\nStderr\n") + .run(); + + await TestBuilder.command`BUN_DEBUG_QUIET_LOGS=1 ${BUN} -e ${"console.log('Stdout'); console.error('Stderr')"} 1>&2` + .stderr("Stdout\nStderr\n") + .run(); + + await TestBuilder.command`BUN_DEBUG_QUIET_LOGS=1 ${BUN} -e ${"console.log('Stdout'); console.error('Stderr')"} 2>&1` + .stdout("Stdout\nStderr\n") + .quiet() + .run(); + + await TestBuilder.command`BUN_DEBUG_QUIET_LOGS=1 ${BUN} -e ${"console.log('Stdout'); console.error('Stderr')"} 1>&2` + .stderr("Stdout\nStderr\n") + .quiet() + .run(); }); test("pwd", async () => { diff --git a/test/js/bun/shell/lex.test.ts b/test/js/bun/shell/lex.test.ts index ebc28125bb..9f0e4856ef 100644 --- a/test/js/bun/shell/lex.test.ts +++ b/test/js/bun/shell/lex.test.ts @@ -313,7 +313,16 @@ describe("lex shell", () => { expected = [ { "Text": "cmd1" }, { "Delimit": {} }, - { "Redirect": { "stdin": true, "stdout": false, "stderr": false, "append": false, "__unused": 0 } }, + { 
+ "Redirect": { + "stdin": true, + "stdout": false, + "stderr": false, + "append": false, + duplicate_out: false, + "__unused": 0, + }, + }, { "Text": "file.txt" }, { "Delimit": {} }, { "Eof": {} }, @@ -324,7 +333,16 @@ describe("lex shell", () => { expected = [ { "Text": "cmd1" }, { "Delimit": {} }, - { "Redirect": { "stdin": false, "stdout": true, "stderr": false, "append": false, "__unused": 0 } }, + { + "Redirect": { + "stdin": false, + "stdout": true, + "stderr": false, + "append": false, + duplicate_out: false, + "__unused": 0, + }, + }, { "Text": "file.txt" }, { "Delimit": {} }, { "Eof": {} }, @@ -335,7 +353,16 @@ describe("lex shell", () => { expected = [ { "Text": "cmd1" }, { "Delimit": {} }, - { "Redirect": { "stdin": false, "stdout": false, "stderr": true, "append": false, "__unused": 0 } }, + { + "Redirect": { + "stdin": false, + "stdout": false, + "stderr": true, + "append": false, + duplicate_out: false, + "__unused": 0, + }, + }, { "Text": "file.txt" }, { "Delimit": {} }, { "Eof": {} }, @@ -346,7 +373,16 @@ describe("lex shell", () => { expected = [ { "Text": "cmd1" }, { "Delimit": {} }, - { "Redirect": { "stdin": false, "stdout": true, "stderr": true, "append": false, "__unused": 0 } }, + { + "Redirect": { + "stdin": false, + "stdout": true, + "stderr": true, + "append": false, + duplicate_out: false, + "__unused": 0, + }, + }, { "Text": "file.txt" }, { "Delimit": {} }, { "Eof": {} }, @@ -357,7 +393,16 @@ describe("lex shell", () => { expected = [ { "Text": "cmd1" }, { "Delimit": {} }, - { "Redirect": { "stdin": false, "stdout": true, "stderr": false, "append": true, "__unused": 0 } }, + { + "Redirect": { + "stdin": false, + "stdout": true, + "stderr": false, + "append": true, + duplicate_out: false, + "__unused": 0, + }, + }, { "Text": "file.txt" }, { "Delimit": {} }, { "Eof": {} }, @@ -368,7 +413,16 @@ describe("lex shell", () => { expected = [ { "Text": "cmd1" }, { "Delimit": {} }, - { "Redirect": { "stdin": false, "stdout": false, "stderr": true, "append": true, "__unused": 0 } }, + { + "Redirect": { + "stdin": false, + "stdout": false, + "stderr": true, + "append": true, + duplicate_out: false, + "__unused": 0, + }, + }, { "Text": "file.txt" }, { "Delimit": {} }, { "Eof": {} }, @@ -379,7 +433,16 @@ describe("lex shell", () => { expected = [ { "Text": "cmd1" }, { "Delimit": {} }, - { "Redirect": { "stdin": false, "stdout": true, "stderr": true, "append": true, "__unused": 0 } }, + { + "Redirect": { + "stdin": false, + "stdout": true, + "stderr": true, + "append": true, + duplicate_out: false, + "__unused": 0, + }, + }, { "Text": "file.txt" }, { "Delimit": {} }, { "Eof": {} }, diff --git a/test/js/bun/shell/parse.test.ts b/test/js/bun/shell/parse.test.ts index 6d75d56a91..132348160d 100644 --- a/test/js/bun/shell/parse.test.ts +++ b/test/js/bun/shell/parse.test.ts @@ -39,15 +39,15 @@ describe("parse shell", () => { test("basic redirect", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "foo" } }], - "redirect": redirect({ stdout: true }), - "redirect_file": { atom: { "simple": { "Text": "lmao.txt" } } }, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "foo" } }], + redirect: redirect({ stdout: true }), + redirect_file: { atom: { simple: { Text: "lmao.txt" } } }, }, }, ], @@ -61,23 +61,23 @@ describe("parse shell", () => { test("compound atom", () => { const expected = { - "stmts": [ + stmts: [ { - 
"exprs": [ + exprs: [ { - "cmd": { - "assigns": [], - "name_and_args": [ + cmd: { + assigns: [], + name_and_args: [ { - "compound": { - "atoms": [{ "Text": "FOO " }, { "Var": "NICE" }, { "Text": "!" }], + compound: { + atoms: [{ Text: "FOO " }, { Var: "NICE" }, { Text: "!" }], brace_expansion_hint: false, glob_hint: false, }, }, ], - "redirect": redirect({}), - "redirect_file": null, + redirect: redirect({}), + redirect_file: null, }, }, ], @@ -92,26 +92,26 @@ describe("parse shell", () => { test("pipelines", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "pipeline": { - "items": [ + pipeline: { + items: [ { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }], - "redirect": redirect({ stdout: true }), - "redirect_file": { atom: { "simple": { "Text": "foo.txt" } } }, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }], + redirect: redirect({ stdout: true }), + redirect_file: { atom: { simple: { Text: "foo.txt" } } }, }, }, { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "hi" } }], - "redirect": redirect({}), - "redirect_file": null, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "hi" } }], + redirect: redirect({}), + redirect_file: null, }, }, ], @@ -128,39 +128,39 @@ describe("parse shell", () => { test("conditional execution", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cond": { - "op": "Or", - "left": { - "cond": { - "op": "And", - "left": { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "foo" } }], - "redirect": redirect(), - "redirect_file": null, + cond: { + op: "Or", + left: { + cond: { + op: "And", + left: { + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "foo" } }], + redirect: redirect(), + redirect_file: null, }, }, - "right": { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "bar" } }], - "redirect": redirect(), - "redirect_file": null, + right: { + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "bar" } }], + redirect: redirect(), + redirect_file: null, }, }, }, }, - "right": { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "lmao" } }], - "redirect": redirect(), - "redirect_file": null, + right: { + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "lmao" } }], + redirect: redirect(), + redirect_file: null, }, }, }, @@ -176,53 +176,55 @@ describe("parse shell", () => { test("precedence", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cond": { - "op": "And", - "left": { - "cond": { - "op": "And", - "left": { - "assign": [{ "label": "FOO", "value": { "simple": { "Text": "bar" } } }], + cond: { + op: "And", + left: { + cond: { + op: "And", + left: { + assign: [{ label: "FOO", value: { simple: { Text: "bar" } } }], }, - "right": { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "foo" } }], - "redirect": redirect(), - "redirect_file": null, + right: { + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "foo" } }], + redirect: redirect(), + redirect_file: null, }, }, }, }, - "right": { - "pipeline": { - "items": [ + right: { + pipeline: { + items: [ { - 
"cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "bar" } }], - "redirect": redirect(), - "redirect_file": null, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "bar" } }], + redirect: redirect(), + redirect_file: null, }, }, { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "lmao" } }], - "redirect": redirect(), - "redirect_file": null, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "lmao" } }], + redirect: redirect(), + redirect_file: null, }, }, { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "cat" } }], - "redirect": redirect({ stdout: true }), - "redirect_file": { atom: { "simple": { "Text": "foo.txt" } } }, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "cat" } }], + redirect: redirect({ stdout: true }), + redirect_file: { + atom: { simple: { Text: "foo.txt" } }, + }, }, }, ], @@ -242,24 +244,25 @@ describe("parse shell", () => { test("assigns", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [ - { "label": "FOO", "value": { "simple": { "Text": "bar" } } }, - { "label": "BAR", "value": { "simple": { "Text": "baz" } } }, + cmd: { + assigns: [ + { label: "FOO", value: { simple: { Text: "bar" } } }, + { label: "BAR", value: { simple: { Text: "baz" } } }, ], - "name_and_args": [{ "simple": { "Text": "export" } }, { "simple": { "Text": "LMAO=nice" } }], - "redirect": { - "stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + name_and_args: [{ simple: { Text: "export" } }, { simple: { Text: "LMAO=nice" } }], + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], @@ -274,26 +277,26 @@ describe("parse shell", () => { test("redirect js obj", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cond": { - "op": "And", - "left": { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "foo" } }], - "redirect": redirect({ stdout: true }), - "redirect_file": { "jsbuf": { "idx": 0 } }, + cond: { + op: "And", + left: { + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "foo" } }], + redirect: redirect({ stdout: true }), + redirect_file: { jsbuf: { idx: 0 } }, }, }, - "right": { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "foo" } }], - "redirect": redirect({ stdout: true }), - "redirect_file": { "jsbuf": { "idx": 1 } }, + right: { + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "foo" } }], + redirect: redirect({ stdout: true }), + redirect_file: { jsbuf: { idx: 1 } }, }, }, }, @@ -313,70 +316,73 @@ describe("parse shell", () => { test("cmd subst", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [], - "name_and_args": [ - { "simple": { "Text": "echo" } }, + cmd: { + assigns: [], + name_and_args: [ + { simple: { Text: "echo" } }, { - "simple": { - "cmd_subst": { - "script": { - "stmts": [ + simple: { + cmd_subst: { + script: { + stmts: [ { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "1" } }], - "redirect": { - 
"stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "1" } }], + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], }, { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "2" } }], - "redirect": { - "stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "2" } }], + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], }, ], }, - "quoted": true, + quoted: true, }, }, }, ], - "redirect": { - "stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], @@ -390,60 +396,72 @@ describe("parse shell", () => { test("cmd subst edgecase", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cond": { - "op": "And", - "left": { - "cmd": { - "assigns": [], - "name_and_args": [ - { "simple": { "Text": "echo" } }, + cond: { + op: "And", + left: { + cmd: { + assigns: [], + name_and_args: [ + { simple: { Text: "echo" } }, { - "simple": { - "cmd_subst": { - "script": { - "stmts": [ + simple: { + cmd_subst: { + script: { + stmts: [ { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [], - "name_and_args": [ - { "simple": { "Text": "ls" } }, - { "simple": { "Text": "foo" } }, - ], - "redirect": { - "stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "ls" } }, { simple: { Text: "foo" } }], + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], }, ], }, - "quoted": false, + quoted: false, }, }, }, ], - "redirect": { "stdin": false, "stdout": false, "stderr": false, "append": false, "__unused": 0 }, - "redirect_file": null, + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, + }, + redirect_file: null, }, }, - "right": { - "cmd": { - "assigns": [], - "name_and_args": [{ "simple": { "Text": "echo" } }, { "simple": { "Text": "nice" } }], - "redirect": { "stdin": false, "stdout": false, "stderr": false, "append": false, "__unused": 0 }, - "redirect_file": null, + right: { + cmd: { + assigns: [], + name_and_args: [{ simple: { Text: "echo" } }, { simple: { Text: "nice" } }], + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, + }, + redirect_file: null, }, }, }, @@ -459,57 +477,59 @@ describe("parse shell", () => { describe("bad syntax", () => { test("cmd subst edgecase", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [], - "name_and_args": [ - { "simple": { "Text": "echo" } }, + cmd: { + assigns: [], + name_and_args: [ + { simple: { Text: "echo" } }, { - "simple": { - "cmd_subst": { - "script": { - 
"stmts": [ + simple: { + cmd_subst: { + script: { + stmts: [ { - "exprs": [ + exprs: [ { - "cmd": { - "assigns": [ + cmd: { + assigns: [ { - "label": "FOO", - "value": { "simple": { "Text": "bar" } }, + label: "FOO", + value: { simple: { Text: "bar" } }, }, ], - "name_and_args": [{ "simple": { "Var": "FOO" } }], - "redirect": { - "stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + name_and_args: [{ simple: { Var: "FOO" } }], + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], }, ], }, - "quoted": false, + quoted: false, }, }, }, ], - "redirect": { - "stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], @@ -522,32 +542,33 @@ describe("parse shell", () => { test("cmd edgecase", () => { const expected = { - "stmts": [ + stmts: [ { - "exprs": [ + exprs: [ { - "assign": [ - { "label": "FOO", "value": { "simple": { "Text": "bar" } } }, - { "label": "BAR", "value": { "simple": { "Text": "baz" } } }, + assign: [ + { label: "FOO", value: { simple: { Text: "bar" } } }, + { label: "BAR", value: { simple: { Text: "baz" } } }, ], }, { - "cmd": { - "assigns": [ + cmd: { + assigns: [ { - "label": "BUN_DEBUG_QUIET_LOGS", - "value": { "simple": { "Text": "1" } }, + label: "BUN_DEBUG_QUIET_LOGS", + value: { simple: { Text: "1" } }, }, ], - "name_and_args": [{ "simple": { "Text": "echo" } }], - "redirect": { - "stdin": false, - "stdout": false, - "stderr": false, - "append": false, - "__unused": 0, + name_and_args: [{ simple: { Text: "echo" } }], + redirect: { + stdin: false, + stdout: false, + stderr: false, + append: false, + duplicate_out: false, + __unused: 0, }, - "redirect_file": null, + redirect_file: null, }, }, ], diff --git a/test/js/bun/shell/test_builder.ts b/test/js/bun/shell/test_builder.ts index 2a39d61ad4..9bb9911e0b 100644 --- a/test/js/bun/shell/test_builder.ts +++ b/test/js/bun/shell/test_builder.ts @@ -11,7 +11,7 @@ export class TestBuilder { private _testName: string | undefined = undefined; private expected_stdout: string | ((stdout: string, tempdir: string) => void) = ""; - private expected_stderr: string = ""; + private expected_stderr: string | ((stderr: string, tempdir: string) => void) = ""; private expected_exit_code: number = 0; private expected_error: ShellError | string | boolean | undefined = undefined; private file_equals: { [filename: string]: string } = {}; @@ -75,7 +75,7 @@ export class TestBuilder { return this; } - stderr(expected: string): this { + stderr(expected: string | ((stderr: string, tempDir: string) => void)): this { this.expected_stderr = expected; return this; } @@ -160,8 +160,13 @@ export class TestBuilder { this.expected_stdout(stdout.toString(), tempdir); } } - if (this.expected_stderr !== undefined) - expect(stderr.toString()).toEqual(this.expected_stderr.replaceAll("$TEMP_DIR", tempdir)); + if (this.expected_stderr !== undefined) { + if (typeof this.expected_stderr === "string") { + expect(stderr.toString()).toEqual(this.expected_stderr.replaceAll("$TEMP_DIR", tempdir)); + } else { + this.expected_stderr(stderr.toString(), tempdir); + } + } if (this.expected_exit_code !== undefined) expect(exitCode).toEqual(this.expected_exit_code); for (const [filename, expected] of 
Object.entries(this.file_equals)) { diff --git a/test/js/bun/shell/util.ts b/test/js/bun/shell/util.ts index 2972cae4b3..20b021606b 100644 --- a/test/js/bun/shell/util.ts +++ b/test/js/bun/shell/util.ts @@ -22,6 +22,7 @@ const defaultRedirect = { stderr: false, stdin: false, stdout: false, + duplicate_out: false, }; export const redirect = (opts?: Partial): typeof defaultRedirect => From dfab13e6de14dea0268e862edb1026e25f65ad51 Mon Sep 17 00:00:00 2001 From: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com> Date: Wed, 21 Feb 2024 21:42:50 -0800 Subject: [PATCH 18/21] Fix compiler errors --- src/async/posix_event_loop.zig | 28 +-- src/fd.zig | 3 +- src/io/PipeReader.zig | 2 +- src/shell/interpreter.zig | 436 ++++++++++++++++----------------- src/shell/shell.zig | 18 +- src/shell/util.zig | 2 +- 6 files changed, 242 insertions(+), 247 deletions(-) diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig index b91e1fca42..d67ec8110a 100644 --- a/src/async/posix_event_loop.zig +++ b/src/async/posix_event_loop.zig @@ -335,31 +335,31 @@ pub const FilePoll = struct { var ptr = poll.owner; switch (ptr.tag()) { // @field(Owner.Tag, bun.meta.typeBaseName(@typeName(FIFO))) => { - // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) FIFO", .{poll.fd}); + // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) FIFO", .{poll.fd}); // ptr.as(FIFO).ready(size_or_offset, poll.flags.contains(.hup)); // }, // @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellBufferedInput))) => { - // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellBufferedInput", .{poll.fd}); + // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellBufferedInput", .{poll.fd}); // ptr.as(ShellBufferedInput).onPoll(size_or_offset, 0); // }, // @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellBufferedWriter))) => { - // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellBufferedWriter", .{poll.fd}); + // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellBufferedWriter", .{poll.fd}); // var loader = ptr.as(ShellBufferedWriter); // loader.onPoll(size_or_offset, 0); // }, // @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellBufferedWriterMini))) => { - // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellBufferedWriterMini", .{poll.fd}); + // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellBufferedWriterMini", .{poll.fd}); // var loader = ptr.as(ShellBufferedWriterMini); // loader.onPoll(size_or_offset, 0); // }, // @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellSubprocessCapturedBufferedWriter))) => { - // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellSubprocessCapturedBufferedWriter", .{poll.fd}); + // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellSubprocessCapturedBufferedWriter", .{poll.fd}); // var loader = ptr.as(ShellSubprocessCapturedBufferedWriter); // loader.onPoll(size_or_offset, 0); // }, // @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellSubprocessCapturedBufferedWriterMini))) => { - // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) ShellSubprocessCapturedBufferedWriterMini", .{poll.fd}); + // log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) ShellSubprocessCapturedBufferedWriterMini", .{poll.fd}); // var loader = ptr.as(ShellSubprocessCapturedBufferedWriterMini); // loader.onPoll(size_or_offset, 0); // }, @@ -372,19 +372,19 @@ pub const FilePoll = struct { handler.onPoll(size_or_offset, poll.flags.contains(.hup)); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(BufferedReader))) => { - log("onUpdate " ++ kqueue_or_epoll ++ 
" (fd: {d}) Reader", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) Reader", .{poll.fd}); var handler: *BufferedReader = ptr.as(BufferedReader); handler.onPoll(size_or_offset, poll.flags.contains(.hup)); }, @field(Owner.Tag, bun.meta.typeBaseName(@typeName(Process))) => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) Process", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) Process", .{poll.fd}); var loader = ptr.as(Process); loader.onWaitPidFromEventLoopTask(); }, @field(Owner.Tag, "DNSResolver") => { - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) DNSResolver", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) DNSResolver", .{poll.fd}); var loader: *DNSResolver = ptr.as(DNSResolver); loader.onDNSPoll(poll); }, @@ -394,14 +394,14 @@ pub const FilePoll = struct { unreachable; } - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) GetAddrInfoRequest", .{poll.fd}); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) GetAddrInfoRequest", .{poll.fd}); var loader: *GetAddrInfoRequest = ptr.as(GetAddrInfoRequest); loader.onMachportChange(); }, else => { const possible_name = Owner.typeNameFromTag(@intFromEnum(ptr.tag())); - log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) disconnected? (maybe: {s})", .{ poll.fd, possible_name orelse "" }); + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) disconnected? (maybe: {s})", .{ poll.fd, possible_name orelse "" }); }, } } @@ -782,7 +782,7 @@ pub const FilePoll = struct { pub fn registerWithFd(this: *FilePoll, loop: *Loop, flag: Flags, one_shot: OneShotFlag, fd: bun.FileDescriptor) JSC.Maybe(void) { const watcher_fd = loop.fd; - log("register: {s} ({d})", .{ @tagName(flag), fd }); + log("register: {s} ({})", .{ @tagName(flag), fd }); std.debug.assert(fd != invalid_fd); @@ -965,7 +965,7 @@ pub const FilePoll = struct { }; if (this.flags.contains(.needs_rearm) and !force_unregister) { - log("unregister: {s} ({d}) skipped due to needs_rearm", .{ @tagName(flag), fd }); + log("unregister: {s} ({}) skipped due to needs_rearm", .{ @tagName(flag), fd }); this.flags.remove(.poll_process); this.flags.remove(.poll_readable); this.flags.remove(.poll_process); @@ -973,7 +973,7 @@ pub const FilePoll = struct { return JSC.Maybe(void).success; } - log("unregister: {s} ({d})", .{ @tagName(flag), fd }); + log("unregister: {s} ({})", .{ @tagName(flag), fd }); if (comptime Environment.isLinux) { const ctl = linux.epoll_ctl( diff --git a/src/fd.zig b/src/fd.zig index 287a8fe4e2..285a570f8b 100644 --- a/src/fd.zig +++ b/src/fd.zig @@ -216,7 +216,7 @@ pub const FDImpl = packed struct { // Format the file descriptor for logging BEFORE closing it. // Otherwise the file descriptor is always invalid after closing it. var buf: if (env.isDebug) [1050]u8 else void = undefined; - const this_fmt = if (env.isDebug) std.fmt.bufPrint(&buf, "{d}", .{this}) catch unreachable; + const this_fmt = if (env.isDebug) std.fmt.bufPrint(&buf, "{}", .{this}) catch unreachable; const result: ?bun.sys.Error = switch (env.os) { .linux => result: { @@ -310,7 +310,6 @@ pub const FDImpl = packed struct { return; } - if (fmt.len != 0) { // The reason for this error is because formatting FD as an integer on windows is // ambiguous and almost certainly a mistake. You probably meant to format fd.cast(). 
diff --git a/src/io/PipeReader.zig b/src/io/PipeReader.zig index ec78c1a9f4..5c6558f63e 100644 --- a/src/io/PipeReader.zig +++ b/src/io/PipeReader.zig @@ -59,7 +59,7 @@ pub fn PosixPipeReader( pub fn onPoll(parent: *This, size_hint: isize, received_hup: bool) void { const resizable_buffer = vtable.getBuffer(parent); const fd = vtable.getFd(parent); - bun.sys.syslog("onPoll({d}) = {d}", .{ fd, size_hint }); + bun.sys.syslog("onPoll({}) = {d}", .{ fd, size_hint }); switch (vtable.getFileType(parent)) { .nonblocking_pipe => { diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 382c300841..99e17fbbc1 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -856,7 +856,7 @@ pub const Interpreter = struct { jsobjs.items[0..], )) { .result => |i| i, - .err => |e| { + .err => |*e| { arena.deinit(); throwShellErr(e, .{ .js = globalThis.bunVM().event_loop }); return null; @@ -1026,7 +1026,7 @@ pub const Interpreter = struct { const script_heap = try arena.allocator().create(ast.Script); script_heap.* = script; var interp = switch (ThisInterpreter.init(.{ .mini = mini }, bun.default_allocator, &arena, script_heap, jsobjs)) { - .err => |e| { + .err => |*e| { throwShellErr(e, .{ .mini = mini }); return; }, @@ -1072,7 +1072,7 @@ pub const Interpreter = struct { const script_heap = try arena.allocator().create(ast.Script); script_heap.* = script; var interp = switch (ThisInterpreter.init(mini, bun.default_allocator, &arena, script_heap, jsobjs)) { - .err => |e| { + .err => |*e| { throwShellErr(e, .{ .mini = mini }); return; }, @@ -1610,7 +1610,7 @@ pub const Interpreter = struct { const shell_state = switch (this.base.shell.dupeForSubshell(this.base.interpreter.allocator, io, .cmd_subst)) { .result => |s| s, .err => |e| { - throwShellErr(bun.shell.ShellErr.newSys(e), this.base.eventLoop()); + this.base.throw(&bun.shell.ShellErr.newSys(e)); return false; }, }; @@ -1637,7 +1637,7 @@ pub const Interpreter = struct { const shell_state = switch (this.base.shell.dupeForSubshell(this.base.interpreter.allocator, io, .cmd_subst)) { .result => |s| s, .err => |e| { - throwShellErr(bun.shell.ShellErr.newSys(e), this.base.eventLoop()); + this.base.throw(&bun.shell.ShellErr.newSys(e)); return false; }, }; @@ -1655,23 +1655,7 @@ pub const Interpreter = struct { this.child_state = .idle; } } - - b = i + 1; - if (c == ' ') { - b = i; - prev_whitespace = true; - this.current_out.appendSlice(stdout[a..b]) catch bun.outOfMemory(); - this.pushCurrentOut(); - // const slice_z = this.base.interpreter.allocator.dupeZ(u8, stdout[a..b]) catch bun.outOfMemory(); - // this.pushResultSlice(slice_z); - } - } - // "aa bbb" - - this.current_out.appendSlice(stdout[a..b]) catch bun.outOfMemory(); - // this.pushCurrentOut(); - // const slice_z = this.base.interpreter.allocator.dupeZ(u8, stdout[a..b]) catch bun.outOfMemory(); - // this.pushResultSlice(slice_z); + }, } return false; @@ -1806,15 +1790,15 @@ pub const Interpreter = struct { std.debug.assert(this.child_state == .glob); } - if (task.err != null) { - switch (task.err.?) 
{ + if (task.err) |*err| { + switch (err.*) { .syscall => { - throwShellErr(bun.shell.ShellErr.newSys(task.err.?.syscall), this.base.eventLoop()); + this.base.throw(&bun.shell.ShellErr.newSys(task.err.?.syscall)); }, .unknown => |errtag| { - throwShellErr(.{ + this.base.throw(&.{ .custom = bun.default_allocator.dupe(u8, @errorName(errtag)) catch bun.outOfMemory(), - }, this.base.eventLoop()); + }); }, } } @@ -2110,9 +2094,13 @@ pub const Interpreter = struct { interpreter: *ThisInterpreter, shell: *ShellState, - pub inline fn eventLoop(this: *State) JSC.EventLoopHandle { + pub inline fn eventLoop(this: *const State) JSC.EventLoopHandle { return this.interpreter.event_loop; } + + pub fn throw(this: *const State, err: *const bun.shell.ShellErr) void { + throwShellErr(err, this.eventLoop()); + } }; pub const Script = struct { @@ -2790,7 +2778,7 @@ pub const Interpreter = struct { } if (err) |e| { - throwShellErr(shell.ShellErr.newSys(e), this.base.eventLoop()); + this.base.throw(&shell.ShellErr.newSys(e)); return; } @@ -3104,7 +3092,6 @@ pub const Interpreter = struct { .node = node, .parent = parent, - .spawn_arena = bun.ArenaAllocator.init(interpreter.allocator), .args = std.ArrayList(?[*:0]const u8).initCapacity(cmd.spawn_arena.allocator(), node.name_and_args.len) catch bun.outOfMemory(), .redirection_file = undefined, @@ -3227,7 +3214,7 @@ pub const Interpreter = struct { pub fn onBufferedWriterDone(this: *Cmd, e: ?Syscall.Error) void { if (e) |err| { - throwShellErr(bun.shell.ShellErr.newSys(err), this.base.eventLoop()); + this.base.throw(&bun.shell.ShellErr.newSys(err)); return; } std.debug.assert(this.state == .waiting_write_err); @@ -3486,8 +3473,8 @@ pub const Interpreter = struct { } }; const subproc = switch (Subprocess.spawnAsync(this.base.eventLoop(), spawn_args, &this.exec.subproc.child)) { .result => this.exec.subproc.child, - .err => |e| { - throwShellErr(e, this.base.eventLoop()); + .err => |*e| { + this.base.throw(e); return; }, }; @@ -3881,203 +3868,212 @@ pub const Interpreter = struct { io_: *IO, comptime in_cmd_subst: bool, ) CoroutineResult { - const io = io_.*; + const io = io_.*; - const stdin: Builtin.BuiltinIO = switch (io.stdin) { - .std => .{ .fd = bun.STDIN_FD }, - .fd => |fd| .{ .fd = fd }, - .pipe => .{ .buf = std.ArrayList(u8).init(interpreter.allocator) }, - .ignore => .ignore, - }; - const stdout: Builtin.BuiltinIO = switch (io.stdout) { - .std => if (io.stdout.std.captured) |bytelist| .{ .captured = .{ .out_kind = .stdout, .bytelist = bytelist } } else .{ .fd = bun.STDOUT_FD }, - .fd => |fd| .{ .fd = fd }, - .pipe => .{ .buf = std.ArrayList(u8).init(interpreter.allocator) }, - .ignore => .ignore, - }; - const stderr: Builtin.BuiltinIO = switch (io.stderr) { - .std => if (io.stderr.std.captured) |bytelist| .{ .captured = .{ .out_kind = .stderr, .bytelist = bytelist } } else .{ .fd = bun.STDERR_FD }, - .fd => |fd| .{ .fd = fd }, - .pipe => .{ .buf = std.ArrayList(u8).init(interpreter.allocator) }, - .ignore => .ignore, - }; + const stdin: Builtin.BuiltinIO = switch (io.stdin) { + .std => .{ .fd = bun.STDIN_FD }, + .fd => |fd| .{ .fd = fd }, + .pipe => .{ .buf = std.ArrayList(u8).init(interpreter.allocator) }, + .ignore => .ignore, + }; + const stdout: Builtin.BuiltinIO = switch (io.stdout) { + .std => if (io.stdout.std.captured) |bytelist| .{ .captured = .{ .out_kind = .stdout, .bytelist = bytelist } } else .{ .fd = bun.STDOUT_FD }, + .fd => |fd| .{ .fd = fd }, + .pipe => .{ .buf = std.ArrayList(u8).init(interpreter.allocator) }, + .ignore => .ignore, + }; + 
const stderr: Builtin.BuiltinIO = switch (io.stderr) { + .std => if (io.stderr.std.captured) |bytelist| .{ .captured = .{ .out_kind = .stderr, .bytelist = bytelist } } else .{ .fd = bun.STDERR_FD }, + .fd => |fd| .{ .fd = fd }, + .pipe => .{ .buf = std.ArrayList(u8).init(interpreter.allocator) }, + .ignore => .ignore, + }; - cmd.exec = .{ - .bltn = Builtin{ - .kind = kind, - .stdin = stdin, - .stdout = stdout, - .stderr = stderr, - .exit_code = null, - .arena = arena, - .args = args, - .export_env = export_env, - .cmd_local_env = cmd_local_env, - .cwd = cwd, - .impl = undefined, - }, - }; + cmd.exec = .{ + .bltn = Builtin{ + .kind = kind, + .stdin = stdin, + .stdout = stdout, + .stderr = stderr, + .exit_code = null, + .arena = arena, + .args = args, + .export_env = export_env, + .cmd_local_env = cmd_local_env, + .cwd = cwd, + .impl = undefined, + }, + }; - switch (kind) { - .@"export" => { - cmd.exec.bltn.impl = .{ - .@"export" = Export{ .bltn = &cmd.exec.bltn }, - }; - }, - .rm => { - cmd.exec.bltn.impl = .{ - .rm = Rm{ - .bltn = &cmd.exec.bltn, - .opts = .{}, - }, - }; - }, - .echo => { - cmd.exec.bltn.impl = .{ - .echo = Echo{ - .bltn = &cmd.exec.bltn, - .output = std.ArrayList(u8).init(arena.allocator()), - }, - }; - }, - .cd => { - cmd.exec.bltn.impl = .{ - .cd = Cd{ - .bltn = &cmd.exec.bltn, - }, - }; - }, - .which => { - cmd.exec.bltn.impl = .{ - .which = Which{ - .bltn = &cmd.exec.bltn, - }, - }; - }, - .pwd => { - cmd.exec.bltn.impl = .{ - .pwd = Pwd{ .bltn = &cmd.exec.bltn }, - }; - }, - .mv => { - cmd.exec.bltn.impl = .{ - .mv = Mv{ .bltn = &cmd.exec.bltn }, - }; - }, - .ls => { - cmd.exec.bltn.impl = .{ - .ls = Ls{ - .bltn = &cmd.exec.bltn, - }, - }; - }, - } + switch (kind) { + .@"export" => { + cmd.exec.bltn.impl = .{ + .@"export" = Export{ .bltn = &cmd.exec.bltn }, + }; + }, + .rm => { + cmd.exec.bltn.impl = .{ + .rm = Rm{ + .bltn = &cmd.exec.bltn, + .opts = .{}, + }, + }; + }, + .echo => { + cmd.exec.bltn.impl = .{ + .echo = Echo{ + .bltn = &cmd.exec.bltn, + .output = std.ArrayList(u8).init(arena.allocator()), + }, + }; + }, + .cd => { + cmd.exec.bltn.impl = .{ + .cd = Cd{ + .bltn = &cmd.exec.bltn, + }, + }; + }, + .which => { + cmd.exec.bltn.impl = .{ + .which = Which{ + .bltn = &cmd.exec.bltn, + }, + }; + }, + .pwd => { + cmd.exec.bltn.impl = .{ + .pwd = Pwd{ .bltn = &cmd.exec.bltn }, + }; + }, + .mv => { + cmd.exec.bltn.impl = .{ + .mv = Mv{ .bltn = &cmd.exec.bltn }, + }; + }, + .ls => { + cmd.exec.bltn.impl = .{ + .ls = Ls{ + .bltn = &cmd.exec.bltn, + }, + }; + }, + } - if (node.redirect_file) |file| brk: { - if (comptime in_cmd_subst) { - if (node.redirect.stdin) { - stdin = .ignore; - } - - if (node.redirect.stdout) { - stdout = .ignore; - } - - if (node.redirect.stderr) { - stdout = .ignore; - } - - break :brk; + if (node.redirect_file) |file| brk: { + if (comptime in_cmd_subst) { + if (node.redirect.stdin) { + stdin = .ignore; } - switch (file) { - .atom => { - if (cmd.redirection_file.items.len == 0) { - const buf = std.fmt.allocPrint(arena.allocator(), "bun: ambiguous redirect: at `{s}`\n", .{@tagName(kind)}) catch bun.outOfMemory(); - cmd.writeFailingError(buf, 1); - return .yield; - } - const path = cmd.redirection_file.items[0..cmd.redirection_file.items.len -| 1 :0]; - log("EXPANDED REDIRECT: {s}\n", .{cmd.redirection_file.items[0..]}); - const perm = 0o666; - const flags = node.redirect.toFlags(); - const redirfd = switch (Syscall.openat(cmd.base.shell.cwd_fd, path, flags, perm)) { - .err => |e| { - const buf = std.fmt.allocPrint(arena.allocator(), "bun: {s}: 
{s}", .{ e.toSystemError().message, path }) catch bun.outOfMemory(); - cmd.writeFailingError(buf, 1); - return .yield; - }, - .result => |f| f, - }; - // cmd.redirection_fd = redirfd; - if (node.redirect.stdin) { - cmd.exec.bltn.stdin = .{ .fd = redirfd }; - } - if (node.redirect.stdout) { - cmd.exec.bltn.stdout = .{ .fd = redirfd }; - } - if (node.redirect.stderr) { - cmd.exec.bltn.stderr = .{ .fd = redirfd }; - } - }, - .jsbuf => |val| { - if (comptime EventLoopKind == .mini) @panic("This should nevver happened"); - if (interpreter.jsobjs[file.jsbuf.idx].asArrayBuffer(interpreter.global)) |buf| { - const builtinio: Builtin.BuiltinIO = .{ .arraybuf = .{ .buf = JSC.ArrayBuffer.Strong{ - .array_buffer = buf, - .held = JSC.Strong.create(buf.value, interpreter.global), - }, .i = 0 } }; - - if (node.redirect.stdin) { - cmd.exec.bltn.stdin = builtinio; - } - - if (node.redirect.stdout) { - cmd.exec.bltn.stdout = builtinio; - } - - if (node.redirect.stderr) { - cmd.exec.bltn.stderr = builtinio; - } - } else if (interpreter.jsobjs[file.jsbuf.idx].as(JSC.WebCore.Blob)) |blob| { - const builtinio: Builtin.BuiltinIO = .{ .blob = bun.newWithAlloc(arena.allocator(), JSC.WebCore.Blob, blob.dupe()) }; - - if (node.redirect.stdin) { - cmd.exec.bltn.stdin = builtinio; - } - - if (node.redirect.stdout) { - cmd.exec.bltn.stdout = builtinio; - } - - if (node.redirect.stderr) { - cmd.exec.bltn.stderr = builtinio; - } - } else { - const jsval = cmd.base.interpreter.jsobjs[val.idx]; - global_handle.get().globalThis.throw("Unknown JS value used in shell: {}", .{jsval.fmtString(global_handle.get().globalThis)}); - return .yield; - } - }, - } - } else if (node.redirect.duplicate_out) { if (node.redirect.stdout) { - cmd.exec.bltn.stderr = cmd.exec.bltn.stdout; + stdout = .ignore; } if (node.redirect.stderr) { - cmd.exec.bltn.stdout = cmd.exec.bltn.stderr; + stdout = .ignore; } + + break :brk; } - return .cont; + switch (file) { + .atom => { + if (cmd.redirection_file.items.len == 0) { + const buf = std.fmt.allocPrint(arena.allocator(), "bun: ambiguous redirect: at `{s}`\n", .{@tagName(kind)}) catch bun.outOfMemory(); + cmd.writeFailingError(buf, 1); + return .yield; + } + const path = cmd.redirection_file.items[0..cmd.redirection_file.items.len -| 1 :0]; + log("EXPANDED REDIRECT: {s}\n", .{cmd.redirection_file.items[0..]}); + const perm = 0o666; + const flags = node.redirect.toFlags(); + const redirfd = switch (Syscall.openat(cmd.base.shell.cwd_fd, path, flags, perm)) { + .err => |e| { + const buf = std.fmt.allocPrint(arena.allocator(), "bun: {s}: {s}", .{ e.toSystemError().message, path }) catch bun.outOfMemory(); + cmd.writeFailingError(buf, 1); + return .yield; + }, + .result => |f| f, + }; + // cmd.redirection_fd = redirfd; + if (node.redirect.stdin) { + cmd.exec.bltn.stdin = .{ .fd = redirfd }; + } + if (node.redirect.stdout) { + cmd.exec.bltn.stdout = .{ .fd = redirfd }; + } + if (node.redirect.stderr) { + cmd.exec.bltn.stderr = .{ .fd = redirfd }; + } + }, + .jsbuf => |val| { + const globalObject = interpreter.event_loop.js.global; + if (interpreter.jsobjs[file.jsbuf.idx].asArrayBuffer(globalObject)) |buf| { + const builtinio: Builtin.BuiltinIO = .{ .arraybuf = .{ .buf = JSC.ArrayBuffer.Strong{ + .array_buffer = buf, + .held = JSC.Strong.create(buf.value, globalObject), + }, .i = 0 } }; + + if (node.redirect.stdin) { + cmd.exec.bltn.stdin = builtinio; + } + + if (node.redirect.stdout) { + cmd.exec.bltn.stdout = builtinio; + } + + if (node.redirect.stderr) { + cmd.exec.bltn.stderr = builtinio; + } + } else if 
(interpreter.jsobjs[file.jsbuf.idx].as(JSC.WebCore.Blob)) |blob| { + const builtinio: Builtin.BuiltinIO = .{ .blob = bun.newWithAlloc(arena.allocator(), JSC.WebCore.Blob, blob.dupe()) }; + + if (node.redirect.stdin) { + cmd.exec.bltn.stdin = builtinio; + } + + if (node.redirect.stdout) { + cmd.exec.bltn.stdout = builtinio; + } + + if (node.redirect.stderr) { + cmd.exec.bltn.stderr = builtinio; + } + } else { + const jsval = cmd.base.interpreter.jsobjs[val.idx]; + cmd.base.interpreter.event_loop.js.global.throw("Unknown JS value used in shell: {}", .{jsval.fmtString(globalObject)}); + return .yield; + } + }, + } + } else if (node.redirect.duplicate_out) { + if (node.redirect.stdout) { + cmd.exec.bltn.stderr = cmd.exec.bltn.stdout; + } + + if (node.redirect.stderr) { + cmd.exec.bltn.stdout = cmd.exec.bltn.stderr; + } + } + + return .cont; } - pub inline fn eventLoop(this: *Builtin) JSC.EventLoopHandle { + pub inline fn eventLoop(this: *const Builtin) JSC.EventLoopHandle { return this.parentCmd().base.eventLoop(); } - pub inline fn parentCmd(this: *Builtin) *Cmd { + pub inline fn throw(this: *const Builtin, err: *const bun.shell.ShellErr) void { + this.parentCmd().base.throw(err); + } + + pub inline fn parentCmd(this: *const Builtin) *const Cmd { + const union_ptr = @fieldParentPtr(Cmd.Exec, "bltn", this); + return @fieldParentPtr(Cmd, "exec", union_ptr); + } + + pub inline fn parentCmdMut(this: *Builtin) *Cmd { const union_ptr = @fieldParentPtr(Cmd.Exec, "bltn", this); return @fieldParentPtr(Cmd, "exec", union_ptr); } @@ -4088,7 +4084,7 @@ pub const Interpreter = struct { // } this.exit_code = exit_code; - var cmd = this.parentCmd(); + var cmd = this.parentCmdMut(); log("builtin done ({s}: exit={d}) cmd to free: ({x})", .{ @tagName(this.kind), exit_code, @intFromPtr(cmd) }); cmd.exit_code = this.exit_code.?; @@ -4945,11 +4941,11 @@ pub const Interpreter = struct { if (paths) |p| { for (p) |path_raw| { const path = path_raw[0..std.mem.len(path_raw) :0]; - var task = ShellLsTask.create(this, this.opts, &this.state.exec.task_count, cwd, path, this.bltn.parentCmd().base.eventLoop()); + var task = ShellLsTask.create(this, this.opts, &this.state.exec.task_count, cwd, path, this.bltn.eventLoop()); task.schedule(); } } else { - var task = ShellLsTask.create(this, this.opts, &this.state.exec.task_count, cwd, ".", this.bltn.parentCmd().base.eventLoop()); + var task = ShellLsTask.create(this, this.opts, &this.state.exec.task_count, cwd, ".", this.bltn.eventLoop()); task.schedule(); } }, @@ -5059,7 +5055,7 @@ pub const Interpreter = struct { // if (!need_to_write_to_stdout_with_io) return; // yield execution } else { if (this.bltn.writeNoIO(.stderr, error_string).asErr()) |theerr| { - throwShellErr(bun.shell.ShellErr.newSys(theerr), this.bltn.eventLoop()); + this.bltn.throw(&bun.shell.ShellErr.newSys(theerr)); } } } @@ -5090,7 +5086,7 @@ pub const Interpreter = struct { defer output.deinit(); if (this.bltn.writeNoIO(.stdout, output.items[0..]).asErr()) |e| { - throwShellErr(bun.shell.ShellErr.newSys(e), this.bltn.eventLoop()); + this.bltn.throw(&bun.shell.ShellErr.newSys(e)); return; } @@ -6743,7 +6739,7 @@ pub const Interpreter = struct { const error_string = this.bltn.taskErrorToString(.rm, err); if (!this.bltn.stderr.needsIO()) { if (this.bltn.writeNoIO(.stderr, error_string).asErr()) |e| { - throwShellErr(bun.shell.ShellErr.newSys(e), this.bltn.parentCmd().base.eventLoop()); + this.bltn.throw(&bun.shell.ShellErr.newSys(e)); return; } } else { @@ -6779,7 +6775,7 @@ pub const Interpreter = struct { fn 
writeVerbose(this: *Rm, verbose: *ShellRmTask.DirTask) void { if (!this.bltn.stdout.needsIO()) { if (this.bltn.writeNoIO(.stdout, verbose.deleted_entries.items[0..]).asErr()) |err| { - throwShellErr(bun.shell.ShellErr.newSys(err), this.bltn.parentCmd().base.eventLoop()); + this.bltn.parentCmd().base.throw(&bun.shell.ShellErr.newSys(err)); return; } // _ = this.state.exec.output_done.fetchAdd(1, .SeqCst); @@ -7772,7 +7768,7 @@ inline fn fastMod(val: anytype, comptime rhs: comptime_int) @TypeOf(val) { return val & (rhs - 1); } -fn throwShellErr(e: bun.shell.ShellErr, event_loop: JSC.EventLoopHandle) void { +fn throwShellErr(e: *const bun.shell.ShellErr, event_loop: JSC.EventLoopHandle) void { switch (event_loop) { .mini => e.throwMini(), .js => e.throwJS(event_loop.js.global), diff --git a/src/shell/shell.zig b/src/shell/shell.zig index 5ebc81b74d..4c9e69926a 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -67,8 +67,8 @@ pub const ShellErr = union(enum) { } } - pub fn throwJS(this: @This(), globalThis: *JSC.JSGlobalObject) void { - switch (this) { + pub fn throwJS(this: *const @This(), globalThis: *JSC.JSGlobalObject) void { + switch (this.*) { .sys => { const err = this.sys.toErrorInstance(globalThis); globalThis.throwValue(err); @@ -77,7 +77,7 @@ pub const ShellErr = union(enum) { var str = JSC.ZigString.init(this.custom); str.markUTF8(); const err_value = str.toErrorInstance(globalThis); - globalThis.vm().throwError(globalThis, err_value); + globalThis.throwValue(err_value); // this.bunVM().allocator.free(JSC.ZigString.untagged(str._unsafe_ptr_do_not_use)[0..str.len]); }, .invalid_arguments => { @@ -2105,7 +2105,7 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { fn eatJSSubstitutionIdx(self: *@This(), comptime literal: []const u8, comptime name: []const u8, comptime validate: *const fn (*@This(), usize) bool) ?usize { if (self.matchesAsciiLiteral(literal[1..literal.len])) { - const bytes = self.chars.srcBytesAtCursor(); + const bytes = self.chars.srcBytesAtCursor(); var i: usize = 0; var digit_buf: [32]u8 = undefined; var digit_buf_count: u8 = 0; @@ -2146,9 +2146,9 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { // } // Bump the cursor - const new_idx = self.chars.cursorPos() + i; - const prev_ascii_char: ?u7 = if (digit_buf_count == 1) null else @truncate(digit_buf[digit_buf_count - 2]); - const cur_ascii_char: u7 = @truncate(digit_buf[digit_buf_count - 1]); + const new_idx = self.chars.cursorPos() + i; + const prev_ascii_char: ?u7 = if (digit_buf_count == 1) null else @truncate(digit_buf[digit_buf_count - 2]); + const cur_ascii_char: u7 = @truncate(digit_buf[digit_buf_count - 1]); self.bumpCursorAscii(new_idx, prev_ascii_char, cur_ascii_char); // return self.string_refs[idx]; @@ -3001,8 +3001,8 @@ pub fn escapeBunStr(bunstr: bun.String, outbuf: *std.ArrayList(u8), comptime add return try escapeUtf16(bunstr.utf16(), outbuf, add_quotes); } if (bunstr.isUTF8()) { - try escapeWTF8(bunstr.byteSlice(), outbuf, add_quotes); - return true; + try escapeWTF8(bunstr.byteSlice(), outbuf, add_quotes); + return true; } // otherwise should be latin-1 or ascii try escape8Bit(bunstr.byteSlice(), outbuf, add_quotes); diff --git a/src/shell/util.zig b/src/shell/util.zig index 6f60861bd8..1b448c818c 100644 --- a/src/shell/util.zig +++ b/src/shell/util.zig @@ -25,6 +25,6 @@ pub const OutKind = enum { } }; - pub const Stdio = bun.spawn.Stdio; +pub const Stdio = bun.spawn.Stdio; pub const WatchFd = if (Environment.isLinux) std.os.fd_t else i32; From 
0a42ac0deda782847fdec70047b3d69f42740fe2 Mon Sep 17 00:00:00 2001 From: cirospaciari Date: Thu, 22 Feb 2024 08:57:36 -0300 Subject: [PATCH 19/21] more stable stream and now Content-Range pass --- src/bun.js/api/server.zig | 25 ++++++++++++++----------- src/bun.js/webcore/blob/ReadFile.zig | 21 +++++++++------------ src/bun.js/webcore/body.zig | 7 ++++--- src/bun.js/webcore/streams.zig | 5 +++-- 4 files changed, 30 insertions(+), 28 deletions(-) diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index e9ae8c8d93..32ae10be2c 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -2140,11 +2140,11 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (this.blob == .Blob) { const original_size = this.blob.Blob.size; - + // if we dont know the size we use the stat size this.blob.Blob.size = if (original_size == 0 or original_size == Blob.max_size) stat_size - else - @min(original_size, stat_size); + else // the blob can be a slice of a file + @max(original_size, stat_size); } if (!this.flags.has_written_status) @@ -2158,6 +2158,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp .auto_close = false, .socket_fd = bun.invalid_fd, }; + this.response_buf_owned = .{ .items = result.result.buf, .capacity = result.result.buf.len }; this.resp.?.runCorkedWithType(*RequestContext, renderResponseBufferAndMetadata, this); } @@ -2196,7 +2197,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp var this = pair.this; var stream = pair.stream; if (this.resp == null or this.flags.aborted) { - stream.value.unprotect(); + // stream.value.unprotect(); this.finalizeForAbort(); return; } @@ -2264,7 +2265,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp response_stream.sink.destroy(); this.endStream(this.shouldCloseConnection()); this.finalize(); - stream.value.unprotect(); + // stream.value.unprotect(); return; } @@ -2293,6 +2294,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp .global = globalThis, }, }; + stream.incrementCount(); assignment_result.then( globalThis, this, @@ -2304,13 +2306,13 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp }, .Fulfilled => { streamLog("promise Fulfilled", .{}); - defer stream.value.unprotect(); + // defer stream.value.unprotect(); this.handleResolveStream(); }, .Rejected => { streamLog("promise Rejected", .{}); - defer stream.value.unprotect(); + // defer stream.value.unprotect(); this.handleRejectStream(globalThis, promise.result(globalThis.vm())); }, @@ -2330,7 +2332,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (this.flags.aborted) { response_stream.detach(); stream.cancel(globalThis); - defer stream.value.unprotect(); + // defer stream.value.unprotect(); response_stream.sink.markDone(); this.finalizeForAbort(); response_stream.sink.onFirstWrite = null; @@ -2340,7 +2342,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp } stream.value.ensureStillAlive(); - defer stream.value.unprotect(); + // defer stream.value.unprotect(); const is_in_progress = response_stream.sink.has_backpressure or !(response_stream.sink.wrote == 0 and response_stream.sink.buffer.len == 0); @@ -2691,7 +2693,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp .code = bun.String.static(@as(string, @tagName(JSC.Node.ErrorCode.ERR_STREAM_CANNOT_PIPE))), 
.message = bun.String.static("Stream already used, please create a new one"), }; - stream.value.unprotect(); + // stream.value.unprotect(); this.runErrorHandler(err.toErrorInstance(this.server.globalThis)); return; } @@ -3046,7 +3048,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp var response: *JSC.WebCore.Response = this.response_ptr.?; var status = response.statusCode(); - var needs_content_range = this.flags.needs_content_range and this.sendfile.remain <= this.blob.size(); + var needs_content_range = this.flags.needs_content_range and this.sendfile.remain < this.blob.size(); + const size = if (needs_content_range) this.sendfile.remain else diff --git a/src/bun.js/webcore/blob/ReadFile.zig b/src/bun.js/webcore/blob/ReadFile.zig index bd1d7907ba..4739979ca7 100644 --- a/src/bun.js/webcore/blob/ReadFile.zig +++ b/src/bun.js/webcore/blob/ReadFile.zig @@ -63,6 +63,7 @@ pub const ReadFile = struct { store: ?*Store = null, offset: SizeType = 0, max_length: SizeType = Blob.max_size, + total_size: SizeType = Blob.max_size, opened_fd: bun.FileDescriptor = invalid_fd, read_off: SizeType = 0, read_eof: bool = false, @@ -287,7 +288,6 @@ pub const ReadFile = struct { const buf = this.buffer.items; defer store.deref(); - const total_size = this.size; const system_error = this.system_error; bun.destroy(this); @@ -296,7 +296,7 @@ pub const ReadFile = struct { return; } - cb(cb_ctx, .{ .result = .{ .buf = buf, .total_size = total_size, .is_temporary = true } }); + cb(cb_ctx, .{ .result = .{ .buf = buf, .total_size = this.total_size, .is_temporary = true } }); } pub fn run(this: *ReadFile, task: *ReadFileTask) void { @@ -368,12 +368,10 @@ pub const ReadFile = struct { } this.could_block = !bun.isRegularFile(stat.mode); + this.total_size = @truncate(@as(SizeType, @intCast(@max(@as(i64, @intCast(stat.size)), 0)))); if (stat.size > 0 and !this.could_block) { - this.size = @min( - @as(SizeType, @truncate(@as(SizeType, @intCast(@max(@as(i64, @intCast(stat.size)), 0))))), - this.max_length, - ); + this.size = @min(this.total_size, this.max_length); // read up to 4k at a time if // they didn't explicitly set a size and we're reading from something that's not a regular file } else if (stat.size == 0 and this.could_block) { @@ -556,6 +554,7 @@ pub const ReadFileUV = struct { store: *Store, offset: SizeType = 0, max_length: SizeType = Blob.max_size, + total_size: SizeType = Blob.max_size, opened_fd: bun.FileDescriptor = invalid_fd, read_len: SizeType = 0, read_off: SizeType = 0, @@ -602,9 +601,8 @@ pub const ReadFileUV = struct { cb(cb_ctx, ReadFile.ResultType{ .err = err }); return; } - const size = this.size; - cb(cb_ctx, .{ .result = .{ .buf = buf, .total_size = size, .is_temporary = true } }); + cb(cb_ctx, .{ .result = .{ .buf = buf, .total_size = this.total_size, .is_temporary = true } }); } pub fn isAllowedToClose(this: *const ReadFileUV) bool { @@ -617,6 +615,7 @@ pub const ReadFileUV = struct { const needs_close = fd != bun.invalid_fd; this.size = @max(this.read_len, this.size); + this.total_size = @max(this.total_size, this.size); if (needs_close) { if (this.doClose(this.isAllowedToClose())) { @@ -678,13 +677,11 @@ pub const ReadFileUV = struct { this.onFinish(); return; } + this.total_size = @truncate(@as(SizeType, @intCast(@max(@as(i64, @intCast(stat.size)), 0)))); this.could_block = !bun.isRegularFile(stat.mode); if (stat.size > 0 and !this.could_block) { - this.size = @min( - @as(SizeType, @truncate(@as(SizeType, @intCast(@max(@as(i64, @intCast(stat.size)), 
0))))), - this.max_length, - ); + this.size = @min(this.total_size, this.max_length); // read up to 4k at a time if // they didn't explicitly set a size and we're reading from something that's not a regular file } else if (stat.size == 0 and this.could_block) { diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index 4babc8745b..4b76f1e0ee 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -393,6 +393,7 @@ pub const Body = struct { .global = globalThis, }, }; + this.Locked.readable.?.incrementCount(); return value; @@ -442,7 +443,7 @@ pub const Body = struct { .ptr = .{ .Bytes = &reader.context }, .value = reader.toReadableStream(globalThis), }; - locked.readable.?.value.protect(); + locked.readable.?.incrementCount(); if (locked.onReadableStreamAvailable) |onReadableStreamAvailable| { onReadableStreamAvailable(locked.task.?, locked.readable.?); @@ -1360,12 +1361,12 @@ pub const BodyValueBufferer = struct { ); }, .Fulfilled => { - defer stream.value.unprotect(); + // defer stream.value.unprotect(); sink.handleResolveStream(false); }, .Rejected => { - defer stream.value.unprotect(); + // defer stream.value.unprotect(); sink.handleRejectStream(promise.result(globalThis.vm()), false); }, diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 0bb0b681ca..a11f69e306 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -52,6 +52,7 @@ pub const ReadableStream = struct { ptr: Source, pub fn incrementCount(this: *const ReadableStream) void { + this.value.protect(); switch (this.ptr) { .Blob => |blob| blob.parent().incrementCount(), .File => |file| file.parent().incrementCount(), @@ -161,13 +162,13 @@ pub const ReadableStream = struct { pub fn cancel(this: *const ReadableStream, globalThis: *JSGlobalObject) void { JSC.markBinding(@src()); ReadableStream__cancel(this.value, globalThis); - this.value.unprotect(); + this.detachIfPossible(globalThis); } pub fn abort(this: *const ReadableStream, globalThis: *JSGlobalObject) void { JSC.markBinding(@src()); ReadableStream__cancel(this.value, globalThis); - this.value.unprotect(); + this.detachIfPossible(globalThis); } pub fn forceDetach(this: *const ReadableStream, globalObject: *JSGlobalObject) void { From fe01d9b783518879fd4a0daddd0eb86549571a2a Mon Sep 17 00:00:00 2001 From: cirospaciari Date: Thu, 22 Feb 2024 09:02:47 -0300 Subject: [PATCH 20/21] make windows compile again --- src/output.zig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/output.zig b/src/output.zig index 5515ecbc9c..a204cd75f2 100644 --- a/src/output.zig +++ b/src/output.zig @@ -829,7 +829,8 @@ fn scopedWriter() std.fs.File.Writer { std.fs.cwd().fd, path, std.os.O.TRUNC | std.os.O.CREAT | std.os.O.WRONLY, - 0o644, + // on windows this is u0 + if (Environment.isWindows) 0 else 0o644, ) catch |err_| { // Ensure we don't panic inside panic Scoped.loaded_env = false; From a70d0df7c99b83a855309606ef310de89afb5ddd Mon Sep 17 00:00:00 2001 From: cirospaciari Date: Thu, 22 Feb 2024 13:02:37 -0300 Subject: [PATCH 21/21] revert stuff until the fix is actually ready --- src/bun.js/api/server.zig | 19 +++++++++---------- src/bun.js/webcore/body.zig | 22 +++++++++++----------- src/bun.js/webcore/streams.zig | 8 ++++---- 3 files changed, 24 insertions(+), 25 deletions(-) diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 32ae10be2c..eafee2f0b2 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -2197,7 +2197,7 @@ fn 
NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp var this = pair.this; var stream = pair.stream; if (this.resp == null or this.flags.aborted) { - // stream.value.unprotect(); + stream.value.unprotect(); this.finalizeForAbort(); return; } @@ -2265,7 +2265,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp response_stream.sink.destroy(); this.endStream(this.shouldCloseConnection()); this.finalize(); - // stream.value.unprotect(); + stream.value.unprotect(); return; } @@ -2294,7 +2294,6 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp .global = globalThis, }, }; - stream.incrementCount(); assignment_result.then( globalThis, this, @@ -2306,13 +2305,13 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp }, .Fulfilled => { streamLog("promise Fulfilled", .{}); - // defer stream.value.unprotect(); + defer stream.value.unprotect(); this.handleResolveStream(); }, .Rejected => { streamLog("promise Rejected", .{}); - // defer stream.value.unprotect(); + defer stream.value.unprotect(); this.handleRejectStream(globalThis, promise.result(globalThis.vm())); }, @@ -2332,7 +2331,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (this.flags.aborted) { response_stream.detach(); stream.cancel(globalThis); - // defer stream.value.unprotect(); + defer stream.value.unprotect(); response_stream.sink.markDone(); this.finalizeForAbort(); response_stream.sink.onFirstWrite = null; @@ -2342,7 +2341,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp } stream.value.ensureStillAlive(); - // defer stream.value.unprotect(); + defer stream.value.unprotect(); const is_in_progress = response_stream.sink.has_backpressure or !(response_stream.sink.wrote == 0 and response_stream.sink.buffer.len == 0); @@ -2559,7 +2558,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (req.response_ptr) |resp| { if (resp.body.value == .Locked) { - resp.body.value.Locked.readable.?.done(req.server.globalThis); + resp.body.value.Locked.readable.?.done(); resp.body.value = .{ .Used = {} }; } } @@ -2619,7 +2618,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (req.response_ptr) |resp| { if (resp.body.value == .Locked) { - resp.body.value.Locked.readable.?.done(req.server.globalThis); + resp.body.value.Locked.readable.?.done(); resp.body.value = .{ .Used = {} }; } } @@ -2693,7 +2692,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp .code = bun.String.static(@as(string, @tagName(JSC.Node.ErrorCode.ERR_STREAM_CANNOT_PIPE))), .message = bun.String.static("Stream already used, please create a new one"), }; - // stream.value.unprotect(); + stream.value.unprotect(); this.runErrorHandler(err.toErrorInstance(this.server.globalThis)); return; } diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index 4b76f1e0ee..b98999929e 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -167,12 +167,12 @@ pub const Body = struct { if (value.onStartBuffering != null) { if (readable.isDisturbed(globalThis)) { form_data.?.deinit(); - readable.detachIfPossible(globalThis); + readable.value.unprotect(); value.readable = null; value.action = .{ .none = {} }; return JSC.JSPromise.rejectedPromiseValue(globalThis, globalThis.createErrorInstance("ReadableStream is already used", .{})); } else { - 
readable.detachIfPossible(globalThis); + readable.value.unprotect(); value.readable = null; } @@ -191,7 +191,7 @@ pub const Body = struct { else => unreachable, }; value.promise.?.ensureStillAlive(); - readable.detachIfPossible(globalThis); + readable.value.unprotect(); // js now owns the memory value.readable = null; @@ -394,7 +394,7 @@ pub const Body = struct { }, }; - this.Locked.readable.?.incrementCount(); + this.Locked.readable.?.value.protect(); return value; }, @@ -443,7 +443,7 @@ pub const Body = struct { .ptr = .{ .Bytes = &reader.context }, .value = reader.toReadableStream(globalThis), }; - locked.readable.?.incrementCount(); + locked.readable.?.value.protect(); if (locked.onReadableStreamAvailable) |onReadableStreamAvailable| { onReadableStreamAvailable(locked.task.?, locked.readable.?); @@ -581,7 +581,7 @@ pub const Body = struct { } pub fn fromReadableStreamWithoutLockCheck(readable: JSC.WebCore.ReadableStream, globalThis: *JSGlobalObject) Value { - readable.incrementCount(); + readable.value.protect(); return .{ .Locked = .{ .readable = readable, @@ -595,7 +595,7 @@ pub const Body = struct { if (to_resolve.* == .Locked) { var locked = &to_resolve.Locked; if (locked.readable) |readable| { - readable.done(global); + readable.done(); locked.readable = null; } @@ -814,7 +814,7 @@ pub const Body = struct { } if (locked.readable) |readable| { - readable.done(global); + readable.done(); locked.readable = null; } // will be unprotected by body value deinit @@ -855,7 +855,7 @@ pub const Body = struct { this.Locked.deinit = true; if (this.Locked.readable) |*readable| { - readable.done(this.Locked.global); + readable.done(); } } @@ -1361,12 +1361,12 @@ pub const BodyValueBufferer = struct { ); }, .Fulfilled => { - // defer stream.value.unprotect(); + defer stream.value.unprotect(); sink.handleResolveStream(false); }, .Rejected => { - // defer stream.value.unprotect(); + defer stream.value.unprotect(); sink.handleRejectStream(promise.result(globalThis.vm()), false); }, diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index a11f69e306..b737bd79f8 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -155,20 +155,20 @@ pub const ReadableStream = struct { return null; } - pub fn done(this: *const ReadableStream, globalThis: *JSGlobalObject) void { - this.detachIfPossible(globalThis); + pub fn done(this: *const ReadableStream) void { + this.value.unprotect(); } pub fn cancel(this: *const ReadableStream, globalThis: *JSGlobalObject) void { JSC.markBinding(@src()); ReadableStream__cancel(this.value, globalThis); - this.detachIfPossible(globalThis); + this.value.unprotect(); } pub fn abort(this: *const ReadableStream, globalThis: *JSGlobalObject) void { JSC.markBinding(@src()); ReadableStream__cancel(this.value, globalThis); - this.detachIfPossible(globalThis); + this.value.unprotect(); } pub fn forceDetach(this: *const ReadableStream, globalObject: *JSGlobalObject) void {